1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
40 #include "langhooks.h"
41 #include "pointer-set.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
62 #ifndef MAX_BITS_PER_WORD
63 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
107 { "const", 0, 0, true, false, false, handle_const_attribute },
108 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
109 { "pure", 0, 0, true, false, false, handle_pure_attribute },
110 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
111 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
112 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
113 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
114 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
115 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
117 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
118 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
119 { "may_alias", 0, 0, false, true, false, NULL },
121 /* ??? format and format_arg are heavy and not supported, which actually
122 prevents support for stdio builtins, which we however declare as part
123 of the common builtins.def contents. */
124 { "format", 3, 3, false, true, true, fake_attribute_handler },
125 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Associates a GNAT tree node to a GCC tree node. It is used in
131 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
132 of `save_gnu_tree' for more info. */
133 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
135 #define GET_GNU_TREE(GNAT_ENTITY) \
136 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
138 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
139 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
141 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
142 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
144 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
145 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
147 #define GET_DUMMY_NODE(GNAT_ENTITY) \
148 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
150 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
151 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
153 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
154 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
156 /* This variable keeps a table for types for each precision so that we only
157 allocate each of them once. Signed and unsigned types are kept separate.
159 Note that these types are only used when fold-const requests something
160 special. Perhaps we should NOT share these types; we'll see how it
162 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
164 /* Likewise for float types, but record these by mode. */
165 static GTY(()) tree float_types[NUM_MACHINE_MODES];
167 /* For each binding contour we allocate a binding_level structure to indicate
168 the binding depth. */
170 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
171 /* The binding level containing this one (the enclosing binding level). */
172 struct gnat_binding_level *chain;
173 /* The BLOCK node for this level. */
175 /* If nonzero, the setjmp buffer that needs to be updated for any
176 variable-sized definition within this context. */
180 /* The binding level currently in effect. */
181 static GTY(()) struct gnat_binding_level *current_binding_level;
183 /* A chain of gnat_binding_level structures awaiting reuse. */
184 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
186 /* An array of global declarations. */
187 static GTY(()) VEC(tree,gc) *global_decls;
189 /* An array of builtin function declarations. */
190 static GTY(()) VEC(tree,gc) *builtin_decls;
192 /* An array of global renaming pointers. */
193 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
195 /* A chain of unused BLOCK nodes. */
196 static GTY((deletable)) tree free_block_chain;
198 static tree merge_sizes (tree, tree, tree, bool, bool);
199 static tree compute_related_constant (tree, tree);
200 static tree split_plus (tree, tree *);
201 static tree float_type_for_precision (int, enum machine_mode);
202 static tree convert_to_fat_pointer (tree, tree);
203 static tree convert_to_thin_pointer (tree, tree);
204 static tree make_descriptor_field (const char *,tree, tree, tree);
205 static bool potential_alignment_gap (tree, tree, tree);
207 /* Initialize the association of GNAT nodes to GCC trees. */
210 init_gnat_to_gnu (void)
212 associate_gnat_to_gnu
213 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
216 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
217 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
218 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
220 If GNU_DECL is zero, a previous association is to be reset. */
223 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
225 /* Check that GNAT_ENTITY is not already defined and that it is being set
226 to something which is a decl. Raise gigi 401 if not. Usually, this
227 means GNAT_ENTITY is defined twice, but occasionally is due to some
229 gcc_assert (!(gnu_decl
230 && (PRESENT_GNU_TREE (gnat_entity)
231 || (!no_check && !DECL_P (gnu_decl)))));
233 SET_GNU_TREE (gnat_entity, gnu_decl);
236 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
237 Return the ..._DECL node that was associated with it. If there is no tree
238 node associated with GNAT_ENTITY, abort.
240 In some cases, such as delayed elaboration or expressions that need to
241 be elaborated only once, GNAT_ENTITY is really not an entity. */
244 get_gnu_tree (Entity_Id gnat_entity)
246 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
247 return GET_GNU_TREE (gnat_entity);
250 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
253 present_gnu_tree (Entity_Id gnat_entity)
255 return PRESENT_GNU_TREE (gnat_entity);
258 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
261 init_dummy_type (void)
264 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
267 /* Make a dummy type corresponding to GNAT_TYPE. */
270 make_dummy_type (Entity_Id gnat_type)
272 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
275 /* If there is an equivalent type, get its underlying type. */
276 if (Present (gnat_underlying))
277 gnat_underlying = Underlying_Type (gnat_underlying);
279 /* If there was no equivalent type (can only happen when just annotating
280 types) or underlying type, go back to the original type. */
281 if (No (gnat_underlying))
282 gnat_underlying = gnat_type;
284 /* If it there already a dummy type, use that one. Else make one. */
285 if (PRESENT_DUMMY_NODE (gnat_underlying))
286 return GET_DUMMY_NODE (gnat_underlying);
288 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
290 gnu_type = make_node (Is_Record_Type (gnat_underlying)
291 ? tree_code_for_record_type (gnat_underlying)
293 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
294 TYPE_DUMMY_P (gnu_type) = 1;
295 TYPE_STUB_DECL (gnu_type)
296 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
297 if (AGGREGATE_TYPE_P (gnu_type))
298 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
300 SET_DUMMY_NODE (gnat_underlying, gnu_type);
305 /* Return nonzero if we are currently in the global binding level. */
308 global_bindings_p (void)
310 return ((force_global || !current_function_decl) ? -1 : 0);
313 /* Enter a new binding level. */
316 gnat_pushlevel (void)
318 struct gnat_binding_level *newlevel = NULL;
320 /* Reuse a struct for this binding level, if there is one. */
321 if (free_binding_level)
323 newlevel = free_binding_level;
324 free_binding_level = free_binding_level->chain;
328 = (struct gnat_binding_level *)
329 ggc_alloc (sizeof (struct gnat_binding_level));
331 /* Use a free BLOCK, if any; otherwise, allocate one. */
332 if (free_block_chain)
334 newlevel->block = free_block_chain;
335 free_block_chain = BLOCK_CHAIN (free_block_chain);
336 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
339 newlevel->block = make_node (BLOCK);
341 /* Point the BLOCK we just made to its parent. */
342 if (current_binding_level)
343 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
345 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
346 TREE_USED (newlevel->block) = 1;
348 /* Add this level to the front of the chain (stack) of levels that are
350 newlevel->chain = current_binding_level;
351 newlevel->jmpbuf_decl = NULL_TREE;
352 current_binding_level = newlevel;
355 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
356 and point FNDECL to this BLOCK. */
359 set_current_block_context (tree fndecl)
361 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
362 DECL_INITIAL (fndecl) = current_binding_level->block;
365 /* Set the jmpbuf_decl for the current binding level to DECL. */
368 set_block_jmpbuf_decl (tree decl)
370 current_binding_level->jmpbuf_decl = decl;
373 /* Get the jmpbuf_decl, if any, for the current binding level. */
376 get_block_jmpbuf_decl (void)
378 return current_binding_level->jmpbuf_decl;
381 /* Exit a binding level. Set any BLOCK into the current code group. */
386 struct gnat_binding_level *level = current_binding_level;
387 tree block = level->block;
389 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
390 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
392 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
393 are no variables free the block and merge its subblocks into those of its
394 parent block. Otherwise, add it to the list of its parent. */
395 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
397 else if (BLOCK_VARS (block) == NULL_TREE)
399 BLOCK_SUBBLOCKS (level->chain->block)
400 = chainon (BLOCK_SUBBLOCKS (block),
401 BLOCK_SUBBLOCKS (level->chain->block));
402 BLOCK_CHAIN (block) = free_block_chain;
403 free_block_chain = block;
407 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
408 BLOCK_SUBBLOCKS (level->chain->block) = block;
409 TREE_USED (block) = 1;
410 set_block_for_group (block);
413 /* Free this binding structure. */
414 current_binding_level = level->chain;
415 level->chain = free_binding_level;
416 free_binding_level = level;
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope
421 and uses GNAT_NODE for location information and propagating flags. */
424 gnat_pushdecl (tree decl, Node_Id gnat_node)
426 /* If this decl is public external or at toplevel, there is no context.
427 But PARM_DECLs always go in the level of its function. */
428 if (TREE_CODE (decl) != PARM_DECL
429 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
430 || global_bindings_p ()))
431 DECL_CONTEXT (decl) = 0;
434 DECL_CONTEXT (decl) = current_function_decl;
436 /* Functions imported in another function are not really nested.
437 For really nested functions mark them initially as needing
438 a static chain for uses of that flag before unnesting;
439 lower_nested_functions will then recompute it. */
440 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
441 DECL_STATIC_CHAIN (decl) = 1;
444 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
446 /* Set the location of DECL and emit a declaration for it. */
447 if (Present (gnat_node))
448 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
449 add_decl_expr (decl, gnat_node);
451 /* Put the declaration on the list. The list of declarations is in reverse
452 order. The list will be reversed later. Put global variables in the
453 globals list and builtin functions in a dedicated list to speed up
454 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
455 the list, as they will cause trouble with the debugger and aren't needed
457 if (TREE_CODE (decl) != TYPE_DECL
458 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
460 if (global_bindings_p ())
462 VEC_safe_push (tree, gc, global_decls, decl);
464 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
465 VEC_safe_push (tree, gc, builtin_decls, decl);
469 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
470 BLOCK_VARS (current_binding_level->block) = decl;
474 /* For the declaration of a type, set its name if it either is not already
475 set or if the previous type name was not derived from a source name.
476 We'd rather have the type named with a real name and all the pointer
477 types to the same object have the same POINTER_TYPE node. Code in the
478 equivalent function of c-decl.c makes a copy of the type node here, but
479 that may cause us trouble with incomplete types. We make an exception
480 for fat pointer types because the compiler automatically builds them
481 for unconstrained array types and the debugger uses them to represent
482 both these and pointers to these. */
483 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
485 tree t = TREE_TYPE (decl);
487 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
489 else if (TYPE_IS_FAT_POINTER_P (t))
491 tree tt = build_variant_type_copy (t);
492 TYPE_NAME (tt) = decl;
493 TREE_USED (tt) = TREE_USED (t);
494 TREE_TYPE (decl) = tt;
495 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
496 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
498 DECL_ORIGINAL_TYPE (decl) = t;
500 DECL_ARTIFICIAL (decl) = 0;
502 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
507 /* Propagate the name to all the variants. This is needed for
508 the type qualifiers machinery to work properly. */
510 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
511 TYPE_NAME (t) = decl;
515 /* Do little here. Set up the standard declarations later after the
516 front end has been run. */
519 gnat_init_decl_processing (void)
521 /* Make the binding_level structure for global names. */
522 current_function_decl = 0;
523 current_binding_level = 0;
524 free_binding_level = 0;
527 build_common_tree_nodes (true, true);
529 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
530 corresponding to the width of Pmode. In most cases when ptr_mode
531 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
532 But we get far better code using the width of Pmode. */
533 size_type_node = gnat_type_for_mode (Pmode, 0);
534 set_sizetype (size_type_node);
536 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
537 boolean_type_node = make_unsigned_type (8);
538 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
539 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
540 build_int_cst (boolean_type_node, 1));
541 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
543 build_common_tree_nodes_2 (0);
544 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
546 ptr_void_type_node = build_pointer_type (void_type_node);
549 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
552 record_builtin_type (const char *name, tree type)
554 tree type_decl = build_decl (input_location,
555 TYPE_DECL, get_identifier (name), type);
557 gnat_pushdecl (type_decl, Empty);
559 if (debug_hooks->type_decl)
560 debug_hooks->type_decl (type_decl, false);
563 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
564 finish constructing the record or union type. If REP_LEVEL is zero, this
565 record has no representation clause and so will be entirely laid out here.
566 If REP_LEVEL is one, this record has a representation clause and has been
567 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
568 this record is derived from a parent record and thus inherits its layout;
569 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
570 true, the record type is expected to be modified afterwards so it will
571 not be sent to the back-end for finalization. */
574 finish_record_type (tree record_type, tree fieldlist, int rep_level,
575 bool do_not_finalize)
577 enum tree_code code = TREE_CODE (record_type);
578 tree name = TYPE_NAME (record_type);
579 tree ada_size = bitsize_zero_node;
580 tree size = bitsize_zero_node;
581 bool had_size = TYPE_SIZE (record_type) != 0;
582 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
583 bool had_align = TYPE_ALIGN (record_type) != 0;
586 TYPE_FIELDS (record_type) = fieldlist;
588 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
589 generate debug info and have a parallel type. */
590 if (name && TREE_CODE (name) == TYPE_DECL)
591 name = DECL_NAME (name);
592 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
594 /* Globally initialize the record first. If this is a rep'ed record,
595 that just means some initializations; otherwise, layout the record. */
598 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
599 SET_TYPE_MODE (record_type, BLKmode);
602 TYPE_SIZE_UNIT (record_type) = size_zero_node;
604 TYPE_SIZE (record_type) = bitsize_zero_node;
606 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
607 out just like a UNION_TYPE, since the size will be fixed. */
608 else if (code == QUAL_UNION_TYPE)
613 /* Ensure there isn't a size already set. There can be in an error
614 case where there is a rep clause but all fields have errors and
615 no longer have a position. */
616 TYPE_SIZE (record_type) = 0;
617 layout_type (record_type);
620 /* At this point, the position and size of each field is known. It was
621 either set before entry by a rep clause, or by laying out the type above.
623 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
624 to compute the Ada size; the GCC size and alignment (for rep'ed records
625 that are not padding types); and the mode (for rep'ed records). We also
626 clear the DECL_BIT_FIELD indication for the cases we know have not been
627 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
629 if (code == QUAL_UNION_TYPE)
630 fieldlist = nreverse (fieldlist);
632 for (field = fieldlist; field; field = TREE_CHAIN (field))
634 tree type = TREE_TYPE (field);
635 tree pos = bit_position (field);
636 tree this_size = DECL_SIZE (field);
639 if ((TREE_CODE (type) == RECORD_TYPE
640 || TREE_CODE (type) == UNION_TYPE
641 || TREE_CODE (type) == QUAL_UNION_TYPE)
642 && !TYPE_FAT_POINTER_P (type)
643 && !TYPE_CONTAINS_TEMPLATE_P (type)
644 && TYPE_ADA_SIZE (type))
645 this_ada_size = TYPE_ADA_SIZE (type);
647 this_ada_size = this_size;
649 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
650 if (DECL_BIT_FIELD (field)
651 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
653 unsigned int align = TYPE_ALIGN (type);
655 /* In the general case, type alignment is required. */
656 if (value_factor_p (pos, align))
658 /* The enclosing record type must be sufficiently aligned.
659 Otherwise, if no alignment was specified for it and it
660 has been laid out already, bump its alignment to the
661 desired one if this is compatible with its size. */
662 if (TYPE_ALIGN (record_type) >= align)
664 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
665 DECL_BIT_FIELD (field) = 0;
669 && value_factor_p (TYPE_SIZE (record_type), align))
671 TYPE_ALIGN (record_type) = align;
672 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
673 DECL_BIT_FIELD (field) = 0;
677 /* In the non-strict alignment case, only byte alignment is. */
678 if (!STRICT_ALIGNMENT
679 && DECL_BIT_FIELD (field)
680 && value_factor_p (pos, BITS_PER_UNIT))
681 DECL_BIT_FIELD (field) = 0;
684 /* If we still have DECL_BIT_FIELD set at this point, we know that the
685 field is technically not addressable. Except that it can actually
686 be addressed if it is BLKmode and happens to be properly aligned. */
687 if (DECL_BIT_FIELD (field)
688 && !(DECL_MODE (field) == BLKmode
689 && value_factor_p (pos, BITS_PER_UNIT)))
690 DECL_NONADDRESSABLE_P (field) = 1;
692 /* A type must be as aligned as its most aligned field that is not
693 a bit-field. But this is already enforced by layout_type. */
694 if (rep_level > 0 && !DECL_BIT_FIELD (field))
695 TYPE_ALIGN (record_type)
696 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
701 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
702 size = size_binop (MAX_EXPR, size, this_size);
705 case QUAL_UNION_TYPE:
707 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
708 this_ada_size, ada_size);
709 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
714 /* Since we know here that all fields are sorted in order of
715 increasing bit position, the size of the record is one
716 higher than the ending bit of the last field processed
717 unless we have a rep clause, since in that case we might
718 have a field outside a QUAL_UNION_TYPE that has a higher ending
719 position. So use a MAX in that case. Also, if this field is a
720 QUAL_UNION_TYPE, we need to take into account the previous size in
721 the case of empty variants. */
723 = merge_sizes (ada_size, pos, this_ada_size,
724 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
726 = merge_sizes (size, pos, this_size,
727 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
735 if (code == QUAL_UNION_TYPE)
736 nreverse (fieldlist);
740 /* If this is a padding record, we never want to make the size smaller
741 than what was specified in it, if any. */
742 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
743 size = TYPE_SIZE (record_type);
745 /* Now set any of the values we've just computed that apply. */
746 if (!TYPE_FAT_POINTER_P (record_type)
747 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
748 SET_TYPE_ADA_SIZE (record_type, ada_size);
752 tree size_unit = had_size_unit
753 ? TYPE_SIZE_UNIT (record_type)
755 size_binop (CEIL_DIV_EXPR, size,
757 unsigned int align = TYPE_ALIGN (record_type);
759 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
760 TYPE_SIZE_UNIT (record_type)
761 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
763 compute_record_mode (record_type);
767 if (!do_not_finalize)
768 rest_of_record_type_compilation (record_type);
771 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
772 the debug information associated with it. It need not be invoked
773 directly in most cases since finish_record_type takes care of doing
774 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
777 rest_of_record_type_compilation (tree record_type)
779 tree fieldlist = TYPE_FIELDS (record_type);
781 enum tree_code code = TREE_CODE (record_type);
782 bool var_size = false;
784 for (field = fieldlist; field; field = TREE_CHAIN (field))
786 /* We need to make an XVE/XVU record if any field has variable size,
787 whether or not the record does. For example, if we have a union,
788 it may be that all fields, rounded up to the alignment, have the
789 same size, in which case we'll use that size. But the debug
790 output routines (except Dwarf2) won't be able to output the fields,
791 so we need to make the special record. */
792 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
793 /* If a field has a non-constant qualifier, the record will have
794 variable size too. */
795 || (code == QUAL_UNION_TYPE
796 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
803 /* If this record is of variable size, rename it so that the
804 debugger knows it is and make a new, parallel, record
805 that tells the debugger how the record is laid out. See
806 exp_dbug.ads. But don't do this for records that are padding
807 since they confuse GDB. */
808 if (var_size && !TYPE_IS_PADDING_P (record_type))
811 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
812 ? UNION_TYPE : TREE_CODE (record_type));
813 tree orig_name = TYPE_NAME (record_type), new_name;
814 tree last_pos = bitsize_zero_node;
815 tree old_field, prev_old_field = NULL_TREE;
817 if (TREE_CODE (orig_name) == TYPE_DECL)
818 orig_name = DECL_NAME (orig_name);
821 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
823 TYPE_NAME (new_record_type) = new_name;
824 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
825 TYPE_STUB_DECL (new_record_type)
826 = create_type_stub_decl (new_name, new_record_type);
827 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
828 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
829 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
830 TYPE_SIZE_UNIT (new_record_type)
831 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
833 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
835 /* Now scan all the fields, replacing each field with a new
836 field corresponding to the new encoding. */
837 for (old_field = TYPE_FIELDS (record_type); old_field;
838 old_field = TREE_CHAIN (old_field))
840 tree field_type = TREE_TYPE (old_field);
841 tree field_name = DECL_NAME (old_field);
843 tree curpos = bit_position (old_field);
845 unsigned int align = 0;
848 /* See how the position was modified from the last position.
850 There are two basic cases we support: a value was added
851 to the last position or the last position was rounded to
852 a boundary and they something was added. Check for the
853 first case first. If not, see if there is any evidence
854 of rounding. If so, round the last position and try
857 If this is a union, the position can be taken as zero. */
859 /* Some computations depend on the shape of the position expression,
860 so strip conversions to make sure it's exposed. */
861 curpos = remove_conversions (curpos, true);
863 if (TREE_CODE (new_record_type) == UNION_TYPE)
864 pos = bitsize_zero_node, align = 0;
866 pos = compute_related_constant (curpos, last_pos);
868 if (!pos && TREE_CODE (curpos) == MULT_EXPR
869 && host_integerp (TREE_OPERAND (curpos, 1), 1))
871 tree offset = TREE_OPERAND (curpos, 0);
872 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
874 /* An offset which is a bitwise AND with a negative power of 2
875 means an alignment corresponding to this power of 2. */
876 offset = remove_conversions (offset, true);
877 if (TREE_CODE (offset) == BIT_AND_EXPR
878 && host_integerp (TREE_OPERAND (offset, 1), 0)
879 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
882 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
883 if (exact_log2 (pow) > 0)
887 pos = compute_related_constant (curpos,
888 round_up (last_pos, align));
890 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
891 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
892 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
893 && host_integerp (TREE_OPERAND
894 (TREE_OPERAND (curpos, 0), 1),
899 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
900 pos = compute_related_constant (curpos,
901 round_up (last_pos, align));
903 else if (potential_alignment_gap (prev_old_field, old_field,
906 align = TYPE_ALIGN (field_type);
907 pos = compute_related_constant (curpos,
908 round_up (last_pos, align));
911 /* If we can't compute a position, set it to zero.
913 ??? We really should abort here, but it's too much work
914 to get this correct for all cases. */
917 pos = bitsize_zero_node;
919 /* See if this type is variable-sized and make a pointer type
920 and indicate the indirection if so. Beware that the debug
921 back-end may adjust the position computed above according
922 to the alignment of the field type, i.e. the pointer type
923 in this case, if we don't preventively counter that. */
924 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
926 field_type = build_pointer_type (field_type);
927 if (align != 0 && TYPE_ALIGN (field_type) > align)
929 field_type = copy_node (field_type);
930 TYPE_ALIGN (field_type) = align;
935 /* Make a new field name, if necessary. */
936 if (var || align != 0)
941 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
942 align / BITS_PER_UNIT);
944 strcpy (suffix, "XVL");
946 field_name = concat_name (field_name, suffix);
949 new_field = create_field_decl (field_name, field_type,
951 DECL_SIZE (old_field), pos, 0);
952 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
953 TYPE_FIELDS (new_record_type) = new_field;
955 /* If old_field is a QUAL_UNION_TYPE, take its size as being
956 zero. The only time it's not the last field of the record
957 is when there are other components at fixed positions after
958 it (meaning there was a rep clause for every field) and we
959 want to be able to encode them. */
960 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
961 (TREE_CODE (TREE_TYPE (old_field))
964 : DECL_SIZE (old_field));
965 prev_old_field = old_field;
968 TYPE_FIELDS (new_record_type)
969 = nreverse (TYPE_FIELDS (new_record_type));
971 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
974 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
977 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
980 add_parallel_type (tree decl, tree parallel_type)
984 while (DECL_PARALLEL_TYPE (d))
985 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
987 SET_DECL_PARALLEL_TYPE (d, parallel_type);
990 /* Return the parallel type associated to a type, if any. */
993 get_parallel_type (tree type)
995 if (TYPE_STUB_DECL (type))
996 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1001 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1002 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1003 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1004 replace a value of zero with the old size. If HAS_REP is true, we take the
1005 MAX of the end position of this field with LAST_SIZE. In all other cases,
1006 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1009 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1012 tree type = TREE_TYPE (last_size);
1015 if (!special || TREE_CODE (size) != COND_EXPR)
1017 new_size = size_binop (PLUS_EXPR, first_bit, size);
1019 new_size = size_binop (MAX_EXPR, last_size, new_size);
1023 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1024 integer_zerop (TREE_OPERAND (size, 1))
1025 ? last_size : merge_sizes (last_size, first_bit,
1026 TREE_OPERAND (size, 1),
1028 integer_zerop (TREE_OPERAND (size, 2))
1029 ? last_size : merge_sizes (last_size, first_bit,
1030 TREE_OPERAND (size, 2),
1033 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1034 when fed through substitute_in_expr) into thinking that a constant
1035 size is not constant. */
1036 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1037 new_size = TREE_OPERAND (new_size, 0);
1042 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1043 related by the addition of a constant. Return that constant if so. */
1046 compute_related_constant (tree op0, tree op1)
1048 tree op0_var, op1_var;
1049 tree op0_con = split_plus (op0, &op0_var);
1050 tree op1_con = split_plus (op1, &op1_var);
1051 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1053 if (operand_equal_p (op0_var, op1_var, 0))
1055 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1061 /* Utility function of above to split a tree OP which may be a sum, into a
1062 constant part, which is returned, and a variable part, which is stored
1063 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1067 split_plus (tree in, tree *pvar)
1069 /* Strip NOPS in order to ease the tree traversal and maximize the
1070 potential for constant or plus/minus discovery. We need to be careful
1071 to always return and set *pvar to bitsizetype trees, but it's worth
1075 *pvar = convert (bitsizetype, in);
1077 if (TREE_CODE (in) == INTEGER_CST)
1079 *pvar = bitsize_zero_node;
1080 return convert (bitsizetype, in);
1082 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1084 tree lhs_var, rhs_var;
1085 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1086 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1088 if (lhs_var == TREE_OPERAND (in, 0)
1089 && rhs_var == TREE_OPERAND (in, 1))
1090 return bitsize_zero_node;
1092 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1093 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1096 return bitsize_zero_node;
1099 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1100 subprogram. If it is void_type_node, then we are dealing with a procedure,
1101 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1102 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1103 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1104 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1105 object. RETURNS_BY_REF is true if the function returns by reference.
1106 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1107 first parameter) the address of the place to copy its result. */
1110 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1111 bool returns_unconstrained, bool returns_by_ref,
1112 bool returns_by_target_ptr)
1114 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1115 the subprogram formal parameters. This list is generated by traversing the
1116 input list of PARM_DECL nodes. */
1117 tree param_type_list = NULL;
1121 for (param_decl = param_decl_list; param_decl;
1122 param_decl = TREE_CHAIN (param_decl))
1123 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1126 /* The list of the function parameter types has to be terminated by the void
1127 type to signal to the back-end that we are not dealing with a variable
1128 parameter subprogram, but that the subprogram has a fixed number of
1130 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1132 /* The list of argument types has been created in reverse
1134 param_type_list = nreverse (param_type_list);
1136 type = build_function_type (return_type, param_type_list);
1138 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1139 or the new type should, make a copy of TYPE. Likewise for
1140 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1141 if (TYPE_CI_CO_LIST (type) || cico_list
1142 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1143 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1144 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1145 type = copy_type (type);
1147 TYPE_CI_CO_LIST (type) = cico_list;
1148 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1149 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1150 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1154 /* Return a copy of TYPE but safe to modify in any way. */
1157 copy_type (tree type)
1159 tree new_type = copy_node (type);
1161 /* copy_node clears this field instead of copying it, because it is
1162 aliased with TREE_CHAIN. */
1163 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1165 TYPE_POINTER_TO (new_type) = 0;
1166 TYPE_REFERENCE_TO (new_type) = 0;
1167 TYPE_MAIN_VARIANT (new_type) = new_type;
1168 TYPE_NEXT_VARIANT (new_type) = 0;
1173 /* Return a subtype of sizetype with range MIN to MAX and whose
1174 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1175 of the associated TYPE_DECL. */
1178 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1180 /* First build a type for the desired range. */
1181 tree type = build_index_2_type (min, max);
1183 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1184 if (TYPE_INDEX_TYPE (type) == index)
1187 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1188 no way of sharing these types, but that's only a small hole. */
1189 if (TYPE_INDEX_TYPE (type))
1190 type = copy_type (type);
1192 SET_TYPE_INDEX_TYPE (type, index);
1193 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1198 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1199 sizetype is used. */
1202 create_range_type (tree type, tree min, tree max)
1206 if (type == NULL_TREE)
1209 /* First build a type with the base range. */
1211 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1213 min = convert (type, min);
1214 max = convert (type, max);
1216 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1217 if (TYPE_RM_MIN_VALUE (range_type)
1218 && TYPE_RM_MAX_VALUE (range_type)
1219 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1220 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1223 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1224 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1225 range_type = copy_type (range_type);
1227 /* Then set the actual range. */
1228 SET_TYPE_RM_MIN_VALUE (range_type, min);
1229 SET_TYPE_RM_MAX_VALUE (range_type, max);
1234 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1235 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1239 create_type_stub_decl (tree type_name, tree type)
1241 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1242 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1243 emitted in DWARF. */
1244 tree type_decl = build_decl (input_location,
1245 TYPE_DECL, type_name, type);
1246 DECL_ARTIFICIAL (type_decl) = 1;
1250 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1251 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1252 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1253 true if we need to write debug information about this type. GNAT_NODE
1254 is used for the position of the decl. */
1257 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1258 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1260 enum tree_code code = TREE_CODE (type);
1261 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1264 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1265 gcc_assert (!TYPE_IS_DUMMY_P (type));
1267 /* If the type hasn't been named yet, we're naming it; preserve an existing
1268 TYPE_STUB_DECL that has been attached to it for some purpose. */
1269 if (!named && TYPE_STUB_DECL (type))
1271 type_decl = TYPE_STUB_DECL (type);
1272 DECL_NAME (type_decl) = type_name;
1275 type_decl = build_decl (input_location,
1276 TYPE_DECL, type_name, type);
1278 DECL_ARTIFICIAL (type_decl) = artificial_p;
1279 gnat_pushdecl (type_decl, gnat_node);
1280 process_attributes (type_decl, attr_list);
1282 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1283 This causes the name to be also viewed as a "tag" by the debug
1284 back-end, with the advantage that no DW_TAG_typedef is emitted
1285 for artificial "tagged" types in DWARF. */
1287 TYPE_STUB_DECL (type) = type_decl;
1289 /* Pass the type declaration to the debug back-end unless this is an
1290 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1291 type for which debugging information was not requested, or else an
1292 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1293 handled separately. And do not pass dummy types either. */
1294 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1295 DECL_IGNORED_P (type_decl) = 1;
1296 else if (code != ENUMERAL_TYPE
1297 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1298 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1299 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1300 && !(code == RECORD_TYPE
1302 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1303 rest_of_type_decl_compilation (type_decl);
1308 /* Return a VAR_DECL or CONST_DECL node.
1310 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1311 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1312 the GCC tree for an optional initial expression; NULL_TREE if none.
1314 CONST_FLAG is true if this variable is constant, in which case we might
1315 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1317 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1318 definition to be made visible outside of the current compilation unit, for
1319 instance variable definitions in a package specification.
1321 EXTERN_FLAG is true when processing an external variable declaration (as
1322 opposed to a definition: no storage is to be allocated for the variable).
1324 STATIC_FLAG is only relevant when not at top level. In that case
1325 it indicates whether to always allocate storage to the variable.
1327 GNAT_NODE is used for the position of the decl. */
1330 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1331 bool const_flag, bool public_flag, bool extern_flag,
1332 bool static_flag, bool const_decl_allowed_p,
1333 struct attrib *attr_list, Node_Id gnat_node)
1337 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1338 && (global_bindings_p () || static_flag
1339 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1340 : TREE_CONSTANT (var_init)));
1342 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1343 case the initializer may be used in-lieu of the DECL node (as done in
1344 Identifier_to_gnu). This is useful to prevent the need of elaboration
1345 code when an identifier for which such a decl is made is in turn used as
1346 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1347 but extra constraints apply to this choice (see below) and are not
1348 relevant to the distinction we wish to make. */
1349 bool constant_p = const_flag && init_const;
1351 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1352 and may be used for scalars in general but not for aggregates. */
1354 = build_decl (input_location,
1355 (constant_p && const_decl_allowed_p
1356 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1359 /* If this is external, throw away any initializations (they will be done
1360 elsewhere) unless this is a constant for which we would like to remain
1361 able to get the initializer. If we are defining a global here, leave a
1362 constant initialization and save any variable elaborations for the
1363 elaboration routine. If we are just annotating types, throw away the
1364 initialization if it isn't a constant. */
1365 if ((extern_flag && !constant_p)
1366 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1367 var_init = NULL_TREE;
1369 /* At the global level, an initializer requiring code to be generated
1370 produces elaboration statements. Check that such statements are allowed,
1371 that is, not violating a No_Elaboration_Code restriction. */
1372 if (global_bindings_p () && var_init != 0 && ! init_const)
1373 Check_Elaboration_Code_Allowed (gnat_node);
1375 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1376 try to fiddle with DECL_COMMON. However, on platforms that don't
1377 support global BSS sections, uninitialized global variables would
1378 go in DATA instead, thus increasing the size of the executable. */
1380 && TREE_CODE (var_decl) == VAR_DECL
1381 && !have_global_bss_p ())
1382 DECL_COMMON (var_decl) = 1;
1383 DECL_INITIAL (var_decl) = var_init;
1384 TREE_READONLY (var_decl) = const_flag;
1385 DECL_EXTERNAL (var_decl) = extern_flag;
1386 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1387 TREE_CONSTANT (var_decl) = constant_p;
1388 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1389 = TYPE_VOLATILE (type);
1391 /* If it's public and not external, always allocate storage for it.
1392 At the global binding level we need to allocate static storage for the
1393 variable if and only if it's not external. If we are not at the top level
1394 we allocate automatic storage unless requested not to. */
1395 TREE_STATIC (var_decl)
1396 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1398 /* For an external constant whose initializer is not absolute, do not emit
1399 debug info. In DWARF this would mean a global relocation in a read-only
1400 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1403 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1404 != null_pointer_node)
1405 DECL_IGNORED_P (var_decl) = 1;
1407 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1408 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1410 process_attributes (var_decl, attr_list);
1412 /* Add this decl to the current binding level. */
1413 gnat_pushdecl (var_decl, gnat_node);
1415 if (TREE_SIDE_EFFECTS (var_decl))
1416 TREE_ADDRESSABLE (var_decl) = 1;
1418 if (TREE_CODE (var_decl) != CONST_DECL)
1420 if (global_bindings_p ())
1421 rest_of_decl_compilation (var_decl, true, 0);
1424 expand_decl (var_decl);
1429 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1432 aggregate_type_contains_array_p (tree type)
1434 switch (TREE_CODE (type))
1438 case QUAL_UNION_TYPE:
1441 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1442 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1443 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1456 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1457 its type and RECORD_TYPE is the type of the enclosing record. PACKED is
1458 1 if the enclosing record is packed, -1 if it has Component_Alignment of
1459 Storage_Unit. If SIZE is nonzero, it is the specified size of the field.
1460 If POS is nonzero, it is the bit position. If ADDRESSABLE is nonzero, it
1461 means we are allowed to take the address of the field; if it is negative,
1462 we should not make a bitfield, which is used by make_aligning_type. */
1465 create_field_decl (tree field_name, tree field_type, tree record_type,
1466 int packed, tree size, tree pos, int addressable)
1468 tree field_decl = build_decl (input_location,
1469 FIELD_DECL, field_name, field_type);
1471 DECL_CONTEXT (field_decl) = record_type;
1472 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1474 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1475 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1476 Likewise for an aggregate without specified position that contains an
1477 array, because in this case slices of variable length of this array
1478 must be handled by GCC and variable-sized objects need to be aligned
1479 to at least a byte boundary. */
1480 if (packed && (TYPE_MODE (field_type) == BLKmode
1482 && AGGREGATE_TYPE_P (field_type)
1483 && aggregate_type_contains_array_p (field_type))))
1484 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1486 /* If a size is specified, use it. Otherwise, if the record type is packed
1487 compute a size to use, which may differ from the object's natural size.
1488 We always set a size in this case to trigger the checks for bitfield
1489 creation below, which is typically required when no position has been
1492 size = convert (bitsizetype, size);
1493 else if (packed == 1)
1495 size = rm_size (field_type);
1496 if (TYPE_MODE (field_type) == BLKmode)
1497 size = round_up (size, BITS_PER_UNIT);
1500 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1501 specified for two reasons: first if the size differs from the natural
1502 size. Second, if the alignment is insufficient. There are a number of
1503 ways the latter can be true.
1505 We never make a bitfield if the type of the field has a nonconstant size,
1506 because no such entity requiring bitfield operations should reach here.
1508 We do *preventively* make a bitfield when there might be the need for it
1509 but we don't have all the necessary information to decide, as is the case
1510 of a field with no specified position in a packed record.
1512 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1513 in layout_decl or finish_record_type to clear the bit_field indication if
1514 it is in fact not needed. */
1515 if (addressable >= 0
1517 && TREE_CODE (size) == INTEGER_CST
1518 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1519 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1520 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1522 || (TYPE_ALIGN (record_type) != 0
1523 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1525 DECL_BIT_FIELD (field_decl) = 1;
1526 DECL_SIZE (field_decl) = size;
1527 if (!packed && !pos)
1529 if (TYPE_ALIGN (record_type) != 0
1530 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1531 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1533 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1537 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1539 /* Bump the alignment if need be, either for bitfield/packing purposes or
1540 to satisfy the type requirements if no such consideration applies. When
1541 we get the alignment from the type, indicate if this is from an explicit
1542 user request, which prevents stor-layout from lowering it later on. */
1544 unsigned int bit_align
1545 = (DECL_BIT_FIELD (field_decl) ? 1
1546 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1548 if (bit_align > DECL_ALIGN (field_decl))
1549 DECL_ALIGN (field_decl) = bit_align;
1550 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1552 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1553 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1559 /* We need to pass in the alignment the DECL is known to have.
1560 This is the lowest-order bit set in POS, but no more than
1561 the alignment of the record, if one is specified. Note
1562 that an alignment of 0 is taken as infinite. */
1563 unsigned int known_align;
1565 if (host_integerp (pos, 1))
1566 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1568 known_align = BITS_PER_UNIT;
1570 if (TYPE_ALIGN (record_type)
1571 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1572 known_align = TYPE_ALIGN (record_type);
1574 layout_decl (field_decl, known_align);
1575 SET_DECL_OFFSET_ALIGN (field_decl,
1576 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1578 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1579 &DECL_FIELD_BIT_OFFSET (field_decl),
1580 DECL_OFFSET_ALIGN (field_decl), pos);
1583 /* In addition to what our caller says, claim the field is addressable if we
1584 know that its type is not suitable.
1586 The field may also be "technically" nonaddressable, meaning that even if
1587 we attempt to take the field's address we will actually get the address
1588 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1589 value we have at this point is not accurate enough, so we don't account
1590 for this here and let finish_record_type decide. */
1591 if (!addressable && !type_for_nonaliased_component_p (field_type))
1594 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1599 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1600 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1601 (either an In parameter or an address of a pass-by-ref parameter). */
1604 create_param_decl (tree param_name, tree param_type, bool readonly)
1606 tree param_decl = build_decl (input_location,
1607 PARM_DECL, param_name, param_type);
1609 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1610 can lead to various ABI violations. */
1611 if (targetm.calls.promote_prototypes (NULL_TREE)
1612 && INTEGRAL_TYPE_P (param_type)
1613 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1615 /* We have to be careful about biased types here. Make a subtype
1616 of integer_type_node with the proper biasing. */
1617 if (TREE_CODE (param_type) == INTEGER_TYPE
1618 && TYPE_BIASED_REPRESENTATION_P (param_type))
1621 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1622 TREE_TYPE (subtype) = integer_type_node;
1623 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1624 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1625 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1626 param_type = subtype;
1629 param_type = integer_type_node;
1632 DECL_ARG_TYPE (param_decl) = param_type;
1633 TREE_READONLY (param_decl) = readonly;
1637 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1640 process_attributes (tree decl, struct attrib *attr_list)
1642 for (; attr_list; attr_list = attr_list->next)
1643 switch (attr_list->type)
1645 case ATTR_MACHINE_ATTRIBUTE:
1646 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1648 ATTR_FLAG_TYPE_IN_PLACE);
1651 case ATTR_LINK_ALIAS:
1652 if (! DECL_EXTERNAL (decl))
1654 TREE_STATIC (decl) = 1;
1655 assemble_alias (decl, attr_list->name);
1659 case ATTR_WEAK_EXTERNAL:
1661 declare_weak (decl);
1663 post_error ("?weak declarations not supported on this target",
1664 attr_list->error_point);
1667 case ATTR_LINK_SECTION:
1668 if (targetm.have_named_sections)
1670 DECL_SECTION_NAME (decl)
1671 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1672 IDENTIFIER_POINTER (attr_list->name));
1673 DECL_COMMON (decl) = 0;
1676 post_error ("?section attributes are not supported for this target",
1677 attr_list->error_point);
1680 case ATTR_LINK_CONSTRUCTOR:
1681 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1682 TREE_USED (decl) = 1;
1685 case ATTR_LINK_DESTRUCTOR:
1686 DECL_STATIC_DESTRUCTOR (decl) = 1;
1687 TREE_USED (decl) = 1;
1690 case ATTR_THREAD_LOCAL_STORAGE:
1691 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1692 DECL_COMMON (decl) = 0;
1697 /* Record DECL as a global renaming pointer. */
1700 record_global_renaming_pointer (tree decl)
1702 gcc_assert (DECL_RENAMED_OBJECT (decl));
1703 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1706 /* Invalidate the global renaming pointers. */
1709 invalidate_global_renaming_pointers (void)
1714 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1715 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1717 VEC_free (tree, gc, global_renaming_pointers);
1720 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1724 value_factor_p (tree value, HOST_WIDE_INT factor)
1726 if (host_integerp (value, 1))
1727 return tree_low_cst (value, 1) % factor == 0;
1729 if (TREE_CODE (value) == MULT_EXPR)
1730 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1731 || value_factor_p (TREE_OPERAND (value, 1), factor));
1736 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1737 unless we can prove these 2 fields are laid out in such a way that no gap
1738 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1739 is the distance in bits between the end of PREV_FIELD and the starting
1740 position of CURR_FIELD. It is ignored if null. */
1743 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1745 /* If this is the first field of the record, there cannot be any gap */
1749 /* If the previous field is a union type, then return False: The only
1750 time when such a field is not the last field of the record is when
1751 there are other components at fixed positions after it (meaning there
1752 was a rep clause for every field), in which case we don't want the
1753 alignment constraint to override them. */
1754 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1757 /* If the distance between the end of prev_field and the beginning of
1758 curr_field is constant, then there is a gap if the value of this
1759 constant is not null. */
1760 if (offset && host_integerp (offset, 1))
1761 return !integer_zerop (offset);
1763 /* If the size and position of the previous field are constant,
1764 then check the sum of this size and position. There will be a gap
1765 iff it is not multiple of the current field alignment. */
1766 if (host_integerp (DECL_SIZE (prev_field), 1)
1767 && host_integerp (bit_position (prev_field), 1))
1768 return ((tree_low_cst (bit_position (prev_field), 1)
1769 + tree_low_cst (DECL_SIZE (prev_field), 1))
1770 % DECL_ALIGN (curr_field) != 0);
1772 /* If both the position and size of the previous field are multiples
1773 of the current field alignment, there cannot be any gap. */
1774 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1775 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1778 /* Fallback, return that there may be a potential gap */
1782 /* Returns a LABEL_DECL node for LABEL_NAME. */
1785 create_label_decl (tree label_name)
1787 tree label_decl = build_decl (input_location,
1788 LABEL_DECL, label_name, void_type_node);
1790 DECL_CONTEXT (label_decl) = current_function_decl;
1791 DECL_MODE (label_decl) = VOIDmode;
1792 DECL_SOURCE_LOCATION (label_decl) = input_location;
1797 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1798 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1799 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1800 PARM_DECL nodes chained through the TREE_CHAIN field).
1802 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1803 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1806 create_subprog_decl (tree subprog_name, tree asm_name,
1807 tree subprog_type, tree param_decl_list, bool inline_flag,
1808 bool public_flag, bool extern_flag,
1809 struct attrib *attr_list, Node_Id gnat_node)
1811 tree return_type = TREE_TYPE (subprog_type);
1812 tree subprog_decl = build_decl (input_location,
1813 FUNCTION_DECL, subprog_name, subprog_type);
1815 /* If this is a non-inline function nested inside an inlined external
1816 function, we cannot honor both requests without cloning the nested
1817 function in the current unit since it is private to the other unit.
1818 We could inline the nested function as well but it's probably better
1819 to err on the side of too little inlining. */
1821 && current_function_decl
1822 && DECL_DECLARED_INLINE_P (current_function_decl)
1823 && DECL_EXTERNAL (current_function_decl))
1824 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1826 DECL_EXTERNAL (subprog_decl) = extern_flag;
1827 TREE_PUBLIC (subprog_decl) = public_flag;
1828 TREE_STATIC (subprog_decl) = 1;
1829 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1830 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1831 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1832 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1833 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1834 DECL_RESULT (subprog_decl) = build_decl (input_location,
1835 RESULT_DECL, 0, return_type);
1836 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1837 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1839 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1840 target by-reference return mechanism. This is not supported all the
1841 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1842 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1843 the RESULT_DECL instead - see gnat_genericize for more details. */
1844 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1846 tree result_decl = DECL_RESULT (subprog_decl);
1848 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1849 DECL_BY_REFERENCE (result_decl) = 1;
1854 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1856 /* The expand_main_function circuitry expects "main_identifier_node" to
1857 designate the DECL_NAME of the 'main' entry point, in turn expected
1858 to be declared as the "main" function literally by default. Ada
1859 program entry points are typically declared with a different name
1860 within the binder generated file, exported as 'main' to satisfy the
1861 system expectations. Force main_identifier_node in this case. */
1862 if (asm_name == main_identifier_node)
1863 DECL_NAME (subprog_decl) = main_identifier_node;
1866 process_attributes (subprog_decl, attr_list);
1868 /* Add this decl to the current binding level. */
1869 gnat_pushdecl (subprog_decl, gnat_node);
1871 /* Output the assembler code and/or RTL for the declaration. */
1872 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1874 return subprog_decl;
1877 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1878 body. This routine needs to be invoked before processing the declarations
1879 appearing in the subprogram. */
1882 begin_subprog_body (tree subprog_decl)
1886 current_function_decl = subprog_decl;
1887 announce_function (subprog_decl);
1889 /* Enter a new binding level and show that all the parameters belong to
1892 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1893 param_decl = TREE_CHAIN (param_decl))
1894 DECL_CONTEXT (param_decl) = subprog_decl;
1896 make_decl_rtl (subprog_decl);
1898 /* We handle pending sizes via the elaboration of types, so we don't need to
1899 save them. This causes them to be marked as part of the outer function
1900 and then discarded. */
1901 get_pending_sizes ();
1905 /* Helper for the genericization callback. Return a dereference of VAL
1906 if it is of a reference type. */
1909 convert_from_reference (tree val)
1911 tree value_type, ref;
1913 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1916 value_type = TREE_TYPE (TREE_TYPE (val));
1917 ref = build1 (INDIRECT_REF, value_type, val);
1919 /* See if what we reference is CONST or VOLATILE, which requires
1920 looking into array types to get to the component type. */
1922 while (TREE_CODE (value_type) == ARRAY_TYPE)
1923 value_type = TREE_TYPE (value_type);
1926 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1927 TREE_THIS_VOLATILE (ref)
1928 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1930 TREE_SIDE_EFFECTS (ref)
1931 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1936 /* Helper for the genericization callback. Returns true if T denotes
1937 a RESULT_DECL with DECL_BY_REFERENCE set. */
1940 is_byref_result (tree t)
1942 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1946 /* Tree walking callback for gnat_genericize. Currently ...
1948 o Adjust references to the function's DECL_RESULT if it is marked
1949 DECL_BY_REFERENCE and so has had its type turned into a reference
1950 type at the end of the function compilation. */
1953 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1955 /* This implementation is modeled after what the C++ front-end is
1956 doing, basis of the downstream passes behavior. */
1958 tree stmt = *stmt_p;
1959 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1961 /* If we have a direct mention of the result decl, dereference. */
1962 if (is_byref_result (stmt))
1964 *stmt_p = convert_from_reference (stmt);
1969 /* Otherwise, no need to walk the same tree twice. */
1970 if (pointer_set_contains (p_set, stmt))
1976 /* If we are taking the address of what now is a reference, just get the
1978 if (TREE_CODE (stmt) == ADDR_EXPR
1979 && is_byref_result (TREE_OPERAND (stmt, 0)))
1981 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1985 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
1986 else if (TREE_CODE (stmt) == RETURN_EXPR
1987 && TREE_OPERAND (stmt, 0)
1988 && is_byref_result (TREE_OPERAND (stmt, 0)))
1991 /* Don't look inside trees that cannot embed references of interest. */
1992 else if (IS_TYPE_OR_DECL_P (stmt))
1995 pointer_set_insert (p_set, *stmt_p);
2000 /* Perform lowering of Ada trees to GENERIC. In particular:
2002 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2003 and adjust all the references to this decl accordingly. */
2006 gnat_genericize (tree fndecl)
2008 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2009 was handled by simply setting TREE_ADDRESSABLE on the result type.
2010 Everything required to actually pass by invisible ref using the target
2011 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2013 This doesn't work with GCC 4 any more for several reasons. First, the
2014 gimplification process might need the creation of temporaries of this
2015 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2016 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2017 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2018 be explicitly accounted for by the front-end in the function body.
2020 We achieve the complete transformation in two steps:
2022 1/ create_subprog_decl performs early attribute tweaks: it clears
2023 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2024 the result decl. The former ensures that the bit isn't set in the GCC
2025 tree saved for the function, so prevents ICEs on temporary creation.
2026 The latter we use here to trigger the rest of the processing.
2028 2/ This function performs the type transformation on the result decl
2029 and adjusts all the references to this decl from the function body
2032 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2033 strategy, which escapes the gimplifier temporary creation issues by
2034 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2035 on simple specific support code in aggregate_value_p to look at the
2036 target function result decl explicitly. */
2038 struct pointer_set_t *p_set;
2039 tree decl_result = DECL_RESULT (fndecl);
2041 if (!DECL_BY_REFERENCE (decl_result))
2044 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2045 occurrences in the function body using the common tree-walking facility.
2046 We want to see every occurrence of the result decl to adjust the
2047 referencing tree, so need to use our own pointer set to control which
2048 trees should be visited again or not. */
2050 p_set = pointer_set_create ();
2052 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2053 TREE_ADDRESSABLE (decl_result) = 0;
2054 relayout_decl (decl_result);
2056 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2058 pointer_set_destroy (p_set);
2061 /* Finish the definition of the current subprogram BODY and finalize it. */
2064 end_subprog_body (tree body)
2066 tree fndecl = current_function_decl;
2068 /* Mark the BLOCK for this level as being for this function and pop the
2069 level. Since the vars in it are the parameters, clear them. */
2070 BLOCK_VARS (current_binding_level->block) = 0;
2071 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2072 DECL_INITIAL (fndecl) = current_binding_level->block;
2075 /* We handle pending sizes via the elaboration of types, so we don't
2076 need to save them. */
2077 get_pending_sizes ();
2079 /* Mark the RESULT_DECL as being in this subprogram. */
2080 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2082 DECL_SAVED_TREE (fndecl) = body;
2084 current_function_decl = DECL_CONTEXT (fndecl);
2087 /* We cannot track the location of errors past this point. */
2088 error_gnat_node = Empty;
2090 /* If we're only annotating types, don't actually compile this function. */
2091 if (type_annotate_only)
2094 /* Perform the required pre-gimplification transformations on the tree. */
2095 gnat_genericize (fndecl);
2097 /* Dump functions before gimplification. */
2098 dump_function (TDI_original, fndecl);
2100 /* ??? This special handling of nested functions is probably obsolete. */
2101 if (!DECL_CONTEXT (fndecl))
2102 cgraph_finalize_function (fndecl, false);
2104 /* Register this function with cgraph just far enough to get it
2105 added to our parent's nested function list. */
2106 (void) cgraph_node (fndecl);
2110 gnat_builtin_function (tree decl)
2112 gnat_pushdecl (decl, Empty);
2116 /* Return an integer type with the number of bits of precision given by
2117 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2118 it is a signed type. */
2121 gnat_type_for_size (unsigned precision, int unsignedp)
2126 if (precision <= 2 * MAX_BITS_PER_WORD
2127 && signed_and_unsigned_types[precision][unsignedp])
2128 return signed_and_unsigned_types[precision][unsignedp];
2131 t = make_unsigned_type (precision);
2133 t = make_signed_type (precision);
2135 if (precision <= 2 * MAX_BITS_PER_WORD)
2136 signed_and_unsigned_types[precision][unsignedp] = t;
2140 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2141 TYPE_NAME (t) = get_identifier (type_name);
2147 /* Likewise for floating-point types. */
2150 float_type_for_precision (int precision, enum machine_mode mode)
2155 if (float_types[(int) mode])
2156 return float_types[(int) mode];
2158 float_types[(int) mode] = t = make_node (REAL_TYPE);
2159 TYPE_PRECISION (t) = precision;
2162 gcc_assert (TYPE_MODE (t) == mode);
2165 sprintf (type_name, "FLOAT_%d", precision);
2166 TYPE_NAME (t) = get_identifier (type_name);
2172 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2173 an unsigned type; otherwise a signed type is returned. */
2176 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2178 if (mode == BLKmode)
2181 if (mode == VOIDmode)
2182 return void_type_node;
2184 if (COMPLEX_MODE_P (mode))
2187 if (SCALAR_FLOAT_MODE_P (mode))
2188 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2190 if (SCALAR_INT_MODE_P (mode))
2191 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2193 if (VECTOR_MODE_P (mode))
2195 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2196 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2198 return build_vector_type_for_mode (inner_type, mode);
2204 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2207 gnat_unsigned_type (tree type_node)
2209 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2211 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2213 type = copy_node (type);
2214 TREE_TYPE (type) = type_node;
2216 else if (TREE_TYPE (type_node)
2217 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2218 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2220 type = copy_node (type);
2221 TREE_TYPE (type) = TREE_TYPE (type_node);
2227 /* Return the signed version of a TYPE_NODE, a scalar type. */
2230 gnat_signed_type (tree type_node)
2232 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2234 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2236 type = copy_node (type);
2237 TREE_TYPE (type) = type_node;
2239 else if (TREE_TYPE (type_node)
2240 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2241 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2243 type = copy_node (type);
2244 TREE_TYPE (type) = TREE_TYPE (type_node);
2250 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2251 transparently converted to each other. */
2254 gnat_types_compatible_p (tree t1, tree t2)
2256 enum tree_code code;
2258 /* This is the default criterion. */
2259 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2262 /* We only check structural equivalence here. */
2263 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2266 /* Vector types are also compatible if they have the same number of subparts
2267 and the same form of (scalar) element type. */
2268 if (code == VECTOR_TYPE
2269 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2270 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2271 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2274 /* Array types are also compatible if they are constrained and have
2275 the same component type and the same domain. */
2276 if (code == ARRAY_TYPE
2277 && TREE_TYPE (t1) == TREE_TYPE (t2)
2278 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2279 || (TYPE_DOMAIN (t1)
2281 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2282 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2283 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2284 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2287 /* Padding record types are also compatible if they pad the same
2288 type and have the same constant size. */
2289 if (code == RECORD_TYPE
2290 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2291 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2292 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2298 /* EXP is an expression for the size of an object. If this size contains
2299 discriminant references, replace them with the maximum (if MAX_P) or
2300 minimum (if !MAX_P) possible value of the discriminant. */
2303 max_size (tree exp, bool max_p)
2305 enum tree_code code = TREE_CODE (exp);
2306 tree type = TREE_TYPE (exp);
2308 switch (TREE_CODE_CLASS (code))
2310 case tcc_declaration:
2315 if (code == CALL_EXPR)
2320 t = maybe_inline_call_in_expr (exp);
2322 return max_size (t, max_p);
2324 n = call_expr_nargs (exp);
2326 argarray = (tree *) alloca (n * sizeof (tree));
2327 for (i = 0; i < n; i++)
2328 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2329 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2334 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2335 modify. Otherwise, we treat it like a variable. */
2336 if (!CONTAINS_PLACEHOLDER_P (exp))
2339 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2341 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2343 case tcc_comparison:
2344 return max_p ? size_one_node : size_zero_node;
2348 case tcc_expression:
2349 switch (TREE_CODE_LENGTH (code))
2352 if (code == NON_LVALUE_EXPR)
2353 return max_size (TREE_OPERAND (exp, 0), max_p);
2356 fold_build1 (code, type,
2357 max_size (TREE_OPERAND (exp, 0),
2358 code == NEGATE_EXPR ? !max_p : max_p));
2361 if (code == COMPOUND_EXPR)
2362 return max_size (TREE_OPERAND (exp, 1), max_p);
2364 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2365 may provide a tighter bound on max_size. */
2366 if (code == MINUS_EXPR
2367 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2369 tree lhs = fold_build2 (MINUS_EXPR, type,
2370 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2371 TREE_OPERAND (exp, 1));
2372 tree rhs = fold_build2 (MINUS_EXPR, type,
2373 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2374 TREE_OPERAND (exp, 1));
2375 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2376 max_size (lhs, max_p),
2377 max_size (rhs, max_p));
2381 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2382 tree rhs = max_size (TREE_OPERAND (exp, 1),
2383 code == MINUS_EXPR ? !max_p : max_p);
2385 /* Special-case wanting the maximum value of a MIN_EXPR.
2386 In that case, if one side overflows, return the other.
2387 sizetype is signed, but we know sizes are non-negative.
2388 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2389 overflowing or the maximum possible value and the RHS
2393 && TREE_CODE (rhs) == INTEGER_CST
2394 && TREE_OVERFLOW (rhs))
2398 && TREE_CODE (lhs) == INTEGER_CST
2399 && TREE_OVERFLOW (lhs))
2401 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2402 && ((TREE_CODE (lhs) == INTEGER_CST
2403 && TREE_OVERFLOW (lhs))
2404 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2405 && !TREE_CONSTANT (rhs))
2408 return fold_build2 (code, type, lhs, rhs);
2412 if (code == SAVE_EXPR)
2414 else if (code == COND_EXPR)
2415 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2416 max_size (TREE_OPERAND (exp, 1), max_p),
2417 max_size (TREE_OPERAND (exp, 2), max_p));
2420 /* Other tree classes cannot happen. */
2428 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2429 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2430 Return a constructor for the template. */
2433 build_template (tree template_type, tree array_type, tree expr)
2435 tree template_elts = NULL_TREE;
2436 tree bound_list = NULL_TREE;
2439 while (TREE_CODE (array_type) == RECORD_TYPE
2440 && (TYPE_PADDING_P (array_type)
2441 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2442 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2444 if (TREE_CODE (array_type) == ARRAY_TYPE
2445 || (TREE_CODE (array_type) == INTEGER_TYPE
2446 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2447 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2449 /* First make the list for a CONSTRUCTOR for the template. Go down the
2450 field list of the template instead of the type chain because this
2451 array might be an Ada array of arrays and we can't tell where the
2452 nested arrays stop being the underlying object. */
2454 for (field = TYPE_FIELDS (template_type); field;
2456 ? (bound_list = TREE_CHAIN (bound_list))
2457 : (array_type = TREE_TYPE (array_type))),
2458 field = TREE_CHAIN (TREE_CHAIN (field)))
2460 tree bounds, min, max;
2462 /* If we have a bound list, get the bounds from there. Likewise
2463 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2464 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2465 This will give us a maximum range. */
2467 bounds = TREE_VALUE (bound_list);
2468 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2469 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2470 else if (expr && TREE_CODE (expr) == PARM_DECL
2471 && DECL_BY_COMPONENT_PTR_P (expr))
2472 bounds = TREE_TYPE (field);
2476 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2477 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2479 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2480 substitute it from OBJECT. */
2481 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2482 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2484 template_elts = tree_cons (TREE_CHAIN (field), max,
2485 tree_cons (field, min, template_elts));
2488 return gnat_build_constructor (template_type, nreverse (template_elts));
2491 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2492 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2493 in the type contains in its DECL_INITIAL the expression to use when
2494 a constructor is made for the type. GNAT_ENTITY is an entity used
2495 to print out an error message if the mechanism cannot be applied to
2496 an object of that type and also for the name. */
2499 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2501 tree record_type = make_node (RECORD_TYPE);
2502 tree pointer32_type;
2503 tree field_list = 0;
2512 /* If TYPE is an unconstrained array, use the underlying array type. */
2513 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2514 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2516 /* If this is an array, compute the number of dimensions in the array,
2517 get the index types, and point to the inner type. */
2518 if (TREE_CODE (type) != ARRAY_TYPE)
2521 for (ndim = 1, inner_type = type;
2522 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2523 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2524 ndim++, inner_type = TREE_TYPE (inner_type))
2527 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2529 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2530 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2531 for (i = ndim - 1, inner_type = type;
2533 i--, inner_type = TREE_TYPE (inner_type))
2534 idx_arr[i] = TYPE_DOMAIN (inner_type);
2536 for (i = 0, inner_type = type;
2538 i++, inner_type = TREE_TYPE (inner_type))
2539 idx_arr[i] = TYPE_DOMAIN (inner_type);
2541 /* Now get the DTYPE value. */
2542 switch (TREE_CODE (type))
2547 if (TYPE_VAX_FLOATING_POINT_P (type))
2548 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2561 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2564 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2567 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2570 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2573 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2576 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2582 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2586 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2587 && TYPE_VAX_FLOATING_POINT_P (type))
2588 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2600 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2611 /* Get the CLASS value. */
2614 case By_Descriptor_A:
2615 case By_Short_Descriptor_A:
2618 case By_Descriptor_NCA:
2619 case By_Short_Descriptor_NCA:
2622 case By_Descriptor_SB:
2623 case By_Short_Descriptor_SB:
2627 case By_Short_Descriptor:
2628 case By_Descriptor_S:
2629 case By_Short_Descriptor_S:
2635 /* Make the type for a descriptor for VMS. The first four fields
2636 are the same for all types. */
2639 = chainon (field_list,
2640 make_descriptor_field
2641 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2642 size_in_bytes ((mech == By_Descriptor_A ||
2643 mech == By_Short_Descriptor_A)
2644 ? inner_type : type)));
2646 field_list = chainon (field_list,
2647 make_descriptor_field ("DTYPE",
2648 gnat_type_for_size (8, 1),
2649 record_type, size_int (dtype)));
2650 field_list = chainon (field_list,
2651 make_descriptor_field ("CLASS",
2652 gnat_type_for_size (8, 1),
2653 record_type, size_int (klass)));
2655 /* Of course this will crash at run-time if the address space is not
2656 within the low 32 bits, but there is nothing else we can do. */
2657 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2660 = chainon (field_list,
2661 make_descriptor_field
2662 ("POINTER", pointer32_type, record_type,
2663 build_unary_op (ADDR_EXPR,
2665 build0 (PLACEHOLDER_EXPR, type))));
2670 case By_Short_Descriptor:
2671 case By_Descriptor_S:
2672 case By_Short_Descriptor_S:
2675 case By_Descriptor_SB:
2676 case By_Short_Descriptor_SB:
2678 = chainon (field_list,
2679 make_descriptor_field
2680 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2681 TREE_CODE (type) == ARRAY_TYPE
2682 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2684 = chainon (field_list,
2685 make_descriptor_field
2686 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2687 TREE_CODE (type) == ARRAY_TYPE
2688 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2691 case By_Descriptor_A:
2692 case By_Short_Descriptor_A:
2693 case By_Descriptor_NCA:
2694 case By_Short_Descriptor_NCA:
2695 field_list = chainon (field_list,
2696 make_descriptor_field ("SCALE",
2697 gnat_type_for_size (8, 1),
2701 field_list = chainon (field_list,
2702 make_descriptor_field ("DIGITS",
2703 gnat_type_for_size (8, 1),
2708 = chainon (field_list,
2709 make_descriptor_field
2710 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2711 size_int ((mech == By_Descriptor_NCA ||
2712 mech == By_Short_Descriptor_NCA)
2714 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2715 : (TREE_CODE (type) == ARRAY_TYPE
2716 && TYPE_CONVENTION_FORTRAN_P (type)
2719 field_list = chainon (field_list,
2720 make_descriptor_field ("DIMCT",
2721 gnat_type_for_size (8, 1),
2725 field_list = chainon (field_list,
2726 make_descriptor_field ("ARSIZE",
2727 gnat_type_for_size (32, 1),
2729 size_in_bytes (type)));
2731 /* Now build a pointer to the 0,0,0... element. */
2732 tem = build0 (PLACEHOLDER_EXPR, type);
2733 for (i = 0, inner_type = type; i < ndim;
2734 i++, inner_type = TREE_TYPE (inner_type))
2735 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2736 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2737 NULL_TREE, NULL_TREE);
2740 = chainon (field_list,
2741 make_descriptor_field
2743 build_pointer_type_for_mode (inner_type, SImode, false),
2746 build_pointer_type_for_mode (inner_type, SImode,
2750 /* Next come the addressing coefficients. */
2751 tem = size_one_node;
2752 for (i = 0; i < ndim; i++)
2756 = size_binop (MULT_EXPR, tem,
2757 size_binop (PLUS_EXPR,
2758 size_binop (MINUS_EXPR,
2759 TYPE_MAX_VALUE (idx_arr[i]),
2760 TYPE_MIN_VALUE (idx_arr[i])),
2763 fname[0] = ((mech == By_Descriptor_NCA ||
2764 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2765 fname[1] = '0' + i, fname[2] = 0;
2767 = chainon (field_list,
2768 make_descriptor_field (fname,
2769 gnat_type_for_size (32, 1),
2770 record_type, idx_length));
2772 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2776 /* Finally here are the bounds. */
2777 for (i = 0; i < ndim; i++)
2781 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2783 = chainon (field_list,
2784 make_descriptor_field
2785 (fname, gnat_type_for_size (32, 1), record_type,
2786 TYPE_MIN_VALUE (idx_arr[i])));
2790 = chainon (field_list,
2791 make_descriptor_field
2792 (fname, gnat_type_for_size (32, 1), record_type,
2793 TYPE_MAX_VALUE (idx_arr[i])));
2798 post_error ("unsupported descriptor type for &", gnat_entity);
2801 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2802 finish_record_type (record_type, field_list, 0, true);
2806 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2807 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2808 in the type contains in its DECL_INITIAL the expression to use when
2809 a constructor is made for the type. GNAT_ENTITY is an entity used
2810 to print out an error message if the mechanism cannot be applied to
2811 an object of that type and also for the name. */
2814 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2816 tree record64_type = make_node (RECORD_TYPE);
2817 tree pointer64_type;
2818 tree field_list64 = 0;
2827 /* If TYPE is an unconstrained array, use the underlying array type. */
2828 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2829 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2831 /* If this is an array, compute the number of dimensions in the array,
2832 get the index types, and point to the inner type. */
2833 if (TREE_CODE (type) != ARRAY_TYPE)
2836 for (ndim = 1, inner_type = type;
2837 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2838 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2839 ndim++, inner_type = TREE_TYPE (inner_type))
2842 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2844 if (mech != By_Descriptor_NCA
2845 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2846 for (i = ndim - 1, inner_type = type;
2848 i--, inner_type = TREE_TYPE (inner_type))
2849 idx_arr[i] = TYPE_DOMAIN (inner_type);
2851 for (i = 0, inner_type = type;
2853 i++, inner_type = TREE_TYPE (inner_type))
2854 idx_arr[i] = TYPE_DOMAIN (inner_type);
2856 /* Now get the DTYPE value. */
2857 switch (TREE_CODE (type))
2862 if (TYPE_VAX_FLOATING_POINT_P (type))
2863 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2876 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2879 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2882 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2885 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2888 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2891 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2897 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2901 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2902 && TYPE_VAX_FLOATING_POINT_P (type))
2903 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2915 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2926 /* Get the CLASS value. */
2929 case By_Descriptor_A:
2932 case By_Descriptor_NCA:
2935 case By_Descriptor_SB:
2939 case By_Descriptor_S:
2945 /* Make the type for a 64bit descriptor for VMS. The first six fields
2946 are the same for all types. */
2948 field_list64 = chainon (field_list64,
2949 make_descriptor_field ("MBO",
2950 gnat_type_for_size (16, 1),
2951 record64_type, size_int (1)));
2953 field_list64 = chainon (field_list64,
2954 make_descriptor_field ("DTYPE",
2955 gnat_type_for_size (8, 1),
2956 record64_type, size_int (dtype)));
2957 field_list64 = chainon (field_list64,
2958 make_descriptor_field ("CLASS",
2959 gnat_type_for_size (8, 1),
2960 record64_type, size_int (klass)));
2962 field_list64 = chainon (field_list64,
2963 make_descriptor_field ("MBMO",
2964 gnat_type_for_size (32, 1),
2965 record64_type, ssize_int (-1)));
2968 = chainon (field_list64,
2969 make_descriptor_field
2970 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2971 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2973 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2976 = chainon (field_list64,
2977 make_descriptor_field
2978 ("POINTER", pointer64_type, record64_type,
2979 build_unary_op (ADDR_EXPR,
2981 build0 (PLACEHOLDER_EXPR, type))));
2986 case By_Descriptor_S:
2989 case By_Descriptor_SB:
2991 = chainon (field_list64,
2992 make_descriptor_field
2993 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2994 TREE_CODE (type) == ARRAY_TYPE
2995 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2997 = chainon (field_list64,
2998 make_descriptor_field
2999 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3000 TREE_CODE (type) == ARRAY_TYPE
3001 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3004 case By_Descriptor_A:
3005 case By_Descriptor_NCA:
3006 field_list64 = chainon (field_list64,
3007 make_descriptor_field ("SCALE",
3008 gnat_type_for_size (8, 1),
3012 field_list64 = chainon (field_list64,
3013 make_descriptor_field ("DIGITS",
3014 gnat_type_for_size (8, 1),
3019 = chainon (field_list64,
3020 make_descriptor_field
3021 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3022 size_int (mech == By_Descriptor_NCA
3024 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3025 : (TREE_CODE (type) == ARRAY_TYPE
3026 && TYPE_CONVENTION_FORTRAN_P (type)
3029 field_list64 = chainon (field_list64,
3030 make_descriptor_field ("DIMCT",
3031 gnat_type_for_size (8, 1),
3035 field_list64 = chainon (field_list64,
3036 make_descriptor_field ("MBZ",
3037 gnat_type_for_size (32, 1),
3040 field_list64 = chainon (field_list64,
3041 make_descriptor_field ("ARSIZE",
3042 gnat_type_for_size (64, 1),
3044 size_in_bytes (type)));
3046 /* Now build a pointer to the 0,0,0... element. */
3047 tem = build0 (PLACEHOLDER_EXPR, type);
3048 for (i = 0, inner_type = type; i < ndim;
3049 i++, inner_type = TREE_TYPE (inner_type))
3050 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3051 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3052 NULL_TREE, NULL_TREE);
3055 = chainon (field_list64,
3056 make_descriptor_field
3058 build_pointer_type_for_mode (inner_type, DImode, false),
3061 build_pointer_type_for_mode (inner_type, DImode,
3065 /* Next come the addressing coefficients. */
3066 tem = size_one_node;
3067 for (i = 0; i < ndim; i++)
3071 = size_binop (MULT_EXPR, tem,
3072 size_binop (PLUS_EXPR,
3073 size_binop (MINUS_EXPR,
3074 TYPE_MAX_VALUE (idx_arr[i]),
3075 TYPE_MIN_VALUE (idx_arr[i])),
3078 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3079 fname[1] = '0' + i, fname[2] = 0;
3081 = chainon (field_list64,
3082 make_descriptor_field (fname,
3083 gnat_type_for_size (64, 1),
3084 record64_type, idx_length));
3086 if (mech == By_Descriptor_NCA)
3090 /* Finally here are the bounds. */
3091 for (i = 0; i < ndim; i++)
3095 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3097 = chainon (field_list64,
3098 make_descriptor_field
3099 (fname, gnat_type_for_size (64, 1), record64_type,
3100 TYPE_MIN_VALUE (idx_arr[i])));
3104 = chainon (field_list64,
3105 make_descriptor_field
3106 (fname, gnat_type_for_size (64, 1), record64_type,
3107 TYPE_MAX_VALUE (idx_arr[i])));
3112 post_error ("unsupported descriptor type for &", gnat_entity);
3115 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3116 finish_record_type (record64_type, field_list64, 0, true);
3117 return record64_type;
3120 /* Utility routine for above code to make a field. */
3123 make_descriptor_field (const char *name, tree type,
3124 tree rec_type, tree initial)
3127 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3129 DECL_INITIAL (field) = initial;
3133 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3134 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3135 which the VMS descriptor is passed. */
3138 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3140 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3141 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3142 /* The CLASS field is the 3rd field in the descriptor. */
3143 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3144 /* The POINTER field is the 6th field in the descriptor. */
3145 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
3147 /* Retrieve the value of the POINTER field. */
3149 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3151 if (POINTER_TYPE_P (gnu_type))
3152 return convert (gnu_type, gnu_expr64);
3154 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3156 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3157 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3158 tree template_type = TREE_TYPE (p_bounds_type);
3159 tree min_field = TYPE_FIELDS (template_type);
3160 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3161 tree template_tree, template_addr, aflags, dimct, t, u;
3162 /* See the head comment of build_vms_descriptor. */
3163 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3164 tree lfield, ufield;
3166 /* Convert POINTER to the type of the P_ARRAY field. */
3167 gnu_expr64 = convert (p_array_type, gnu_expr64);
3171 case 1: /* Class S */
3172 case 15: /* Class SB */
3173 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3174 t = TREE_CHAIN (TREE_CHAIN (klass));
3175 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3176 t = tree_cons (min_field,
3177 convert (TREE_TYPE (min_field), integer_one_node),
3178 tree_cons (max_field,
3179 convert (TREE_TYPE (max_field), t),
3181 template_tree = gnat_build_constructor (template_type, t);
3182 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3184 /* For class S, we are done. */
3188 /* Test that we really have a SB descriptor, like DEC Ada. */
3189 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3190 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3191 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3192 /* If so, there is already a template in the descriptor and
3193 it is located right after the POINTER field. The fields are
3194 64bits so they must be repacked. */
3195 t = TREE_CHAIN (pointer64);
3196 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3197 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3200 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3202 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3204 /* Build the template in the form of a constructor. */
3205 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3206 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3207 ufield, NULL_TREE));
3208 template_tree = gnat_build_constructor (template_type, t);
3210 /* Otherwise use the {1, LENGTH} template we build above. */
3211 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3212 build_unary_op (ADDR_EXPR, p_bounds_type,
3217 case 4: /* Class A */
3218 /* The AFLAGS field is the 3rd field after the pointer in the
3220 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3221 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3222 /* The DIMCT field is the next field in the descriptor after
3225 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3226 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3227 or FL_COEFF or FL_BOUNDS not set. */
3228 u = build_int_cst (TREE_TYPE (aflags), 192);
3229 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3230 build_binary_op (NE_EXPR, integer_type_node,
3232 convert (TREE_TYPE (dimct),
3234 build_binary_op (NE_EXPR, integer_type_node,
3235 build2 (BIT_AND_EXPR,
3239 /* There is already a template in the descriptor and it is located
3240 in block 3. The fields are 64bits so they must be repacked. */
3241 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3243 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3244 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3247 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3249 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3251 /* Build the template in the form of a constructor. */
3252 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3253 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3254 ufield, NULL_TREE));
3255 template_tree = gnat_build_constructor (template_type, t);
3256 template_tree = build3 (COND_EXPR, template_type, u,
3257 build_call_raise (CE_Length_Check_Failed, Empty,
3258 N_Raise_Constraint_Error),
3261 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3264 case 10: /* Class NCA */
3266 post_error ("unsupported descriptor type for &", gnat_subprog);
3267 template_addr = integer_zero_node;
3271 /* Build the fat pointer in the form of a constructor. */
3272 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3273 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3274 template_addr, NULL_TREE));
3275 return gnat_build_constructor (gnu_type, t);
3282 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3283 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3284 which the VMS descriptor is passed. */
3287 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3289 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3290 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3291 /* The CLASS field is the 3rd field in the descriptor. */
3292 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3293 /* The POINTER field is the 4th field in the descriptor. */
3294 tree pointer = TREE_CHAIN (klass);
3296 /* Retrieve the value of the POINTER field. */
3298 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3300 if (POINTER_TYPE_P (gnu_type))
3301 return convert (gnu_type, gnu_expr32);
3303 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3305 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3306 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3307 tree template_type = TREE_TYPE (p_bounds_type);
3308 tree min_field = TYPE_FIELDS (template_type);
3309 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3310 tree template_tree, template_addr, aflags, dimct, t, u;
3311 /* See the head comment of build_vms_descriptor. */
3312 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3314 /* Convert POINTER to the type of the P_ARRAY field. */
3315 gnu_expr32 = convert (p_array_type, gnu_expr32);
3319 case 1: /* Class S */
3320 case 15: /* Class SB */
3321 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3322 t = TYPE_FIELDS (desc_type);
3323 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3324 t = tree_cons (min_field,
3325 convert (TREE_TYPE (min_field), integer_one_node),
3326 tree_cons (max_field,
3327 convert (TREE_TYPE (max_field), t),
3329 template_tree = gnat_build_constructor (template_type, t);
3330 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3332 /* For class S, we are done. */
3336 /* Test that we really have a SB descriptor, like DEC Ada. */
3337 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3338 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3339 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3340 /* If so, there is already a template in the descriptor and
3341 it is located right after the POINTER field. */
3342 t = TREE_CHAIN (pointer);
3344 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3345 /* Otherwise use the {1, LENGTH} template we build above. */
3346 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3347 build_unary_op (ADDR_EXPR, p_bounds_type,
3352 case 4: /* Class A */
3353 /* The AFLAGS field is the 7th field in the descriptor. */
3354 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3355 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3356 /* The DIMCT field is the 8th field in the descriptor. */
3358 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3359 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3360 or FL_COEFF or FL_BOUNDS not set. */
3361 u = build_int_cst (TREE_TYPE (aflags), 192);
3362 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3363 build_binary_op (NE_EXPR, integer_type_node,
3365 convert (TREE_TYPE (dimct),
3367 build_binary_op (NE_EXPR, integer_type_node,
3368 build2 (BIT_AND_EXPR,
3372 /* There is already a template in the descriptor and it is
3373 located at the start of block 3 (12th field). */
3374 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3376 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3377 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3378 build_call_raise (CE_Length_Check_Failed, Empty,
3379 N_Raise_Constraint_Error),
3382 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3385 case 10: /* Class NCA */
3387 post_error ("unsupported descriptor type for &", gnat_subprog);
3388 template_addr = integer_zero_node;
3392 /* Build the fat pointer in the form of a constructor. */
3393 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3394 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3395 template_addr, NULL_TREE));
3397 return gnat_build_constructor (gnu_type, t);
3404 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3405 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3406 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3407 VMS descriptor is passed. */
3410 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3411 Entity_Id gnat_subprog)
3413 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3414 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3415 tree mbo = TYPE_FIELDS (desc_type);
3416 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3417 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3418 tree is64bit, gnu_expr32, gnu_expr64;
3420 /* If the field name is not MBO, it must be 32-bit and no alternate.
3421 Otherwise primary must be 64-bit and alternate 32-bit. */
3422 if (strcmp (mbostr, "MBO") != 0)
3423 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3425 /* Build the test for 64-bit descriptor. */
3426 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3427 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3429 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3430 build_binary_op (EQ_EXPR, integer_type_node,
3431 convert (integer_type_node, mbo),
3433 build_binary_op (EQ_EXPR, integer_type_node,
3434 convert (integer_type_node, mbmo),
3435 integer_minus_one_node));
3437 /* Build the 2 possible end results. */
3438 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3439 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3440 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3442 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3445 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3446 and the GNAT node GNAT_SUBPROG. */
3449 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3451 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3452 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3453 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3456 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3457 gnu_param_list = NULL_TREE;
3459 begin_subprog_body (gnu_stub_decl);
3462 start_stmt_group ();
3464 /* Loop over the parameters of the stub and translate any of them
3465 passed by descriptor into a by reference one. */
3466 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3467 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3469 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3470 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3472 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3474 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3476 DECL_PARM_ALT_TYPE (gnu_stub_param),
3479 gnu_param = gnu_stub_param;
3481 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3484 gnu_body = end_stmt_group ();
3486 /* Invoke the internal subprogram. */
3487 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3489 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3491 nreverse (gnu_param_list));
3493 /* Propagate the return value, if any. */
3494 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3495 append_to_statement_list (gnu_subprog_call, &gnu_body);
3497 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3503 allocate_struct_function (gnu_stub_decl, false);
3504 end_subprog_body (gnu_body);
3507 /* Build a type to be used to represent an aliased object whose nominal
3508 type is an unconstrained array. This consists of a RECORD_TYPE containing
3509 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3510 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3511 is used to represent an arbitrary unconstrained object. Use NAME
3512 as the name of the record. */
3515 build_unc_object_type (tree template_type, tree object_type, tree name)
3517 tree type = make_node (RECORD_TYPE);
3518 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3519 template_type, type, 0, 0, 0, 1);
3520 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3523 TYPE_NAME (type) = name;
3524 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3525 finish_record_type (type,
3526 chainon (chainon (NULL_TREE, template_field),
3533 /* Same, taking a thin or fat pointer type instead of a template type. */
3536 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3541 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3544 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3545 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3546 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3547 return build_unc_object_type (template_type, object_type, name);
3550 /* Shift the component offsets within an unconstrained object TYPE to make it
3551 suitable for use as a designated type for thin pointers. */
3554 shift_unc_components_for_thin_pointers (tree type)
3556 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3557 allocated past the BOUNDS template. The designated type is adjusted to
3558 have ARRAY at position zero and the template at a negative offset, so
3559 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3561 tree bounds_field = TYPE_FIELDS (type);
3562 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3564 DECL_FIELD_OFFSET (bounds_field)
3565 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3567 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3568 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3571 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3572 In the normal case this is just two adjustments, but we have more to
3573 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3576 update_pointer_to (tree old_type, tree new_type)
3578 tree ptr = TYPE_POINTER_TO (old_type);
3579 tree ref = TYPE_REFERENCE_TO (old_type);
3583 /* If this is the main variant, process all the other variants first. */
3584 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3585 for (type = TYPE_NEXT_VARIANT (old_type); type;
3586 type = TYPE_NEXT_VARIANT (type))
3587 update_pointer_to (type, new_type);
3589 /* If no pointers and no references, we are done. */
3593 /* Merge the old type qualifiers in the new type.
3595 Each old variant has qualifiers for specific reasons, and the new
3596 designated type as well. Each set of qualifiers represents useful
3597 information grabbed at some point, and merging the two simply unifies
3598 these inputs into the final type description.
3600 Consider for instance a volatile type frozen after an access to constant
3601 type designating it; after the designated type's freeze, we get here with
3602 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3603 when the access type was processed. We will make a volatile and readonly
3604 designated type, because that's what it really is.
3606 We might also get here for a non-dummy OLD_TYPE variant with different
3607 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3608 to private record type elaboration (see the comments around the call to
3609 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3610 the qualifiers in those cases too, to avoid accidentally discarding the
3611 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3613 = build_qualified_type (new_type,
3614 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3616 /* If old type and new type are identical, there is nothing to do. */
3617 if (old_type == new_type)
3620 /* Otherwise, first handle the simple case. */
3621 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3623 TYPE_POINTER_TO (new_type) = ptr;
3624 TYPE_REFERENCE_TO (new_type) = ref;
3626 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3627 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3628 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3629 TREE_TYPE (ptr1) = new_type;
3631 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3632 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3633 ref1 = TYPE_NEXT_VARIANT (ref1))
3634 TREE_TYPE (ref1) = new_type;
3637 /* Now deal with the unconstrained array case. In this case the "pointer"
3638 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3639 Turn them into pointers to the correct types using update_pointer_to. */
3640 else if (!TYPE_IS_FAT_POINTER_P (ptr))
3645 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3646 tree array_field = TYPE_FIELDS (ptr);
3647 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3648 tree new_ptr = TYPE_POINTER_TO (new_type);
3652 /* Make pointers to the dummy template point to the real template. */
3654 (TREE_TYPE (TREE_TYPE (bounds_field)),
3655 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3657 /* The references to the template bounds present in the array type
3658 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3659 are updating PTR to make it a full replacement for NEW_PTR as
3660 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3661 to make it of type PTR. */
3662 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3663 build0 (PLACEHOLDER_EXPR, ptr),
3664 bounds_field, NULL_TREE);
3666 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3667 to the dummy array point to it. */
3669 (TREE_TYPE (TREE_TYPE (array_field)),
3670 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3671 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3673 /* Make PTR the pointer to NEW_TYPE. */
3674 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3675 = TREE_TYPE (new_type) = ptr;
3677 /* And show the original pointer NEW_PTR to the debugger. This is the
3678 counterpart of the equivalent processing in gnat_pushdecl when the
3679 unconstrained array type is frozen after access types to it. Note
3680 that update_pointer_to can be invoked multiple times on the same
3681 couple of types because of the type variants. */
3683 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3684 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3686 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3687 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3689 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3690 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3692 /* Now handle updating the allocation record, what the thin pointer
3693 points to. Update all pointers from the old record into the new
3694 one, update the type of the array field, and recompute the size. */
3695 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3697 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3698 = TREE_TYPE (TREE_TYPE (array_field));
3700 /* The size recomputation needs to account for alignment constraints, so
3701 we let layout_type work it out. This will reset the field offsets to
3702 what they would be in a regular record, so we shift them back to what
3703 we want them to be for a thin pointer designated type afterwards. */
3704 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3705 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3706 TYPE_SIZE (new_obj_rec) = 0;
3707 layout_type (new_obj_rec);
3709 shift_unc_components_for_thin_pointers (new_obj_rec);
3711 /* We are done, at last. */
3712 rest_of_record_type_compilation (ptr);
3716 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3717 unconstrained one. This involves making or finding a template. */
3720 convert_to_fat_pointer (tree type, tree expr)
3722 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3723 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3724 tree etype = TREE_TYPE (expr);
3727 /* If EXPR is null, make a fat pointer that contains null pointers to the
3728 template and array. */
3729 if (integer_zerop (expr))
3731 gnat_build_constructor
3733 tree_cons (TYPE_FIELDS (type),
3734 convert (p_array_type, expr),
3735 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3736 convert (build_pointer_type (template_type),
3740 /* If EXPR is a thin pointer, make template and data from the record.. */
3741 else if (TYPE_IS_THIN_POINTER_P (etype))
3743 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3745 expr = save_expr (expr);
3746 if (TREE_CODE (expr) == ADDR_EXPR)
3747 expr = TREE_OPERAND (expr, 0);
3749 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3751 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3752 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3753 build_component_ref (expr, NULL_TREE,
3754 TREE_CHAIN (fields), false));
3757 /* Otherwise, build the constructor for the template. */
3759 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3761 /* The final result is a constructor for the fat pointer.
3763 If EXPR is an argument of a foreign convention subprogram, the type it
3764 points to is directly the component type. In this case, the expression
3765 type may not match the corresponding FIELD_DECL type at this point, so we
3766 call "convert" here to fix that up if necessary. This type consistency is
3767 required, for instance because it ensures that possible later folding of
3768 COMPONENT_REFs against this constructor always yields something of the
3769 same type as the initial reference.
3771 Note that the call to "build_template" above is still fine because it
3772 will only refer to the provided TEMPLATE_TYPE in this case. */
3774 gnat_build_constructor
3776 tree_cons (TYPE_FIELDS (type),
3777 convert (p_array_type, expr),
3778 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3779 build_unary_op (ADDR_EXPR, NULL_TREE,
3784 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3785 is something that is a fat pointer, so convert to it first if it EXPR
3786 is not already a fat pointer. */
3789 convert_to_thin_pointer (tree type, tree expr)
3791 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3793 = convert_to_fat_pointer
3794 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3796 /* We get the pointer to the data and use a NOP_EXPR to make it the
3798 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3800 expr = build1 (NOP_EXPR, type, expr);
3805 /* Create an expression whose value is that of EXPR,
3806 converted to type TYPE. The TREE_TYPE of the value
3807 is always TYPE. This function implements all reasonable
3808 conversions; callers should filter out those that are
3809 not permitted by the language being compiled. */
3812 convert (tree type, tree expr)
3814 enum tree_code code = TREE_CODE (type);
3815 tree etype = TREE_TYPE (expr);
3816 enum tree_code ecode = TREE_CODE (etype);
3818 /* If EXPR is already the right type, we are done. */
3822 /* If both input and output have padding and are of variable size, do this
3823 as an unchecked conversion. Likewise if one is a mere variant of the
3824 other, so we avoid a pointless unpad/repad sequence. */
3825 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3826 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3827 && (!TREE_CONSTANT (TYPE_SIZE (type))
3828 || !TREE_CONSTANT (TYPE_SIZE (etype))
3829 || gnat_types_compatible_p (type, etype)
3830 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3831 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3834 /* If the output type has padding, convert to the inner type and make a
3835 constructor to build the record, unless a variable size is involved. */
3836 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3838 /* If we previously converted from another type and our type is
3839 of variable size, remove the conversion to avoid the need for
3840 variable-sized temporaries. Likewise for a conversion between
3841 original and packable version. */
3842 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3843 && (!TREE_CONSTANT (TYPE_SIZE (type))
3844 || (ecode == RECORD_TYPE
3845 && TYPE_NAME (etype)
3846 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3847 expr = TREE_OPERAND (expr, 0);
3849 /* If we are just removing the padding from expr, convert the original
3850 object if we have variable size in order to avoid the need for some
3851 variable-sized temporaries. Likewise if the padding is a variant
3852 of the other, so we avoid a pointless unpad/repad sequence. */
3853 if (TREE_CODE (expr) == COMPONENT_REF
3854 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3855 && (!TREE_CONSTANT (TYPE_SIZE (type))
3856 || gnat_types_compatible_p (type,
3857 TREE_TYPE (TREE_OPERAND (expr, 0)))
3858 || (ecode == RECORD_TYPE
3859 && TYPE_NAME (etype)
3860 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3861 return convert (type, TREE_OPERAND (expr, 0));
3863 /* If the inner type is of self-referential size and the expression type
3864 is a record, do this as an unchecked conversion. But first pad the
3865 expression if possible to have the same size on both sides. */
3866 if (TREE_CODE (etype) == RECORD_TYPE
3867 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3869 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3870 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3871 false, false, false, true), expr);
3872 return unchecked_convert (type, expr, false);
3875 /* If we are converting between array types with variable size, do the
3876 final conversion as an unchecked conversion, again to avoid the need
3877 for some variable-sized temporaries. If valid, this conversion is
3878 very likely purely technical and without real effects. */
3879 if (TREE_CODE (etype) == ARRAY_TYPE
3880 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3881 && !TREE_CONSTANT (TYPE_SIZE (etype))
3882 && !TREE_CONSTANT (TYPE_SIZE (type)))
3883 return unchecked_convert (type,
3884 convert (TREE_TYPE (TYPE_FIELDS (type)),
3889 gnat_build_constructor (type,
3890 tree_cons (TYPE_FIELDS (type),
3892 (TYPE_FIELDS (type)),
3897 /* If the input type has padding, remove it and convert to the output type.
3898 The conditions ordering is arranged to ensure that the output type is not
3899 a padding type here, as it is not clear whether the conversion would
3900 always be correct if this was to happen. */
3901 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3905 /* If we have just converted to this padded type, just get the
3906 inner expression. */
3907 if (TREE_CODE (expr) == CONSTRUCTOR
3908 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3909 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3910 == TYPE_FIELDS (etype))
3912 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3914 /* Otherwise, build an explicit component reference. */
3917 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3919 return convert (type, unpadded);
3922 /* If the input is a biased type, adjust first. */
3923 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3924 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3925 fold_convert (TREE_TYPE (etype),
3927 TYPE_MIN_VALUE (etype)));
3929 /* If the input is a justified modular type, we need to extract the actual
3930 object before converting it to any other type with the exceptions of an
3931 unconstrained array or of a mere type variant. It is useful to avoid the
3932 extraction and conversion in the type variant case because it could end
3933 up replacing a VAR_DECL expr by a constructor and we might be about the
3934 take the address of the result. */
3935 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3936 && code != UNCONSTRAINED_ARRAY_TYPE
3937 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3938 return convert (type, build_component_ref (expr, NULL_TREE,
3939 TYPE_FIELDS (etype), false));
3941 /* If converting to a type that contains a template, convert to the data
3942 type and then build the template. */
3943 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3945 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3947 /* If the source already has a template, get a reference to the
3948 associated array only, as we are going to rebuild a template
3949 for the target type anyway. */
3950 expr = maybe_unconstrained_array (expr);
3953 gnat_build_constructor
3955 tree_cons (TYPE_FIELDS (type),
3956 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3957 obj_type, NULL_TREE),
3958 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3959 convert (obj_type, expr), NULL_TREE)));
3962 /* There are some special cases of expressions that we process
3964 switch (TREE_CODE (expr))
3970 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3971 conversion in gnat_expand_expr. NULL_EXPR does not represent
3972 and actual value, so no conversion is needed. */
3973 expr = copy_node (expr);
3974 TREE_TYPE (expr) = type;
3978 /* If we are converting a STRING_CST to another constrained array type,
3979 just make a new one in the proper type. */
3980 if (code == ecode && AGGREGATE_TYPE_P (etype)
3981 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3982 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3984 expr = copy_node (expr);
3985 TREE_TYPE (expr) = type;
3991 /* If we are converting a VECTOR_CST to a mere variant type, just make
3992 a new one in the proper type. */
3993 if (code == ecode && gnat_types_compatible_p (type, etype))
3995 expr = copy_node (expr);
3996 TREE_TYPE (expr) = type;
4001 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4002 a new one in the proper type. */
4003 if (code == ecode && gnat_types_compatible_p (type, etype))
4005 expr = copy_node (expr);
4006 TREE_TYPE (expr) = type;
4010 /* Likewise for a conversion between original and packable version, but
4011 we have to work harder in order to preserve type consistency. */
4013 && code == RECORD_TYPE
4014 && TYPE_NAME (type) == TYPE_NAME (etype))
4016 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4017 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4018 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4019 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4020 unsigned HOST_WIDE_INT idx;
4023 /* Whether we need to clear TREE_CONSTANT et al. on the output
4024 constructor when we convert in place. */
4025 bool clear_constant = false;
4027 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4029 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4030 /* We expect only simple constructors. Otherwise, punt. */
4031 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4034 elt->value = convert (TREE_TYPE (field), value);
4036 /* If packing has made this field a bitfield and the input
4037 value couldn't be emitted statically any more, we need to
4038 clear TREE_CONSTANT on our output. */
4039 if (!clear_constant && TREE_CONSTANT (expr)
4040 && !CONSTRUCTOR_BITFIELD_P (efield)
4041 && CONSTRUCTOR_BITFIELD_P (field)
4042 && !initializer_constant_valid_for_bitfield_p (value))
4043 clear_constant = true;
4045 efield = TREE_CHAIN (efield);
4046 field = TREE_CHAIN (field);
4049 /* If we have been able to match and convert all the input fields
4050 to their output type, convert in place now. We'll fallback to a
4051 view conversion downstream otherwise. */
4054 expr = copy_node (expr);
4055 TREE_TYPE (expr) = type;
4056 CONSTRUCTOR_ELTS (expr) = v;
4058 TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
4063 /* Likewise for a conversion between array type and vector type with a
4064 compatible representative array. */
4065 else if (code == VECTOR_TYPE
4066 && ecode == ARRAY_TYPE
4067 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4070 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4071 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4072 VEC(constructor_elt,gc) *v;
4073 unsigned HOST_WIDE_INT ix;
4076 /* Build a VECTOR_CST from a *constant* array constructor. */
4077 if (TREE_CONSTANT (expr))
4079 bool constant_p = true;
4081 /* Iterate through elements and check if all constructor
4082 elements are *_CSTs. */
4083 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4084 if (!CONSTANT_CLASS_P (value))
4091 return build_vector_from_ctor (type,
4092 CONSTRUCTOR_ELTS (expr));
4095 /* Otherwise, build a regular vector constructor. */
4096 v = VEC_alloc (constructor_elt, gc, len);
4097 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4099 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4100 elt->index = NULL_TREE;
4103 expr = copy_node (expr);
4104 TREE_TYPE (expr) = type;
4105 CONSTRUCTOR_ELTS (expr) = v;
4110 case UNCONSTRAINED_ARRAY_REF:
4111 /* Convert this to the type of the inner array by getting the address of
4112 the array from the template. */
4113 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4114 build_component_ref (TREE_OPERAND (expr, 0),
4115 get_identifier ("P_ARRAY"),
4117 etype = TREE_TYPE (expr);
4118 ecode = TREE_CODE (etype);
4121 case VIEW_CONVERT_EXPR:
4123 /* GCC 4.x is very sensitive to type consistency overall, and view
4124 conversions thus are very frequent. Even though just "convert"ing
4125 the inner operand to the output type is fine in most cases, it
4126 might expose unexpected input/output type mismatches in special
4127 circumstances so we avoid such recursive calls when we can. */
4128 tree op0 = TREE_OPERAND (expr, 0);
4130 /* If we are converting back to the original type, we can just
4131 lift the input conversion. This is a common occurrence with
4132 switches back-and-forth amongst type variants. */
4133 if (type == TREE_TYPE (op0))
4136 /* Otherwise, if we're converting between two aggregate or vector
4137 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4138 target type in place or to just convert the inner expression. */
4139 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4140 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4142 /* If we are converting between mere variants, we can just
4143 substitute the VIEW_CONVERT_EXPR in place. */
4144 if (gnat_types_compatible_p (type, etype))
4145 return build1 (VIEW_CONVERT_EXPR, type, op0);
4147 /* Otherwise, we may just bypass the input view conversion unless
4148 one of the types is a fat pointer, which is handled by
4149 specialized code below which relies on exact type matching. */
4150 else if (!TYPE_IS_FAT_POINTER_P (type)
4151 && !TYPE_IS_FAT_POINTER_P (etype))
4152 return convert (type, op0);
4158 /* If both types are record types, just convert the pointer and
4159 make a new INDIRECT_REF.
4161 ??? Disable this for now since it causes problems with the
4162 code in build_binary_op for MODIFY_EXPR which wants to
4163 strip off conversions. But that code really is a mess and
4164 we need to do this a much better way some time. */
4166 && (TREE_CODE (type) == RECORD_TYPE
4167 || TREE_CODE (type) == UNION_TYPE)
4168 && (TREE_CODE (etype) == RECORD_TYPE
4169 || TREE_CODE (etype) == UNION_TYPE)
4170 && !TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4171 return build_unary_op (INDIRECT_REF, NULL_TREE,
4172 convert (build_pointer_type (type),
4173 TREE_OPERAND (expr, 0)));
4180 /* Check for converting to a pointer to an unconstrained array. */
4181 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4182 return convert_to_fat_pointer (type, expr);
4184 /* If we are converting between two aggregate or vector types that are mere
4185 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4186 to a vector type from its representative array type. */
4187 else if ((code == ecode
4188 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4189 && gnat_types_compatible_p (type, etype))
4190 || (code == VECTOR_TYPE
4191 && ecode == ARRAY_TYPE
4192 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4194 return build1 (VIEW_CONVERT_EXPR, type, expr);
4196 /* In all other cases of related types, make a NOP_EXPR. */
4197 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4198 || (code == INTEGER_CST && ecode == INTEGER_CST
4199 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4200 return fold_convert (type, expr);
4205 return fold_build1 (CONVERT_EXPR, type, expr);
4208 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4209 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4210 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4211 return unchecked_convert (type, expr, false);
4212 else if (TYPE_BIASED_REPRESENTATION_P (type))
4213 return fold_convert (type,
4214 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4215 convert (TREE_TYPE (type), expr),
4216 TYPE_MIN_VALUE (type)));
4218 /* ... fall through ... */
4222 /* If we are converting an additive expression to an integer type
4223 with lower precision, be wary of the optimization that can be
4224 applied by convert_to_integer. There are 2 problematic cases:
4225 - if the first operand was originally of a biased type,
4226 because we could be recursively called to convert it
4227 to an intermediate type and thus rematerialize the
4228 additive operator endlessly,
4229 - if the expression contains a placeholder, because an
4230 intermediate conversion that changes the sign could
4231 be inserted and thus introduce an artificial overflow
4232 at compile time when the placeholder is substituted. */
4233 if (code == INTEGER_TYPE
4234 && ecode == INTEGER_TYPE
4235 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4236 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4238 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4240 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4241 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4242 || CONTAINS_PLACEHOLDER_P (expr))
4243 return build1 (NOP_EXPR, type, expr);
4246 return fold (convert_to_integer (type, expr));
4249 case REFERENCE_TYPE:
4250 /* If converting between two pointers to records denoting
4251 both a template and type, adjust if needed to account
4252 for any differing offsets, since one might be negative. */
4253 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4256 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4257 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4258 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4259 sbitsize_int (BITS_PER_UNIT));
4261 expr = build1 (NOP_EXPR, type, expr);
4262 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4263 if (integer_zerop (byte_diff))
4266 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4267 fold (convert (sizetype, byte_diff)));
4270 /* If converting to a thin pointer, handle specially. */
4271 if (TYPE_IS_THIN_POINTER_P (type)
4272 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4273 return convert_to_thin_pointer (type, expr);
4275 /* If converting fat pointer to normal pointer, get the pointer to the
4276 array and then convert it. */
4277 else if (TYPE_IS_FAT_POINTER_P (etype))
4278 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4281 return fold (convert_to_pointer (type, expr));
4284 return fold (convert_to_real (type, expr));
4287 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4289 gnat_build_constructor
4290 (type, tree_cons (TYPE_FIELDS (type),
4291 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4294 /* ... fall through ... */
4297 /* In these cases, assume the front-end has validated the conversion.
4298 If the conversion is valid, it will be a bit-wise conversion, so
4299 it can be viewed as an unchecked conversion. */
4300 return unchecked_convert (type, expr, false);
4303 /* This is a either a conversion between a tagged type and some
4304 subtype, which we have to mark as a UNION_TYPE because of
4305 overlapping fields or a conversion of an Unchecked_Union. */
4306 return unchecked_convert (type, expr, false);
4308 case UNCONSTRAINED_ARRAY_TYPE:
4309 /* If the input is a VECTOR_TYPE, convert to the representative
4310 array type first. */
4311 if (ecode == VECTOR_TYPE)
4313 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4314 etype = TREE_TYPE (expr);
4315 ecode = TREE_CODE (etype);
4318 /* If EXPR is a constrained array, take its address, convert it to a
4319 fat pointer, and then dereference it. Likewise if EXPR is a
4320 record containing both a template and a constrained array.
4321 Note that a record representing a justified modular type
4322 always represents a packed constrained array. */
4323 if (ecode == ARRAY_TYPE
4324 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4325 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4326 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4329 (INDIRECT_REF, NULL_TREE,
4330 convert_to_fat_pointer (TREE_TYPE (type),
4331 build_unary_op (ADDR_EXPR,
4334 /* Do something very similar for converting one unconstrained
4335 array to another. */
4336 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4338 build_unary_op (INDIRECT_REF, NULL_TREE,
4339 convert (TREE_TYPE (type),
4340 build_unary_op (ADDR_EXPR,
4346 return fold (convert_to_complex (type, expr));
4353 /* Remove all conversions that are done in EXP. This includes converting
4354 from a padded type or to a justified modular type. If TRUE_ADDRESS
4355 is true, always return the address of the containing object even if
4356 the address is not bit-aligned. */
4359 remove_conversions (tree exp, bool true_address)
4361 switch (TREE_CODE (exp))
4365 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4366 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4368 remove_conversions (VEC_index (constructor_elt,
4369 CONSTRUCTOR_ELTS (exp), 0)->value,
4374 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4375 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4378 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4380 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4389 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4390 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4391 likewise return an expression pointing to the underlying array. */
4394 maybe_unconstrained_array (tree exp)
4396 enum tree_code code = TREE_CODE (exp);
4399 switch (TREE_CODE (TREE_TYPE (exp)))
4401 case UNCONSTRAINED_ARRAY_TYPE:
4402 if (code == UNCONSTRAINED_ARRAY_REF)
4405 = build_unary_op (INDIRECT_REF, NULL_TREE,
4406 build_component_ref (TREE_OPERAND (exp, 0),
4407 get_identifier ("P_ARRAY"),
4409 TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4410 = TREE_READONLY (exp);
4414 else if (code == NULL_EXPR)
4415 return build1 (NULL_EXPR,
4416 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4417 (TREE_TYPE (TREE_TYPE (exp))))),
4418 TREE_OPERAND (exp, 0));
4421 /* If this is a padded type, convert to the unpadded type and see if
4422 it contains a template. */
4423 if (TYPE_PADDING_P (TREE_TYPE (exp)))
4425 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4426 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4427 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4429 build_component_ref (new_exp, NULL_TREE,
4431 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4434 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4436 build_component_ref (exp, NULL_TREE,
4437 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4447 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4448 TYPE_REPRESENTATIVE_ARRAY. */
4451 maybe_vector_array (tree exp)
4453 tree etype = TREE_TYPE (exp);
4455 if (VECTOR_TYPE_P (etype))
4456 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4461 /* Return true if EXPR is an expression that can be folded as an operand
4462 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4465 can_fold_for_view_convert_p (tree expr)
4469 /* The folder will fold NOP_EXPRs between integral types with the same
4470 precision (in the middle-end's sense). We cannot allow it if the
4471 types don't have the same precision in the Ada sense as well. */
4472 if (TREE_CODE (expr) != NOP_EXPR)
4475 t1 = TREE_TYPE (expr);
4476 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4478 /* Defer to the folder for non-integral conversions. */
4479 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4482 /* Only fold conversions that preserve both precisions. */
4483 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4484 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4490 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4491 If NOTRUNC_P is true, truncation operations should be suppressed.
4493 Special care is required with (source or target) integral types whose
4494 precision is not equal to their size, to make sure we fetch or assign
4495 the value bits whose location might depend on the endianness, e.g.
4497 Rmsize : constant := 8;
4498 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4500 type Bit_Array is array (1 .. Rmsize) of Boolean;
4501 pragma Pack (Bit_Array);
4503 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4505 Value : Int := 2#1000_0001#;
4506 Vbits : Bit_Array := To_Bit_Array (Value);
4508 we expect the 8 bits at Vbits'Address to always contain Value, while
4509 their original location depends on the endianness, at Value'Address
4510 on a little-endian architecture but not on a big-endian one. */
4513 unchecked_convert (tree type, tree expr, bool notrunc_p)
4515 tree etype = TREE_TYPE (expr);
4517 /* If the expression is already the right type, we are done. */
4521 /* If both types types are integral just do a normal conversion.
4522 Likewise for a conversion to an unconstrained array. */
4523 if ((((INTEGRAL_TYPE_P (type)
4524 && !(TREE_CODE (type) == INTEGER_TYPE
4525 && TYPE_VAX_FLOATING_POINT_P (type)))
4526 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4527 || (TREE_CODE (type) == RECORD_TYPE
4528 && TYPE_JUSTIFIED_MODULAR_P (type)))
4529 && ((INTEGRAL_TYPE_P (etype)
4530 && !(TREE_CODE (etype) == INTEGER_TYPE
4531 && TYPE_VAX_FLOATING_POINT_P (etype)))
4532 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4533 || (TREE_CODE (etype) == RECORD_TYPE
4534 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4535 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4537 if (TREE_CODE (etype) == INTEGER_TYPE
4538 && TYPE_BIASED_REPRESENTATION_P (etype))
4540 tree ntype = copy_type (etype);
4541 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4542 TYPE_MAIN_VARIANT (ntype) = ntype;
4543 expr = build1 (NOP_EXPR, ntype, expr);
4546 if (TREE_CODE (type) == INTEGER_TYPE
4547 && TYPE_BIASED_REPRESENTATION_P (type))
4549 tree rtype = copy_type (type);
4550 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4551 TYPE_MAIN_VARIANT (rtype) = rtype;
4552 expr = convert (rtype, expr);
4553 expr = build1 (NOP_EXPR, type, expr);
4556 expr = convert (type, expr);
4559 /* If we are converting to an integral type whose precision is not equal
4560 to its size, first unchecked convert to a record that contains an
4561 object of the output type. Then extract the field. */
4562 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4563 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4564 GET_MODE_BITSIZE (TYPE_MODE (type))))
4566 tree rec_type = make_node (RECORD_TYPE);
4567 tree field = create_field_decl (get_identifier ("OBJ"), type,
4568 rec_type, 1, 0, 0, 0);
4570 TYPE_FIELDS (rec_type) = field;
4571 layout_type (rec_type);
4573 expr = unchecked_convert (rec_type, expr, notrunc_p);
4574 expr = build_component_ref (expr, NULL_TREE, field, 0);
4577 /* Similarly if we are converting from an integral type whose precision
4578 is not equal to its size. */
4579 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4580 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4581 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4583 tree rec_type = make_node (RECORD_TYPE);
4585 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4588 TYPE_FIELDS (rec_type) = field;
4589 layout_type (rec_type);
4591 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4592 expr = unchecked_convert (type, expr, notrunc_p);
4595 /* We have a special case when we are converting between two unconstrained
4596 array types. In that case, take the address, convert the fat pointer
4597 types, and dereference. */
4598 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4599 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4600 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4601 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4602 build_unary_op (ADDR_EXPR, NULL_TREE,
4605 /* Another special case is when we are converting to a vector type from its
4606 representative array type; this a regular conversion. */
4607 else if (TREE_CODE (type) == VECTOR_TYPE
4608 && TREE_CODE (etype) == ARRAY_TYPE
4609 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4611 expr = convert (type, expr);
4615 expr = maybe_unconstrained_array (expr);
4616 etype = TREE_TYPE (expr);
4617 if (can_fold_for_view_convert_p (expr))
4618 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4620 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4623 /* If the result is an integral type whose precision is not equal to its
4624 size, sign- or zero-extend the result. We need not do this if the input
4625 is an integral type of the same precision and signedness or if the output
4626 is a biased type or if both the input and output are unsigned. */
4628 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4629 && !(TREE_CODE (type) == INTEGER_TYPE
4630 && TYPE_BIASED_REPRESENTATION_P (type))
4631 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4632 GET_MODE_BITSIZE (TYPE_MODE (type)))
4633 && !(INTEGRAL_TYPE_P (etype)
4634 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4635 && operand_equal_p (TYPE_RM_SIZE (type),
4636 (TYPE_RM_SIZE (etype) != 0
4637 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4639 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4641 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4642 TYPE_UNSIGNED (type));
4644 = convert (base_type,
4645 size_binop (MINUS_EXPR,
4647 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4648 TYPE_RM_SIZE (type)));
4651 build_binary_op (RSHIFT_EXPR, base_type,
4652 build_binary_op (LSHIFT_EXPR, base_type,
4653 convert (base_type, expr),
4658 /* An unchecked conversion should never raise Constraint_Error. The code
4659 below assumes that GCC's conversion routines overflow the same way that
4660 the underlying hardware does. This is probably true. In the rare case
4661 when it is false, we can rely on the fact that such conversions are
4662 erroneous anyway. */
4663 if (TREE_CODE (expr) == INTEGER_CST)
4664 TREE_OVERFLOW (expr) = 0;
4666 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4667 show no longer constant. */
4668 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4669 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4671 TREE_CONSTANT (expr) = 0;
4676 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4677 the latter being a record type as predicated by Is_Record_Type. */
4680 tree_code_for_record_type (Entity_Id gnat_type)
4682 Node_Id component_list
4683 = Component_List (Type_Definition
4685 (Implementation_Base_Type (gnat_type))));
4688 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4689 we have a non-discriminant field outside a variant. In either case,
4690 it's a RECORD_TYPE. */
4692 if (!Is_Unchecked_Union (gnat_type))
4695 for (component = First_Non_Pragma (Component_Items (component_list));
4696 Present (component);
4697 component = Next_Non_Pragma (component))
4698 if (Ekind (Defining_Entity (component)) == E_Component)
4704 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4705 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4706 according to the presence of an alignment clause on the type or, if it
4707 is an array, on the component type. */
4710 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4712 gnat_type = Underlying_Type (gnat_type);
4714 *align_clause = Present (Alignment_Clause (gnat_type));
4716 if (Is_Array_Type (gnat_type))
4718 gnat_type = Underlying_Type (Component_Type (gnat_type));
4719 if (Present (Alignment_Clause (gnat_type)))
4720 *align_clause = true;
4723 if (!Is_Floating_Point_Type (gnat_type))
4726 if (UI_To_Int (Esize (gnat_type)) != 64)
4732 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4733 size is greater or equal to 64 bits, or an array of such a type. Set
4734 ALIGN_CLAUSE according to the presence of an alignment clause on the
4735 type or, if it is an array, on the component type. */
4738 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4740 gnat_type = Underlying_Type (gnat_type);
4742 *align_clause = Present (Alignment_Clause (gnat_type));
4744 if (Is_Array_Type (gnat_type))
4746 gnat_type = Underlying_Type (Component_Type (gnat_type));
4747 if (Present (Alignment_Clause (gnat_type)))
4748 *align_clause = true;
4751 if (!Is_Scalar_Type (gnat_type))
4754 if (UI_To_Int (Esize (gnat_type)) < 64)
4760 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4761 component of an aggregate type. */
4764 type_for_nonaliased_component_p (tree gnu_type)
4766 /* If the type is passed by reference, we may have pointers to the
4767 component so it cannot be made non-aliased. */
4768 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4771 /* We used to say that any component of aggregate type is aliased
4772 because the front-end may take 'Reference of it. The front-end
4773 has been enhanced in the meantime so as to use a renaming instead
4774 in most cases, but the back-end can probably take the address of
4775 such a component too so we go for the conservative stance.
4777 For instance, we might need the address of any array type, even
4778 if normally passed by copy, to construct a fat pointer if the
4779 component is used as an actual for an unconstrained formal.
4781 Likewise for record types: even if a specific record subtype is
4782 passed by copy, the parent type might be passed by ref (e.g. if
4783 it's of variable size) and we might take the address of a child
4784 component to pass to a parent formal. We have no way to check
4785 for such conditions here. */
4786 if (AGGREGATE_TYPE_P (gnu_type))
4792 /* Perform final processing on global variables. */
4795 gnat_write_global_declarations (void)
4797 /* Proceed to optimize and emit assembly.
4798 FIXME: shouldn't be the front end's responsibility to call this. */
4799 cgraph_finalize_compilation_unit ();
4801 /* Emit debug info for all global declarations. */
4802 emit_debug_global_declarations (VEC_address (tree, global_decls),
4803 VEC_length (tree, global_decls));
4806 /* ************************************************************************
4807 * * GCC builtins support *
4808 * ************************************************************************ */
4810 /* The general scheme is fairly simple:
4812 For each builtin function/type to be declared, gnat_install_builtins calls
4813 internal facilities which eventually get to gnat_push_decl, which in turn
4814 tracks the so declared builtin function decls in the 'builtin_decls' global
4815 datastructure. When an Intrinsic subprogram declaration is processed, we
4816 search this global datastructure to retrieve the associated BUILT_IN DECL
4819 /* Search the chain of currently available builtin declarations for a node
4820 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4821 found, if any, or NULL_TREE otherwise. */
4823 builtin_decl_for (tree name)
4828 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4829 if (DECL_NAME (decl) == name)
4835 /* The code below eventually exposes gnat_install_builtins, which declares
4836 the builtin types and functions we might need, either internally or as
4837 user accessible facilities.
4839 ??? This is a first implementation shot, still in rough shape. It is
4840 heavily inspired from the "C" family implementation, with chunks copied
4841 verbatim from there.
4843 Two obvious TODO candidates are
4844 o Use a more efficient name/decl mapping scheme
4845 o Devise a middle-end infrastructure to avoid having to copy
4846 pieces between front-ends. */
4848 /* ----------------------------------------------------------------------- *
4849 * BUILTIN ELEMENTARY TYPES *
4850 * ----------------------------------------------------------------------- */
4852 /* Standard data types to be used in builtin argument declarations. */
4856 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4858 CTI_CONST_STRING_TYPE,
4863 static tree c_global_trees[CTI_MAX];
4865 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4866 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4867 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4869 /* ??? In addition some attribute handlers, we currently don't support a
4870 (small) number of builtin-types, which in turns inhibits support for a
4871 number of builtin functions. */
4872 #define wint_type_node void_type_node
4873 #define intmax_type_node void_type_node
4874 #define uintmax_type_node void_type_node
4876 /* Build the void_list_node (void_type_node having been created). */
4879 build_void_list_node (void)
4881 tree t = build_tree_list (NULL_TREE, void_type_node);
4885 /* Used to help initialize the builtin-types.def table. When a type of
4886 the correct size doesn't exist, use error_mark_node instead of NULL.
4887 The later results in segfaults even when a decl using the type doesn't
4891 builtin_type_for_size (int size, bool unsignedp)
4893 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4894 return type ? type : error_mark_node;
4897 /* Build/push the elementary type decls that builtin functions/types
4901 install_builtin_elementary_types (void)
4903 signed_size_type_node = size_type_node;
4904 pid_type_node = integer_type_node;
4905 void_list_node = build_void_list_node ();
4907 string_type_node = build_pointer_type (char_type_node);
4908 const_string_type_node
4909 = build_pointer_type (build_qualified_type
4910 (char_type_node, TYPE_QUAL_CONST));
4913 /* ----------------------------------------------------------------------- *
4914 * BUILTIN FUNCTION TYPES *
4915 * ----------------------------------------------------------------------- */
4917 /* Now, builtin function types per se. */
4921 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4922 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4923 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4924 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4925 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4926 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4927 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4928 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4929 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4930 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4931 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4932 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4933 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4934 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4935 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4937 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4938 #include "builtin-types.def"
4939 #undef DEF_PRIMITIVE_TYPE
4940 #undef DEF_FUNCTION_TYPE_0
4941 #undef DEF_FUNCTION_TYPE_1
4942 #undef DEF_FUNCTION_TYPE_2
4943 #undef DEF_FUNCTION_TYPE_3
4944 #undef DEF_FUNCTION_TYPE_4
4945 #undef DEF_FUNCTION_TYPE_5
4946 #undef DEF_FUNCTION_TYPE_6
4947 #undef DEF_FUNCTION_TYPE_7
4948 #undef DEF_FUNCTION_TYPE_VAR_0
4949 #undef DEF_FUNCTION_TYPE_VAR_1
4950 #undef DEF_FUNCTION_TYPE_VAR_2
4951 #undef DEF_FUNCTION_TYPE_VAR_3
4952 #undef DEF_FUNCTION_TYPE_VAR_4
4953 #undef DEF_FUNCTION_TYPE_VAR_5
4954 #undef DEF_POINTER_TYPE
4958 typedef enum c_builtin_type builtin_type;
4960 /* A temporary array used in communication with def_fn_type. */
4961 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4963 /* A helper function for install_builtin_types. Build function type
4964 for DEF with return type RET and N arguments. If VAR is true, then the
4965 function should be variadic after those N arguments.
4967 Takes special care not to ICE if any of the types involved are
4968 error_mark_node, which indicates that said type is not in fact available
4969 (see builtin_type_for_size). In which case the function type as a whole
4970 should be error_mark_node. */
4973 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4975 tree args = NULL, t;
4980 for (i = 0; i < n; ++i)
4982 builtin_type a = (builtin_type) va_arg (list, int);
4983 t = builtin_types[a];
4984 if (t == error_mark_node)
4986 args = tree_cons (NULL_TREE, t, args);
4990 args = nreverse (args);
4992 args = chainon (args, void_list_node);
4994 t = builtin_types[ret];
4995 if (t == error_mark_node)
4997 t = build_function_type (t, args);
5000 builtin_types[def] = t;
5003 /* Build the builtin function types and install them in the builtin_types
5004 array for later use in builtin function decls. */
5007 install_builtin_function_types (void)
5009 tree va_list_ref_type_node;
5010 tree va_list_arg_type_node;
5012 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5014 va_list_arg_type_node = va_list_ref_type_node =
5015 build_pointer_type (TREE_TYPE (va_list_type_node));
5019 va_list_arg_type_node = va_list_type_node;
5020 va_list_ref_type_node = build_reference_type (va_list_type_node);
5023 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5024 builtin_types[ENUM] = VALUE;
5025 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5026 def_fn_type (ENUM, RETURN, 0, 0);
5027 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5028 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5029 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5030 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5031 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5032 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5033 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5034 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5035 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5036 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5037 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5039 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5040 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5042 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5043 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5044 def_fn_type (ENUM, RETURN, 1, 0);
5045 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5046 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5047 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5048 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5049 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5050 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5051 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5052 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5053 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5054 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5055 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5056 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5058 #include "builtin-types.def"
5060 #undef DEF_PRIMITIVE_TYPE
5061 #undef DEF_FUNCTION_TYPE_1
5062 #undef DEF_FUNCTION_TYPE_2
5063 #undef DEF_FUNCTION_TYPE_3
5064 #undef DEF_FUNCTION_TYPE_4
5065 #undef DEF_FUNCTION_TYPE_5
5066 #undef DEF_FUNCTION_TYPE_6
5067 #undef DEF_FUNCTION_TYPE_VAR_0
5068 #undef DEF_FUNCTION_TYPE_VAR_1
5069 #undef DEF_FUNCTION_TYPE_VAR_2
5070 #undef DEF_FUNCTION_TYPE_VAR_3
5071 #undef DEF_FUNCTION_TYPE_VAR_4
5072 #undef DEF_FUNCTION_TYPE_VAR_5
5073 #undef DEF_POINTER_TYPE
5074 builtin_types[(int) BT_LAST] = NULL_TREE;
5077 /* ----------------------------------------------------------------------- *
5078 * BUILTIN ATTRIBUTES *
5079 * ----------------------------------------------------------------------- */
5081 enum built_in_attribute
5083 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5084 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5085 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5086 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5087 #include "builtin-attrs.def"
5088 #undef DEF_ATTR_NULL_TREE
5090 #undef DEF_ATTR_IDENT
5091 #undef DEF_ATTR_TREE_LIST
5095 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5098 install_builtin_attributes (void)
5100 /* Fill in the built_in_attributes array. */
5101 #define DEF_ATTR_NULL_TREE(ENUM) \
5102 built_in_attributes[(int) ENUM] = NULL_TREE;
5103 #define DEF_ATTR_INT(ENUM, VALUE) \
5104 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5105 #define DEF_ATTR_IDENT(ENUM, STRING) \
5106 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5107 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5108 built_in_attributes[(int) ENUM] \
5109 = tree_cons (built_in_attributes[(int) PURPOSE], \
5110 built_in_attributes[(int) VALUE], \
5111 built_in_attributes[(int) CHAIN]);
5112 #include "builtin-attrs.def"
5113 #undef DEF_ATTR_NULL_TREE
5115 #undef DEF_ATTR_IDENT
5116 #undef DEF_ATTR_TREE_LIST
5119 /* Handle a "const" attribute; arguments as in
5120 struct attribute_spec.handler. */
5123 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5124 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5127 if (TREE_CODE (*node) == FUNCTION_DECL)
5128 TREE_READONLY (*node) = 1;
5130 *no_add_attrs = true;
5135 /* Handle a "nothrow" attribute; arguments as in
5136 struct attribute_spec.handler. */
5139 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5140 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5143 if (TREE_CODE (*node) == FUNCTION_DECL)
5144 TREE_NOTHROW (*node) = 1;
5146 *no_add_attrs = true;
5151 /* Handle a "pure" attribute; arguments as in
5152 struct attribute_spec.handler. */
5155 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5156 int ARG_UNUSED (flags), bool *no_add_attrs)
5158 if (TREE_CODE (*node) == FUNCTION_DECL)
5159 DECL_PURE_P (*node) = 1;
5160 /* ??? TODO: Support types. */
5163 warning (OPT_Wattributes, "%qs attribute ignored",
5164 IDENTIFIER_POINTER (name));
5165 *no_add_attrs = true;
5171 /* Handle a "no vops" attribute; arguments as in
5172 struct attribute_spec.handler. */
5175 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5176 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5177 bool *ARG_UNUSED (no_add_attrs))
5179 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5180 DECL_IS_NOVOPS (*node) = 1;
5184 /* Helper for nonnull attribute handling; fetch the operand number
5185 from the attribute argument list. */
5188 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5190 /* Verify the arg number is a constant. */
5191 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5192 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5195 *valp = TREE_INT_CST_LOW (arg_num_expr);
5199 /* Handle the "nonnull" attribute. */
5201 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5202 tree args, int ARG_UNUSED (flags),
5206 unsigned HOST_WIDE_INT attr_arg_num;
5208 /* If no arguments are specified, all pointer arguments should be
5209 non-null. Verify a full prototype is given so that the arguments
5210 will have the correct types when we actually check them later. */
5213 if (!TYPE_ARG_TYPES (type))
5215 error ("nonnull attribute without arguments on a non-prototype");
5216 *no_add_attrs = true;
5221 /* Argument list specified. Verify that each argument number references
5222 a pointer argument. */
5223 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5226 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5228 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5230 error ("nonnull argument has invalid operand number (argument %lu)",
5231 (unsigned long) attr_arg_num);
5232 *no_add_attrs = true;
5236 argument = TYPE_ARG_TYPES (type);
5239 for (ck_num = 1; ; ck_num++)
5241 if (!argument || ck_num == arg_num)
5243 argument = TREE_CHAIN (argument);
5247 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5249 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5250 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5251 *no_add_attrs = true;
5255 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5257 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5258 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5259 *no_add_attrs = true;
5268 /* Handle a "sentinel" attribute. */
5271 handle_sentinel_attribute (tree *node, tree name, tree args,
5272 int ARG_UNUSED (flags), bool *no_add_attrs)
5274 tree params = TYPE_ARG_TYPES (*node);
5278 warning (OPT_Wattributes,
5279 "%qs attribute requires prototypes with named arguments",
5280 IDENTIFIER_POINTER (name));
5281 *no_add_attrs = true;
5285 while (TREE_CHAIN (params))
5286 params = TREE_CHAIN (params);
5288 if (VOID_TYPE_P (TREE_VALUE (params)))
5290 warning (OPT_Wattributes,
5291 "%qs attribute only applies to variadic functions",
5292 IDENTIFIER_POINTER (name));
5293 *no_add_attrs = true;
5299 tree position = TREE_VALUE (args);
5301 if (TREE_CODE (position) != INTEGER_CST)
5303 warning (0, "requested position is not an integer constant");
5304 *no_add_attrs = true;
5308 if (tree_int_cst_lt (position, integer_zero_node))
5310 warning (0, "requested position is less than zero");
5311 *no_add_attrs = true;
5319 /* Handle a "noreturn" attribute; arguments as in
5320 struct attribute_spec.handler. */
5323 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5324 int ARG_UNUSED (flags), bool *no_add_attrs)
5326 tree type = TREE_TYPE (*node);
5328 /* See FIXME comment in c_common_attribute_table. */
5329 if (TREE_CODE (*node) == FUNCTION_DECL)
5330 TREE_THIS_VOLATILE (*node) = 1;
5331 else if (TREE_CODE (type) == POINTER_TYPE
5332 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5334 = build_pointer_type
5335 (build_type_variant (TREE_TYPE (type),
5336 TYPE_READONLY (TREE_TYPE (type)), 1));
5339 warning (OPT_Wattributes, "%qs attribute ignored",
5340 IDENTIFIER_POINTER (name));
5341 *no_add_attrs = true;
5347 /* Handle a "malloc" attribute; arguments as in
5348 struct attribute_spec.handler. */
5351 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5352 int ARG_UNUSED (flags), bool *no_add_attrs)
5354 if (TREE_CODE (*node) == FUNCTION_DECL
5355 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5356 DECL_IS_MALLOC (*node) = 1;
5359 warning (OPT_Wattributes, "%qs attribute ignored",
5360 IDENTIFIER_POINTER (name));
5361 *no_add_attrs = true;
5367 /* Fake handler for attributes we don't properly support. */
5370 fake_attribute_handler (tree * ARG_UNUSED (node),
5371 tree ARG_UNUSED (name),
5372 tree ARG_UNUSED (args),
5373 int ARG_UNUSED (flags),
5374 bool * ARG_UNUSED (no_add_attrs))
5379 /* Handle a "type_generic" attribute. */
5382 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5383 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5384 bool * ARG_UNUSED (no_add_attrs))
5388 /* Ensure we have a function type. */
5389 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5391 params = TYPE_ARG_TYPES (*node);
5392 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5393 params = TREE_CHAIN (params);
5395 /* Ensure we have a variadic function. */
5396 gcc_assert (!params);
5401 /* Handle a "vector_size" attribute; arguments as in
5402 struct attribute_spec.handler. */
5405 handle_vector_size_attribute (tree *node, tree name, tree args,
5406 int ARG_UNUSED (flags),
5409 unsigned HOST_WIDE_INT vecsize, nunits;
5410 enum machine_mode orig_mode;
5411 tree type = *node, new_type, size;
5413 *no_add_attrs = true;
5415 size = TREE_VALUE (args);
5417 if (!host_integerp (size, 1))
5419 warning (OPT_Wattributes, "%qs attribute ignored",
5420 IDENTIFIER_POINTER (name));
5424 /* Get the vector size (in bytes). */
5425 vecsize = tree_low_cst (size, 1);
5427 /* We need to provide for vector pointers, vector arrays, and
5428 functions returning vectors. For example:
5430 __attribute__((vector_size(16))) short *foo;
5432 In this case, the mode is SI, but the type being modified is
5433 HI, so we need to look further. */
5435 while (POINTER_TYPE_P (type)
5436 || TREE_CODE (type) == FUNCTION_TYPE
5437 || TREE_CODE (type) == METHOD_TYPE
5438 || TREE_CODE (type) == ARRAY_TYPE
5439 || TREE_CODE (type) == OFFSET_TYPE)
5440 type = TREE_TYPE (type);
5442 /* Get the mode of the type being modified. */
5443 orig_mode = TYPE_MODE (type);
5445 if ((!INTEGRAL_TYPE_P (type)
5446 && !SCALAR_FLOAT_TYPE_P (type)
5447 && !FIXED_POINT_TYPE_P (type))
5448 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5449 && GET_MODE_CLASS (orig_mode) != MODE_INT
5450 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5451 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5452 || TREE_CODE (type) == BOOLEAN_TYPE)
5454 error ("invalid vector type for attribute %qs",
5455 IDENTIFIER_POINTER (name));
5459 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5461 error ("vector size not an integral multiple of component size");
5467 error ("zero vector size");
5471 /* Calculate how many units fit in the vector. */
5472 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5473 if (nunits & (nunits - 1))
5475 error ("number of components of the vector not a power of two");
5479 new_type = build_vector_type (type, nunits);
5481 /* Build back pointers if needed. */
5482 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5487 /* Handle a "vector_type" attribute; arguments as in
5488 struct attribute_spec.handler. */
5491 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5492 int ARG_UNUSED (flags),
5495 /* Vector representative type and size. */
5496 tree rep_type = *node;
5497 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5500 /* Vector size in bytes and number of units. */
5501 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5503 /* Vector element type and mode. */
5505 enum machine_mode elem_mode;
5507 *no_add_attrs = true;
5509 /* Get the representative array type, possibly nested within a
5510 padding record e.g. for alignment purposes. */
5512 if (TYPE_IS_PADDING_P (rep_type))
5513 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5515 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5517 error ("attribute %qs applies to array types only",
5518 IDENTIFIER_POINTER (name));
5522 /* Silently punt on variable sizes. We can't make vector types for them,
5523 need to ignore them on front-end generated subtypes of unconstrained
5524 bases, and this attribute is for binding implementors, not end-users, so
5525 we should never get there from legitimate explicit uses. */
5527 if (!host_integerp (rep_size, 1))
5530 /* Get the element type/mode and check this is something we know
5531 how to make vectors of. */
5533 elem_type = TREE_TYPE (rep_type);
5534 elem_mode = TYPE_MODE (elem_type);
5536 if ((!INTEGRAL_TYPE_P (elem_type)
5537 && !SCALAR_FLOAT_TYPE_P (elem_type)
5538 && !FIXED_POINT_TYPE_P (elem_type))
5539 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5540 && GET_MODE_CLASS (elem_mode) != MODE_INT
5541 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5542 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5544 error ("invalid element type for attribute %qs",
5545 IDENTIFIER_POINTER (name));
5549 /* Sanity check the vector size and element type consistency. */
5551 vec_bytes = tree_low_cst (rep_size, 1);
5553 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5555 error ("vector size not an integral multiple of component size");
5561 error ("zero vector size");
5565 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5566 if (vec_units & (vec_units - 1))
5568 error ("number of components of the vector not a power of two");
5572 /* Build the vector type and replace. */
5574 *node = build_vector_type (elem_type, vec_units);
5575 rep_name = TYPE_NAME (rep_type);
5576 if (TREE_CODE (rep_name) == TYPE_DECL)
5577 rep_name = DECL_NAME (rep_name);
5578 TYPE_NAME (*node) = rep_name;
5579 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5584 /* ----------------------------------------------------------------------- *
5585 * BUILTIN FUNCTIONS *
5586 * ----------------------------------------------------------------------- */
5588 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5589 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5590 if nonansi_p and flag_no_nonansi_builtin. */
5593 def_builtin_1 (enum built_in_function fncode,
5595 enum built_in_class fnclass,
5596 tree fntype, tree libtype,
5597 bool both_p, bool fallback_p,
5598 bool nonansi_p ATTRIBUTE_UNUSED,
5599 tree fnattrs, bool implicit_p)
5602 const char *libname;
5604 /* Preserve an already installed decl. It most likely was setup in advance
5605 (e.g. as part of the internal builtins) for specific reasons. */
5606 if (built_in_decls[(int) fncode] != NULL_TREE)
5609 gcc_assert ((!both_p && !fallback_p)
5610 || !strncmp (name, "__builtin_",
5611 strlen ("__builtin_")));
5613 libname = name + strlen ("__builtin_");
5614 decl = add_builtin_function (name, fntype, fncode, fnclass,
5615 (fallback_p ? libname : NULL),
5618 /* ??? This is normally further controlled by command-line options
5619 like -fno-builtin, but we don't have them for Ada. */
5620 add_builtin_function (libname, libtype, fncode, fnclass,
5623 built_in_decls[(int) fncode] = decl;
5625 implicit_built_in_decls[(int) fncode] = decl;
5628 static int flag_isoc94 = 0;
5629 static int flag_isoc99 = 0;
5631 /* Install what the common builtins.def offers. */
5634 install_builtin_functions (void)
5636 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5637 NONANSI_P, ATTRS, IMPLICIT, COND) \
5639 def_builtin_1 (ENUM, NAME, CLASS, \
5640 builtin_types[(int) TYPE], \
5641 builtin_types[(int) LIBTYPE], \
5642 BOTH_P, FALLBACK_P, NONANSI_P, \
5643 built_in_attributes[(int) ATTRS], IMPLICIT);
5644 #include "builtins.def"
5648 /* ----------------------------------------------------------------------- *
5649 * BUILTIN FUNCTIONS *
5650 * ----------------------------------------------------------------------- */
5652 /* Install the builtin functions we might need. */
5655 gnat_install_builtins (void)
5657 install_builtin_elementary_types ();
5658 install_builtin_function_types ();
5659 install_builtin_attributes ();
5661 /* Install builtins used by generic middle-end pieces first. Some of these
5662 know about internal specificities and control attributes accordingly, for
5663 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5664 the generic definition from builtins.def. */
5665 build_common_builtin_nodes ();
5667 /* Now, install the target specific builtins, such as the AltiVec family on
5668 ppc, and the common set as exposed by builtins.def. */
5669 targetm.init_builtins ();
5670 install_builtin_functions ();
5673 #include "gt-ada-utils.h"
5674 #include "gtype-ada.h"