1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2010, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
38 #include "langhooks.h"
40 #include "tree-dump.h"
41 #include "tree-inline.h"
42 #include "tree-iterator.h"
/* Fallback definition when the target headers do not provide a bound of
   their own: use the target word size.  */
58 #ifndef MAX_BITS_PER_WORD
59 #define MAX_BITS_PER_WORD BITS_PER_WORD
62 /* If nonzero, pretend we are allocating at global level. */
65 /* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68 int double_float_alignment;
70 /* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73 int double_scalar_alignment;
75 /* Tree nodes for the various types and decls we create. */
76 tree gnat_std_decls[(int) ADT_LAST];
78 /* Functions to call for each of the possible raise reasons. */
79 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81 /* Forward declarations for handlers of attributes. */
/* All handlers below share one signature, matching the handler slot of the
   gnat_internal_attribute_table entries further down: they receive a pointer
   to the node the attribute applies to, the attribute name, its argument
   list, flags, and a bool out-parameter used to refuse the attribute.  */
82 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
83 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
84 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
85 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
94 /* Fake handler for attributes we don't properly support, typically because
95 they'd require dragging a lot of the common-c front-end circuitry. */
96 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
98 /* Table of machine-independent internal attributes for Ada. We support
99 this minimal set of attributes to accommodate the needs of builtins. */
100 const struct attribute_spec gnat_internal_attribute_table[] =
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
/* NOTE(review): a max_len of -1 (see "nonnull") presumably means "no upper
   bound on the number of attribute arguments" -- confirm against the
   attribute_spec definition in tree.h.  */
103 { "const", 0, 0, true, false, false, handle_const_attribute },
104 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
105 { "pure", 0, 0, true, false, false, handle_pure_attribute },
106 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
107 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
108 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
109 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
110 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
111 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
113 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
114 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
115 { "may_alias", 0, 0, false, true, false, NULL },
117 /* ??? format and format_arg are heavy and not supported, which actually
118 prevents support for stdio builtins, which we however declare as part
119 of the common builtins.def contents. */
120 { "format", 3, 3, false, true, true, fake_attribute_handler },
121 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
/* All-NULL entry terminating the table.  */
123 { NULL, 0, 0, false, false, false, NULL }
126 /* Associates a GNAT tree node to a GCC tree node. It is used in
127 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
128 of `save_gnu_tree' for more info. */
129 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
/* Accessors for the table above.  Entries are indexed by GNAT node id,
   rebased at First_Node_Id so the array starts at index 0.  */
131 #define GET_GNU_TREE(GNAT_ENTITY) \
132 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
134 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
135 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
137 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
138 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
140 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
141 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
/* Accessors for the dummy table, with the same indexing scheme as above.  */
143 #define GET_DUMMY_NODE(GNAT_ENTITY) \
144 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
146 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
147 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
149 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
150 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
152 /* This variable keeps a table for types for each precision so that we only
153 allocate each of them once. Signed and unsigned types are kept separate.
155 Note that these types are only used when fold-const requests something
156 special. Perhaps we should NOT share these types; we'll see how it
/* First index: bit precision; second index: 0 for signed, 1 for unsigned.  */
158 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
160 /* Likewise for float types, but record these by mode. */
161 static GTY(()) tree float_types[NUM_MACHINE_MODES];
163 /* For each binding contour we allocate a binding_level structure to indicate
164 the binding depth. */
166 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
167 /* The binding level containing this one (the enclosing binding level). */
168 struct gnat_binding_level *chain;
169 /* The BLOCK node for this level. */
/* NOTE(review): the member declarations for the BLOCK ("block") and the
   jmpbuf decl ("jmpbuf_decl") are at lines elided from this listing; the
   code below accesses both (e.g. newlevel->block, level->jmpbuf_decl).  */
171 /* If nonzero, the setjmp buffer that needs to be updated for any
172 variable-sized definition within this context. */
176 /* The binding level currently in effect. */
177 static GTY(()) struct gnat_binding_level *current_binding_level;
179 /* A chain of gnat_binding_level structures awaiting reuse. */
180 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
182 /* An array of global declarations. */
183 static GTY(()) VEC(tree,gc) *global_decls;
185 /* An array of builtin function declarations. */
186 static GTY(()) VEC(tree,gc) *builtin_decls;
188 /* An array of global renaming pointers. */
189 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
191 /* A chain of unused BLOCK nodes. */
192 static GTY((deletable)) tree free_block_chain;
/* Forward declarations for local subprograms defined later in this file.  */
194 static tree merge_sizes (tree, tree, tree, bool, bool);
195 static tree compute_related_constant (tree, tree);
196 static tree split_plus (tree, tree *);
197 static tree float_type_for_precision (int, enum machine_mode);
198 static tree convert_to_fat_pointer (tree, tree);
199 static tree convert_to_thin_pointer (tree, tree);
200 static tree make_descriptor_field (const char *,tree, tree, tree);
201 static bool potential_alignment_gap (tree, tree, tree);
202 static void process_attributes (tree, struct attrib *);
204 /* Initialize the association of GNAT nodes to GCC trees. */
207 init_gnat_to_gnu (void)
209 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
212 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
213 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
214 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
216 If GNU_DECL is zero, a previous association is to be reset. */
219 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
221 /* Check that GNAT_ENTITY is not already defined and that it is being set
222 to something which is a decl. Raise gigi 401 if not. Usually, this
223 means GNAT_ENTITY is defined twice, but occasionally is due to some
225 gcc_assert (!(gnu_decl
226 && (PRESENT_GNU_TREE (gnat_entity)
227 || (!no_check && !DECL_P (gnu_decl)))));
229 SET_GNU_TREE (gnat_entity, gnu_decl);
232 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
233 Return the ..._DECL node that was associated with it. If there is no tree
234 node associated with GNAT_ENTITY, abort.
236 In some cases, such as delayed elaboration or expressions that need to
237 be elaborated only once, GNAT_ENTITY is really not an entity. */
240 get_gnu_tree (Entity_Id gnat_entity)
242 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
243 return GET_GNU_TREE (gnat_entity);
246 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
249 present_gnu_tree (Entity_Id gnat_entity)
251 return PRESENT_GNU_TREE (gnat_entity);
254 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
257 init_dummy_type (void)
259 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
262 /* Make a dummy type corresponding to GNAT_TYPE. */
265 make_dummy_type (Entity_Id gnat_type)
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
279 /* If it there already a dummy type, use that one. Else make one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 TYPE_STUB_DECL (gnu_type)
291 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
292 if (Is_By_Reference_Type (gnat_type))
293 TREE_ADDRESSABLE (gnu_type) = 1;
295 SET_DUMMY_NODE (gnat_underlying, gnu_type);
300 /* Return nonzero if we are currently in the global binding level. */
303 global_bindings_p (void)
305 return ((force_global || !current_function_decl) ? -1 : 0);
308 /* Enter a new binding level. */
311 gnat_pushlevel (void)
313 struct gnat_binding_level *newlevel = NULL;
315 /* Reuse a struct for this binding level, if there is one. */
316 if (free_binding_level)
318 newlevel = free_binding_level;
319 free_binding_level = free_binding_level->chain;
322 newlevel = ggc_alloc_gnat_binding_level ();
324 /* Use a free BLOCK, if any; otherwise, allocate one. */
325 if (free_block_chain)
327 newlevel->block = free_block_chain;
328 free_block_chain = BLOCK_CHAIN (free_block_chain);
329 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
332 newlevel->block = make_node (BLOCK);
334 /* Point the BLOCK we just made to its parent. */
335 if (current_binding_level)
336 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
338 BLOCK_VARS (newlevel->block) = NULL_TREE;
339 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
340 TREE_USED (newlevel->block) = 1;
342 /* Add this level to the front of the chain (stack) of active levels. */
343 newlevel->chain = current_binding_level;
344 newlevel->jmpbuf_decl = NULL_TREE;
345 current_binding_level = newlevel;
348 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
349 and point FNDECL to this BLOCK. */
352 set_current_block_context (tree fndecl)
354 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
355 DECL_INITIAL (fndecl) = current_binding_level->block;
356 set_block_for_group (current_binding_level->block);
359 /* Set the jmpbuf_decl for the current binding level to DECL. */
362 set_block_jmpbuf_decl (tree decl)
364 current_binding_level->jmpbuf_decl = decl;
367 /* Get the jmpbuf_decl, if any, for the current binding level. */
370 get_block_jmpbuf_decl (void)
372 return current_binding_level->jmpbuf_decl;
375 /* Exit a binding level. Set any BLOCK into the current code group. */
380 struct gnat_binding_level *level = current_binding_level;
381 tree block = level->block;
383 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
384 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
386 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
387 are no variables free the block and merge its subblocks into those of its
388 parent block. Otherwise, add it to the list of its parent. */
389 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
391 else if (BLOCK_VARS (block) == NULL_TREE)
393 BLOCK_SUBBLOCKS (level->chain->block)
394 = chainon (BLOCK_SUBBLOCKS (block),
395 BLOCK_SUBBLOCKS (level->chain->block));
396 BLOCK_CHAIN (block) = free_block_chain;
397 free_block_chain = block;
401 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
402 BLOCK_SUBBLOCKS (level->chain->block) = block;
403 TREE_USED (block) = 1;
404 set_block_for_group (block);
407 /* Free this binding structure. */
408 current_binding_level = level->chain;
409 level->chain = free_binding_level;
410 free_binding_level = level;
414 /* Records a ..._DECL node DECL as belonging to the current lexical scope
415 and uses GNAT_NODE for location information and propagating flags. */
418 gnat_pushdecl (tree decl, Node_Id gnat_node)
/* NOTE(review): braces, else-branches and a few statements are elided from
   this numbered listing (numbering gaps); the visible code lines below are
   kept verbatim.  */
420 /* If this decl is public external or at toplevel, there is no context.
421 But PARM_DECLs always go in the level of its function. */
422 if (TREE_CODE (decl) != PARM_DECL
423 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
424 || global_bindings_p ()))
425 DECL_CONTEXT (decl) = 0;
/* Otherwise the decl belongs to the function currently being compiled.  */
428 DECL_CONTEXT (decl) = current_function_decl;
430 /* Functions imported in another function are not really nested.
431 For really nested functions mark them initially as needing
432 a static chain for uses of that flag before unnesting;
433 lower_nested_functions will then recompute it. */
434 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
435 DECL_STATIC_CHAIN (decl) = 1;
/* Suppress warnings for the decl when there is no GNAT node or warnings
   are explicitly turned off on it.  */
438 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
440 /* Set the location of DECL and emit a declaration for it. */
441 if (Present (gnat_node))
442 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
443 add_decl_expr (decl, gnat_node);
445 /* Put the declaration on the list. The list of declarations is in reverse
446 order. The list will be reversed later. Put global variables in the
447 globals list and builtin functions in a dedicated list to speed up
448 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
449 the list, as they will cause trouble with the debugger and aren't needed
451 if (TREE_CODE (decl) != TYPE_DECL
452 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
454 if (global_bindings_p ())
456 VEC_safe_push (tree, gc, global_decls, decl)
458 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
459 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Not global: chain the decl onto the current binding level's BLOCK.  */
463 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
464 BLOCK_VARS (current_binding_level->block) = decl;
468 /* For the declaration of a type, set its name if it either is not already
469 set or if the previous type name was not derived from a source name.
470 We'd rather have the type named with a real name and all the pointer
471 types to the same object have the same POINTER_TYPE node. Code in the
472 equivalent function of c-decl.c makes a copy of the type node here, but
473 that may cause us trouble with incomplete types. We make an exception
474 for fat pointer types because the compiler automatically builds them
475 for unconstrained array types and the debugger uses them to represent
476 both these and pointers to these. */
477 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
479 tree t = TREE_TYPE (decl);
481 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
483 else if (TYPE_IS_FAT_POINTER_P (t))
/* Fat pointer: name a variant copy so the original node keeps its
   automatically-built identity.  */
485 tree tt = build_variant_type_copy (t);
486 TYPE_NAME (tt) = decl;
487 TREE_USED (tt) = TREE_USED (t);
488 TREE_TYPE (decl) = tt;
489 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
490 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
492 DECL_ORIGINAL_TYPE (decl) = t;
494 DECL_ARTIFICIAL (decl) = 0;
496 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
501 /* Propagate the name to all the variants. This is needed for
502 the type qualifiers machinery to work properly. */
504 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
505 TYPE_NAME (t) = decl;
509 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
512 record_builtin_type (const char *name, tree type)
514 tree type_decl = build_decl (input_location,
515 TYPE_DECL, get_identifier (name), type);
517 gnat_pushdecl (type_decl, Empty);
519 if (debug_hooks->type_decl)
520 debug_hooks->type_decl (type_decl, false);
523 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
524 finish constructing the record or union type. If REP_LEVEL is zero, this
525 record has no representation clause and so will be entirely laid out here.
526 If REP_LEVEL is one, this record has a representation clause and has been
527 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
528 this record is derived from a parent record and thus inherits its layout;
529 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
530 we need to write debug information about this type. */
533 finish_record_type (tree record_type, tree field_list, int rep_level,
/* NOTE(review): this numbered listing elides many lines of the function
   (braces, switch heads, else-branches, the debug_info_p parameter line);
   the visible code lines below are kept verbatim.  */
536 enum tree_code code = TREE_CODE (record_type);
537 tree name = TYPE_NAME (record_type);
538 tree ada_size = bitsize_zero_node;
539 tree size = bitsize_zero_node;
540 bool had_size = TYPE_SIZE (record_type) != 0;
541 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
542 bool had_align = TYPE_ALIGN (record_type) != 0;
545 TYPE_FIELDS (record_type) = field_list;
547 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
548 generate debug info and have a parallel type. */
549 if (name && TREE_CODE (name) == TYPE_DECL)
550 name = DECL_NAME (name);
551 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
553 /* Globally initialize the record first. If this is a rep'ed record,
554 that just means some initializations; otherwise, layout the record. */
557 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
560 TYPE_SIZE_UNIT (record_type) = size_zero_node;
563 TYPE_SIZE (record_type) = bitsize_zero_node;
565 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
566 out just like a UNION_TYPE, since the size will be fixed. */
567 else if (code == QUAL_UNION_TYPE)
572 /* Ensure there isn't a size already set. There can be in an error
573 case where there is a rep clause but all fields have errors and
574 no longer have a position. */
575 TYPE_SIZE (record_type) = 0;
576 layout_type (record_type);
579 /* At this point, the position and size of each field is known. It was
580 either set before entry by a rep clause, or by laying out the type above.
582 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
583 to compute the Ada size; the GCC size and alignment (for rep'ed records
584 that are not padding types); and the mode (for rep'ed records). We also
585 clear the DECL_BIT_FIELD indication for the cases we know have not been
586 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
588 if (code == QUAL_UNION_TYPE)
589 field_list = nreverse (field_list);
591 for (field = field_list; field; field = TREE_CHAIN (field))
593 tree type = TREE_TYPE (field);
594 tree pos = bit_position (field);
595 tree this_size = DECL_SIZE (field);
/* Prefer the Ada size of an inner untagged record over its GCC size,
   when one has been recorded.  */
598 if ((TREE_CODE (type) == RECORD_TYPE
599 || TREE_CODE (type) == UNION_TYPE
600 || TREE_CODE (type) == QUAL_UNION_TYPE)
601 && !TYPE_FAT_POINTER_P (type)
602 && !TYPE_CONTAINS_TEMPLATE_P (type)
603 && TYPE_ADA_SIZE (type))
604 this_ada_size = TYPE_ADA_SIZE (type);
606 this_ada_size = this_size;
608 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
609 if (DECL_BIT_FIELD (field)
610 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
612 unsigned int align = TYPE_ALIGN (type);
614 /* In the general case, type alignment is required. */
615 if (value_factor_p (pos, align))
617 /* The enclosing record type must be sufficiently aligned.
618 Otherwise, if no alignment was specified for it and it
619 has been laid out already, bump its alignment to the
620 desired one if this is compatible with its size. */
621 if (TYPE_ALIGN (record_type) >= align)
623 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
624 DECL_BIT_FIELD (field) = 0;
628 && value_factor_p (TYPE_SIZE (record_type), align))
630 TYPE_ALIGN (record_type) = align;
631 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
632 DECL_BIT_FIELD (field) = 0;
636 /* In the non-strict alignment case, only byte alignment is required. */
637 if (!STRICT_ALIGNMENT
638 && DECL_BIT_FIELD (field)
639 && value_factor_p (pos, BITS_PER_UNIT))
640 DECL_BIT_FIELD (field) = 0;
643 /* If we still have DECL_BIT_FIELD set at this point, we know that the
644 field is technically not addressable. Except that it can actually
645 be addressed if it is BLKmode and happens to be properly aligned. */
646 if (DECL_BIT_FIELD (field)
647 && !(DECL_MODE (field) == BLKmode
648 && value_factor_p (pos, BITS_PER_UNIT)))
649 DECL_NONADDRESSABLE_P (field) = 1;
651 /* A type must be as aligned as its most aligned field that is not
652 a bit-field. But this is already enforced by layout_type. */
653 if (rep_level > 0 && !DECL_BIT_FIELD (field))
654 TYPE_ALIGN (record_type)
655 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* UNION_TYPE case: the size is the maximum over all fields.  */
660 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
661 size = size_binop (MAX_EXPR, size, this_size);
664 case QUAL_UNION_TYPE:
666 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
667 this_ada_size, ada_size);
668 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
673 /* Since we know here that all fields are sorted in order of
674 increasing bit position, the size of the record is one
675 higher than the ending bit of the last field processed
676 unless we have a rep clause, since in that case we might
677 have a field outside a QUAL_UNION_TYPE that has a higher ending
678 position. So use a MAX in that case. Also, if this field is a
679 QUAL_UNION_TYPE, we need to take into account the previous size in
680 the case of empty variants. */
682 = merge_sizes (ada_size, pos, this_ada_size,
683 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
685 = merge_sizes (size, pos, this_size,
686 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Restore the original order of a QUAL_UNION_TYPE's field list.  */
694 if (code == QUAL_UNION_TYPE)
695 nreverse (field_list);
699 /* If this is a padding record, we never want to make the size smaller
700 than what was specified in it, if any. */
701 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
702 size = TYPE_SIZE (record_type);
704 /* Now set any of the values we've just computed that apply. */
705 if (!TYPE_FAT_POINTER_P (record_type)
706 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
707 SET_TYPE_ADA_SIZE (record_type, ada_size);
711 tree size_unit = had_size_unit
712 ? TYPE_SIZE_UNIT (record_type)
714 size_binop (CEIL_DIV_EXPR, size,
716 unsigned int align = TYPE_ALIGN (record_type);
718 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
719 TYPE_SIZE_UNIT (record_type)
720 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
722 compute_record_mode (record_type);
/* Emit debug information (and the parallel type, if needed).  */
727 rest_of_record_type_compilation (record_type);
730 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
731 associated with it. It need not be invoked directly in most cases since
732 finish_record_type takes care of doing so, but this can be necessary if
733 a parallel type is to be attached to the record type. */
736 rest_of_record_type_compilation (tree record_type)
/* NOTE(review): many lines of this function (braces, else-branches, local
   declarations) are elided from this numbered listing; the visible code
   lines below are kept verbatim.  */
738 tree field_list = TYPE_FIELDS (record_type);
740 enum tree_code code = TREE_CODE (record_type);
741 bool var_size = false;
/* First pass: detect whether the record needs the XVE/XVU encoding.  */
743 for (field = field_list; field; field = TREE_CHAIN (field))
745 /* We need to make an XVE/XVU record if any field has variable size,
746 whether or not the record does. For example, if we have a union,
747 it may be that all fields, rounded up to the alignment, have the
748 same size, in which case we'll use that size. But the debug
749 output routines (except Dwarf2) won't be able to output the fields,
750 so we need to make the special record. */
751 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
752 /* If a field has a non-constant qualifier, the record will have
753 variable size too. */
754 || (code == QUAL_UNION_TYPE
755 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
762 /* If this record is of variable size, rename it so that the
763 debugger knows it is and make a new, parallel, record
764 that tells the debugger how the record is laid out. See
765 exp_dbug.ads. But don't do this for records that are padding
766 since they confuse GDB. */
767 if (var_size && !TYPE_IS_PADDING_P (record_type))
/* The parallel record is a plain UNION_TYPE when the original is a
   QUAL_UNION_TYPE; otherwise it keeps the original's tree code.  */
770 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
771 ? UNION_TYPE : TREE_CODE (record_type));
772 tree orig_name = TYPE_NAME (record_type), new_name;
773 tree last_pos = bitsize_zero_node;
774 tree old_field, prev_old_field = NULL_TREE;
776 if (TREE_CODE (orig_name) == TYPE_DECL)
777 orig_name = DECL_NAME (orig_name);
780 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
782 TYPE_NAME (new_record_type) = new_name;
783 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
784 TYPE_STUB_DECL (new_record_type)
785 = create_type_stub_decl (new_name, new_record_type);
786 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
787 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
788 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
789 TYPE_SIZE_UNIT (new_record_type)
790 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
792 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
794 /* Now scan all the fields, replacing each field with a new
795 field corresponding to the new encoding. */
796 for (old_field = TYPE_FIELDS (record_type); old_field;
797 old_field = TREE_CHAIN (old_field))
799 tree field_type = TREE_TYPE (old_field);
800 tree field_name = DECL_NAME (old_field);
802 tree curpos = bit_position (old_field);
804 unsigned int align = 0;
807 /* See how the position was modified from the last position.
809 There are two basic cases we support: a value was added
810 to the last position or the last position was rounded to
811 a boundary and then something was added. Check for the
812 first case first. If not, see if there is any evidence
813 of rounding. If so, round the last position and try
816 If this is a union, the position can be taken as zero. */
818 /* Some computations depend on the shape of the position expression,
819 so strip conversions to make sure it's exposed. */
820 curpos = remove_conversions (curpos, true);
822 if (TREE_CODE (new_record_type) == UNION_TYPE)
823 pos = bitsize_zero_node, align = 0;
825 pos = compute_related_constant (curpos, last_pos);
/* Rounding evidence: curpos of the form offset * factor.  */
827 if (!pos && TREE_CODE (curpos) == MULT_EXPR
828 && host_integerp (TREE_OPERAND (curpos, 1), 1))
830 tree offset = TREE_OPERAND (curpos, 0);
831 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
833 /* An offset which is a bitwise AND with a negative power of 2
834 means an alignment corresponding to this power of 2. Note
835 that, as sizetype is sign-extended but nonetheless unsigned,
836 we don't directly use tree_int_cst_sgn. */
837 offset = remove_conversions (offset, true);
838 if (TREE_CODE (offset) == BIT_AND_EXPR
839 && host_integerp (TREE_OPERAND (offset, 1), 0)
840 && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
843 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
844 if (exact_log2 (pow) > 0)
848 pos = compute_related_constant (curpos,
849 round_up (last_pos, align));
851 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
852 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
853 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
854 && host_integerp (TREE_OPERAND
855 (TREE_OPERAND (curpos, 0), 1),
860 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
861 pos = compute_related_constant (curpos,
862 round_up (last_pos, align));
864 else if (potential_alignment_gap (prev_old_field, old_field,
867 align = TYPE_ALIGN (field_type);
868 pos = compute_related_constant (curpos,
869 round_up (last_pos, align));
872 /* If we can't compute a position, set it to zero.
874 ??? We really should abort here, but it's too much work
875 to get this correct for all cases. */
878 pos = bitsize_zero_node;
880 /* See if this type is variable-sized and make a pointer type
881 and indicate the indirection if so. Beware that the debug
882 back-end may adjust the position computed above according
883 to the alignment of the field type, i.e. the pointer type
884 in this case, if we don't preventively counter that. */
885 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
887 field_type = build_pointer_type (field_type);
888 if (align != 0 && TYPE_ALIGN (field_type) > align)
890 field_type = copy_node (field_type);
891 TYPE_ALIGN (field_type) = align;
896 /* Make a new field name, if necessary. */
/* The XV suffix letters follow the exp_dbug.ads encoding: 'L' marks an
   indirect (variable-sized) field, 'A' an alignment annotation.  */
897 if (var || align != 0)
902 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
903 align / BITS_PER_UNIT);
905 strcpy (suffix, "XVL");
907 field_name = concat_name (field_name, suffix);
911 = create_field_decl (field_name, field_type, new_record_type,
912 DECL_SIZE (old_field), pos, 0, 0);
913 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
914 TYPE_FIELDS (new_record_type) = new_field;
916 /* If old_field is a QUAL_UNION_TYPE, take its size as being
917 zero. The only time it's not the last field of the record
918 is when there are other components at fixed positions after
919 it (meaning there was a rep clause for every field) and we
920 want to be able to encode them. */
921 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
922 (TREE_CODE (TREE_TYPE (old_field))
925 : DECL_SIZE (old_field));
926 prev_old_field = old_field;
/* Fields were chained in reverse; restore source order.  */
929 TYPE_FIELDS (new_record_type)
930 = nreverse (TYPE_FIELDS (new_record_type));
932 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
935 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
938 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
941 add_parallel_type (tree decl, tree parallel_type)
945 while (DECL_PARALLEL_TYPE (d))
946 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
948 SET_DECL_PARALLEL_TYPE (d, parallel_type);
951 /* Return the parallel type associated to a type, if any. */
954 get_parallel_type (tree type)
956 if (TYPE_STUB_DECL (type))
957 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
962 /* Utility function of above to merge LAST_SIZE, the previous size of a record
963 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
964 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
965 replace a value of zero with the old size. If HAS_REP is true, we take the
966 MAX of the end position of this field with LAST_SIZE. In all other cases,
967 we use FIRST_BIT plus SIZE. Return an expression for the size. */
/* NOTE(review): the second signature line (HAS_REP parameter), the
   declaration of NEW_SIZE, the else keywords and the trailing return of
   NEW_SIZE are missing from this excerpt.  */
970 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
973 tree type = TREE_TYPE (last_size);
/* Straight case: new size is FIRST_BIT + SIZE, MAXed with LAST_SIZE when
   the record has a rep clause (per the HAS_REP contract above).  */
976 if (!special || TREE_CODE (size) != COND_EXPR)
978 new_size = size_binop (PLUS_EXPR, first_bit, size);
980 new_size = size_binop (MAX_EXPR, last_size, new_size);
/* QUAL_UNION_TYPE case: rebuild the COND_EXPR, substituting LAST_SIZE for
   a zero arm and recursing into non-zero arms.  */
984 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
985 integer_zerop (TREE_OPERAND (size, 1))
986 ? last_size : merge_sizes (last_size, first_bit,
987 TREE_OPERAND (size, 1),
989 integer_zerop (TREE_OPERAND (size, 2))
990 ? last_size : merge_sizes (last_size, first_bit,
991 TREE_OPERAND (size, 2),
994 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
995 when fed through substitute_in_expr) into thinking that a constant
996 size is not constant. */
997 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
998 new_size = TREE_OPERAND (new_size, 0);
1003 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1004 related by the addition of a constant. Return that constant if so. */
/* NOTE(review): the return type, braces, the returns for the two success
   cases and the failure fall-through are omitted from this excerpt.  */
1007 compute_related_constant (tree op0, tree op1)
1009 tree op0_var, op1_var;
/* Split each operand into constant + variable parts; the difference of
   the constant parts is the candidate constant.  */
1010 tree op0_con = split_plus (op0, &op0_var);
1011 tree op1_con = split_plus (op1, &op1_var);
1012 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* Accept if the variable parts match, or if OP0 literally equals
   OP1's variable part plus the candidate constant.  */
1014 if (operand_equal_p (op0_var, op1_var, 0))
1016 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1022 /* Utility function of above to split a tree OP which may be a sum, into a
1023 constant part, which is returned, and a variable part, which is stored
1024 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
/* NOTE(review): the tail of the comment above, the return type line and
   several braces are missing from this excerpt.  */
1028 split_plus (tree in, tree *pvar)
1030 /* Strip NOPS in order to ease the tree traversal and maximize the
1031 potential for constant or plus/minus discovery. We need to be careful
1032 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole of IN is the variable part, in bitsizetype.  */
1036 *pvar = convert (bitsizetype, in);
/* A constant: the variable part is zero.  */
1038 if (TREE_CODE (in) == INTEGER_CST)
1040 *pvar = bitsize_zero_node;
1041 return convert (bitsizetype, in);
/* A sum or difference: recurse on both operands and recombine constant
   and variable parts with the same operation code.  */
1043 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1045 tree lhs_var, rhs_var;
1046 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1047 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side yielded a constant part: keep *PVAR as set above.  */
1049 if (lhs_var == TREE_OPERAND (in, 0)
1050 && rhs_var == TREE_OPERAND (in, 1))
1051 return bitsize_zero_node;
1053 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1054 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
/* Anything else: no constant part.  */
1057 return bitsize_zero_node;
1060 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1061 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1062 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1063 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1064 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1065 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1066 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1067 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1068 invisible reference. */
/* NOTE(review): the return type line, the declarations of T and TYPE, and
   the final return statement are missing from this excerpt.  */
1071 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1072 bool return_unconstrained_p, bool return_by_direct_ref_p,
1073 bool return_by_invisi_ref_p)
1075 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1076 the subprogram formal parameters. This list is generated by traversing
1077 the input list of PARM_DECL nodes. */
1078 tree param_type_list = NULL_TREE;
1081 for (t = param_decl_list; t; t = TREE_CHAIN (t))
1082 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1084 /* The list of the function parameter types has to be terminated by the void
1085 type to signal to the back-end that we are not dealing with a variable
1086 parameter subprogram, but that it has a fixed number of parameters. */
1087 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1089 /* The list of argument types has been created in reverse so reverse it. */
1090 param_type_list = nreverse (param_type_list);
1092 type = build_function_type (return_type, param_type_list);
1094 /* TYPE may have been shared since GCC hashes types. If it has a different
1095 CICO_LIST, make a copy. Likewise for the various flags. */
1096 if (TYPE_CI_CO_LIST (type) != cico_list
1097 || TYPE_RETURN_UNCONSTRAINED_P (type) != return_unconstrained_p
1098 || TYPE_RETURN_BY_DIRECT_REF_P (type) != return_by_direct_ref_p
1099 || TREE_ADDRESSABLE (type) != return_by_invisi_ref_p)
1101 type = copy_type (type);
1102 TYPE_CI_CO_LIST (type) = cico_list;
1103 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1104 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
/* TREE_ADDRESSABLE on a FUNCTION_TYPE encodes return-by-invisible-ref
   here.  */
1105 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1111 /* Return a copy of TYPE but safe to modify in any way. */
/* NOTE(review): the return type line, braces and the final return of
   NEW_TYPE are missing from this excerpt.  */
1114 copy_type (tree type)
1116 tree new_type = copy_node (type);
1118 /* Unshare the language-specific data. */
1119 if (TYPE_LANG_SPECIFIC (type))
1121 TYPE_LANG_SPECIFIC (new_type) = NULL;
1122 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1125 /* And the contents of the language-specific slot if needed. */
1126 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1127 && TYPE_RM_VALUES (type))
1129 TYPE_RM_VALUES (new_type) = NULL_TREE;
1130 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1131 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1132 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1135 /* copy_node clears this field instead of copying it, because it is
1136 aliased with TREE_CHAIN. */
1137 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/variant chains so later
   modifications cannot leak back.  */
1139 TYPE_POINTER_TO (new_type) = 0;
1140 TYPE_REFERENCE_TO (new_type) = 0;
1141 TYPE_MAIN_VARIANT (new_type) = new_type;
1142 TYPE_NEXT_VARIANT (new_type) = 0;
1147 /* Return a subtype of sizetype with range MIN to MAX and whose
1148 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1149 of the associated TYPE_DECL. */
/* NOTE(review): the return type line, the early return of TYPE and the
   final return are missing from this excerpt.  */
1152 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1154 /* First build a type for the desired range. */
1155 tree type = build_index_2_type (min, max);
1157 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1158 if (TYPE_INDEX_TYPE (type) == index)
1161 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1162 no way of sharing these types, but that's only a small hole. */
1163 if (TYPE_INDEX_TYPE (type))
1164 type = copy_type (type);
1166 SET_TYPE_INDEX_TYPE (type, index);
/* Attach an artificial TYPE_DECL so debug info gets a position.  */
1167 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1172 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1173 sizetype is used. */
/* NOTE(review): the return type line, the declaration of RANGE_TYPE, the
   assignment of sizetype when TYPE is null, the early return and the
   final return of RANGE_TYPE are missing from this excerpt.  */
1176 create_range_type (tree type, tree min, tree max)
1180 if (type == NULL_TREE)
1183 /* First build a type with the base range. */
1185 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1187 min = convert (type, min);
1188 max = convert (type, max);
1190 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1191 if (TYPE_RM_MIN_VALUE (range_type)
1192 && TYPE_RM_MAX_VALUE (range_type)
1193 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1194 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1197 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1198 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1199 range_type = copy_type (range_type);
1201 /* Then set the actual range. */
1202 SET_TYPE_RM_MIN_VALUE (range_type, min);
1203 SET_TYPE_RM_MAX_VALUE (range_type, max);
1208 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1209 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
/* NOTE(review): the tail of the comment above, the return type line and
   the return of TYPE_DECL are missing from this excerpt.  */
1213 create_type_stub_decl (tree type_name, tree type)
1215 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1216 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1217 emitted in DWARF. */
1218 tree type_decl = build_decl (input_location,
1219 TYPE_DECL, type_name, type);
1220 DECL_ARTIFICIAL (type_decl) = 1;
1224 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1225 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1226 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1227 true if we need to write debug information about this type. GNAT_NODE
1228 is used for the position of the decl. */
/* NOTE(review): the return type line, the declaration of TYPE_DECL, the
   else keyword before the build_decl branch and the final return are
   missing from this excerpt.  */
1231 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1232 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1234 enum tree_code code = TREE_CODE (type);
/* Whether the type already carries a named TYPE_DECL.  */
1235 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1238 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1239 gcc_assert (!TYPE_IS_DUMMY_P (type));
1241 /* If the type hasn't been named yet, we're naming it; preserve an existing
1242 TYPE_STUB_DECL that has been attached to it for some purpose. */
1243 if (!named && TYPE_STUB_DECL (type))
1245 type_decl = TYPE_STUB_DECL (type);
1246 DECL_NAME (type_decl) = type_name;
1249 type_decl = build_decl (input_location,
1250 TYPE_DECL, type_name, type);
1252 DECL_ARTIFICIAL (type_decl) = artificial_p;
1254 /* Add this decl to the current binding level. */
1255 gnat_pushdecl (type_decl, gnat_node);
1257 process_attributes (type_decl, attr_list);
1259 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1260 This causes the name to be also viewed as a "tag" by the debug
1261 back-end, with the advantage that no DW_TAG_typedef is emitted
1262 for artificial "tagged" types in DWARF. */
1264 TYPE_STUB_DECL (type) = type_decl;
1266 /* Pass the type declaration to the debug back-end unless this is an
1267 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1268 type for which debugging information was not requested, or else an
1269 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1270 handled separately. And do not pass dummy types either. */
1271 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1272 DECL_IGNORED_P (type_decl) = 1;
1273 else if (code != ENUMERAL_TYPE
1274 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1275 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1276 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1277 && !(code == RECORD_TYPE
1279 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1280 rest_of_type_decl_compilation (type_decl);
1285 /* Return a VAR_DECL or CONST_DECL node.
1287 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1288 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1289 the GCC tree for an optional initial expression; NULL_TREE if none.
1291 CONST_FLAG is true if this variable is constant, in which case we might
1292 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1294 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1295 definition to be made visible outside of the current compilation unit, for
1296 instance variable definitions in a package specification.
1298 EXTERN_FLAG is true when processing an external variable declaration (as
1299 opposed to a definition: no storage is to be allocated for the variable).
1301 STATIC_FLAG is only relevant when not at top level. In that case
1302 it indicates whether to always allocate storage to the variable.
1304 GNAT_NODE is used for the position of the decl. */
/* NOTE(review): several lines are missing from this excerpt, including the
   return type, the start of the INIT_CONST initializer, the VAR_DECL
   declaration head, guard conditions before the DECL_COMMON and
   DECL_IGNORED_P assignments, the else before expand_decl, and the final
   return.  Confirm against the full source before editing.  */
1307 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1308 bool const_flag, bool public_flag, bool extern_flag,
1309 bool static_flag, bool const_decl_allowed_p,
1310 struct attrib *attr_list, Node_Id gnat_node)
/* Tail of the INIT_CONST computation: the initializer must be type
   compatible and constant in the sense required by the storage class.  */
1314 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1315 && (global_bindings_p () || static_flag
1316 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1317 : TREE_CONSTANT (var_init)));
1319 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1320 case the initializer may be used in-lieu of the DECL node (as done in
1321 Identifier_to_gnu). This is useful to prevent the need of elaboration
1322 code when an identifier for which such a decl is made is in turn used as
1323 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1324 but extra constraints apply to this choice (see below) and are not
1325 relevant to the distinction we wish to make. */
1326 bool constant_p = const_flag && init_const;
1328 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1329 and may be used for scalars in general but not for aggregates. */
1331 = build_decl (input_location,
1332 (constant_p && const_decl_allowed_p
1333 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1336 /* If this is external, throw away any initializations (they will be done
1337 elsewhere) unless this is a constant for which we would like to remain
1338 able to get the initializer. If we are defining a global here, leave a
1339 constant initialization and save any variable elaborations for the
1340 elaboration routine. If we are just annotating types, throw away the
1341 initialization if it isn't a constant. */
1342 if ((extern_flag && !constant_p)
1343 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1344 var_init = NULL_TREE;
1346 /* At the global level, an initializer requiring code to be generated
1347 produces elaboration statements. Check that such statements are allowed,
1348 that is, not violating a No_Elaboration_Code restriction. */
1349 if (global_bindings_p () && var_init != 0 && !init_const)
1350 Check_Elaboration_Code_Allowed (gnat_node);
1352 DECL_INITIAL (var_decl) = var_init;
1353 TREE_READONLY (var_decl) = const_flag;
1354 DECL_EXTERNAL (var_decl) = extern_flag;
1355 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1356 TREE_CONSTANT (var_decl) = constant_p;
1357 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1358 = TYPE_VOLATILE (type);
1360 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1361 try to fiddle with DECL_COMMON. However, on platforms that don't
1362 support global BSS sections, uninitialized global variables would
1363 go in DATA instead, thus increasing the size of the executable. */
1365 && TREE_CODE (var_decl) == VAR_DECL
1366 && TREE_PUBLIC (var_decl)
1367 && !have_global_bss_p ())
1368 DECL_COMMON (var_decl) = 1;
1370 /* If it's public and not external, always allocate storage for it.
1371 At the global binding level we need to allocate static storage for the
1372 variable if and only if it's not external. If we are not at the top level
1373 we allocate automatic storage unless requested not to. */
1374 TREE_STATIC (var_decl)
1375 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1377 /* For an external constant whose initializer is not absolute, do not emit
1378 debug info. In DWARF this would mean a global relocation in a read-only
1379 section which runs afoul of the PE-COFF run-time relocation mechanism. */
1382 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1383 != null_pointer_node)
1384 DECL_IGNORED_P (var_decl) = 1;
1386 /* Add this decl to the current binding level. */
1387 gnat_pushdecl (var_decl, gnat_node);
/* A volatile decl must be addressable so its accesses are not
   optimized away.  */
1389 if (TREE_SIDE_EFFECTS (var_decl))
1390 TREE_ADDRESSABLE (var_decl) = 1;
1392 if (TREE_CODE (var_decl) == VAR_DECL)
1395 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1396 process_attributes (var_decl, attr_list);
1397 if (global_bindings_p ())
1398 rest_of_decl_compilation (var_decl, true, 0);
1401 expand_decl (var_decl);
1406 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
/* NOTE(review): the return type line, the RECORD_TYPE/UNION_TYPE case
   labels, the ARRAY_TYPE case, the default case and the returns are
   missing from this excerpt.  */
1409 aggregate_type_contains_array_p (tree type)
1411 switch (TREE_CODE (type))
1415 case QUAL_UNION_TYPE:
/* Recurse into every aggregate field of the record/union.  */
1418 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1419 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1420 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1433 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1434 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
1435 nonzero, it is the specified size of the field. If POS is nonzero, it is
1436 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
1437 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
1438 means we are allowed to take the address of the field; if it is negative,
1439 we should not make a bitfield, which is used by make_aligning_type. */
/* NOTE(review): this excerpt omits numerous lines: the return type,
   several `if (size)` / `if (pos)` guards, braces, else keywords, the
   DECL_FIELD_OFFSET setup and the final return.  Confirm against the
   full source before editing.  */
1442 create_field_decl (tree field_name, tree field_type, tree record_type,
1443 tree size, tree pos, int packed, int addressable)
1445 tree field_decl = build_decl (input_location,
1446 FIELD_DECL, field_name, field_type);
1448 DECL_CONTEXT (field_decl) = record_type;
1449 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1451 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1452 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1453 Likewise for an aggregate without specified position that contains an
1454 array, because in this case slices of variable length of this array
1455 must be handled by GCC and variable-sized objects need to be aligned
1456 to at least a byte boundary. */
1457 if (packed && (TYPE_MODE (field_type) == BLKmode
1459 && AGGREGATE_TYPE_P (field_type)
1460 && aggregate_type_contains_array_p (field_type)))
1461 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1463 /* If a size is specified, use it. Otherwise, if the record type is packed
1464 compute a size to use, which may differ from the object's natural size.
1465 We always set a size in this case to trigger the checks for bitfield
1466 creation below, which is typically required when no position has been
1469 size = convert (bitsizetype, size);
1470 else if (packed == 1)
1472 size = rm_size (field_type);
/* Round a BLKmode packed size up to a byte, for the reason given in
   the alignment comment above.  */
1473 if (TYPE_MODE (field_type) == BLKmode)
1474 size = round_up (size, BITS_PER_UNIT);
1477 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1478 specified for two reasons: first if the size differs from the natural
1479 size. Second, if the alignment is insufficient. There are a number of
1480 ways the latter can be true.
1482 We never make a bitfield if the type of the field has a nonconstant size,
1483 because no such entity requiring bitfield operations should reach here.
1485 We do *preventively* make a bitfield when there might be the need for it
1486 but we don't have all the necessary information to decide, as is the case
1487 of a field with no specified position in a packed record.
1489 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1490 in layout_decl or finish_record_type to clear the bit_field indication if
1491 it is in fact not needed. */
1492 if (addressable >= 0
1494 && TREE_CODE (size) == INTEGER_CST
1495 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1496 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1497 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1499 || (TYPE_ALIGN (record_type) != 0
1500 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1502 DECL_BIT_FIELD (field_decl) = 1;
1503 DECL_SIZE (field_decl) = size;
1504 if (!packed && !pos)
1506 if (TYPE_ALIGN (record_type) != 0
1507 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1508 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1510 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
/* With an explicit position, packing status follows the bitfield
   decision; otherwise it follows the enclosing record.  */
1514 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1516 /* Bump the alignment if need be, either for bitfield/packing purposes or
1517 to satisfy the type requirements if no such consideration applies. When
1518 we get the alignment from the type, indicate if this is from an explicit
1519 user request, which prevents stor-layout from lowering it later on. */
1521 unsigned int bit_align
1522 = (DECL_BIT_FIELD (field_decl) ? 1
1523 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1525 if (bit_align > DECL_ALIGN (field_decl))
1526 DECL_ALIGN (field_decl) = bit_align;
1527 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1529 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1530 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1536 /* We need to pass in the alignment the DECL is known to have.
1537 This is the lowest-order bit set in POS, but no more than
1538 the alignment of the record, if one is specified. Note
1539 that an alignment of 0 is taken as infinite. */
1540 unsigned int known_align;
/* Lowest-order set bit of POS, when POS is a host-representable
   constant.  */
1542 if (host_integerp (pos, 1))
1543 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1545 known_align = BITS_PER_UNIT;
1547 if (TYPE_ALIGN (record_type)
1548 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1549 known_align = TYPE_ALIGN (record_type);
1551 layout_decl (field_decl, known_align);
1552 SET_DECL_OFFSET_ALIGN (field_decl,
1553 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1555 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1556 &DECL_FIELD_BIT_OFFSET (field_decl),
1557 DECL_OFFSET_ALIGN (field_decl), pos);
1560 /* In addition to what our caller says, claim the field is addressable if we
1561 know that its type is not suitable.
1563 The field may also be "technically" nonaddressable, meaning that even if
1564 we attempt to take the field's address we will actually get the address
1565 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1566 value we have at this point is not accurate enough, so we don't account
1567 for this here and let finish_record_type decide. */
1568 if (!addressable && !type_for_nonaliased_component_p (field_type))
1571 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1576 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1577 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1578 (either an In parameter or an address of a pass-by-ref parameter). */
/* NOTE(review): the return type line, the declaration of SUBTYPE, several
   braces and the final return of PARAM_DECL are missing from this
   excerpt.  */
1581 create_param_decl (tree param_name, tree param_type, bool readonly)
1583 tree param_decl = build_decl (input_location,
1584 PARM_DECL, param_name, param_type);
1586 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1587 can lead to various ABI violations. */
1588 if (targetm.calls.promote_prototypes (NULL_TREE)
1589 && INTEGRAL_TYPE_P (param_type)
1590 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1592 /* We have to be careful about biased types here. Make a subtype
1593 of integer_type_node with the proper biasing. */
1594 if (TREE_CODE (param_type) == INTEGER_TYPE
1595 && TYPE_BIASED_REPRESENTATION_P (param_type))
1598 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1599 TREE_TYPE (subtype) = integer_type_node;
1600 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1601 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1602 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1603 param_type = subtype;
/* Non-biased narrow integral parameter: simply promote to int.  */
1606 param_type = integer_type_node;
/* DECL_ARG_TYPE records the type as passed, i.e. after promotion.  */
1609 DECL_ARG_TYPE (param_decl) = param_type;
1610 TREE_READONLY (param_decl) = readonly;
1614 /* Given a DECL and ATTR_LIST, process the listed attributes. */
/* NOTE(review): the return type line, braces, the break statements after
   each case and some guard/else lines are missing from this excerpt.  */
1617 process_attributes (tree decl, struct attrib *attr_list)
1619 for (; attr_list; attr_list = attr_list->next)
1620 switch (attr_list->type)
/* Pass a target machine attribute through to decl_attributes.  */
1622 case ATTR_MACHINE_ATTRIBUTE:
1623 input_location = DECL_SOURCE_LOCATION (decl);
1624 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1626 ATTR_FLAG_TYPE_IN_PLACE);
1629 case ATTR_LINK_ALIAS:
1630 if (! DECL_EXTERNAL (decl))
1632 TREE_STATIC (decl) = 1;
1633 assemble_alias (decl, attr_list->name);
/* Weak externals: emit a weak declaration when the target supports
   it, otherwise warn at the given error point.  */
1637 case ATTR_WEAK_EXTERNAL:
1639 declare_weak (decl);
1641 post_error ("?weak declarations not supported on this target",
1642 attr_list->error_point);
1645 case ATTR_LINK_SECTION:
1646 if (targetm.have_named_sections)
1648 DECL_SECTION_NAME (decl)
1649 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1650 IDENTIFIER_POINTER (attr_list->name));
1651 DECL_COMMON (decl) = 0;
1654 post_error ("?section attributes are not supported for this target",
1655 attr_list->error_point);
1658 case ATTR_LINK_CONSTRUCTOR:
1659 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1660 TREE_USED (decl) = 1;
1663 case ATTR_LINK_DESTRUCTOR:
1664 DECL_STATIC_DESTRUCTOR (decl) = 1;
1665 TREE_USED (decl) = 1;
/* TLS variables cannot be COMMON.  */
1668 case ATTR_THREAD_LOCAL_STORAGE:
1669 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1670 DECL_COMMON (decl) = 0;
1675 /* Record DECL as a global renaming pointer. */
/* DECL must already have its renamed object set; it is pushed onto the
   GC-managed global_renaming_pointers vector for later invalidation.  */
1678 record_global_renaming_pointer (tree decl)
1680 gcc_assert (DECL_RENAMED_OBJECT (decl));
1681 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1684 /* Invalidate the global renaming pointers. */
/* Clears DECL_RENAMED_OBJECT on every recorded decl, then releases the
   vector.  NOTE(review): the declarations of I and ITER are not visible
   in this excerpt.  */
1687 invalidate_global_renaming_pointers (void)
1692 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1693 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1695 VEC_free (tree, gc, global_renaming_pointers);
1698 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
/* NOTE(review): the tail of the comment above (presumably constraining
   FACTOR to a power of 2), the return type and the final fall-through
   return are missing from this excerpt.  */
1702 value_factor_p (tree value, HOST_WIDE_INT factor)
1704 if (host_integerp (value, 1))
1705 return tree_low_cst (value, 1) % factor == 0;
/* For a product, it suffices that either operand be a multiple.  */
1707 if (TREE_CODE (value) == MULT_EXPR)
1708 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1709 || value_factor_p (TREE_OPERAND (value, 1), factor));
1714 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1715 unless we can prove these 2 fields are laid out in such a way that no gap
1716 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1717 is the distance in bits between the end of PREV_FIELD and the starting
1718 position of CURR_FIELD. It is ignored if null. */
/* NOTE(review): the return type line, braces and the explicit return
   true/false statements are missing from this excerpt.  */
1721 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1723 /* If this is the first field of the record, there cannot be any gap */
1727 /* If the previous field is a union type, then return False: The only
1728 time when such a field is not the last field of the record is when
1729 there are other components at fixed positions after it (meaning there
1730 was a rep clause for every field), in which case we don't want the
1731 alignment constraint to override them. */
1732 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1735 /* If the distance between the end of prev_field and the beginning of
1736 curr_field is constant, then there is a gap if the value of this
1737 constant is not null. */
1738 if (offset && host_integerp (offset, 1))
1739 return !integer_zerop (offset);
1741 /* If the size and position of the previous field are constant,
1742 then check the sum of this size and position. There will be a gap
1743 iff it is not multiple of the current field alignment. */
1744 if (host_integerp (DECL_SIZE (prev_field), 1)
1745 && host_integerp (bit_position (prev_field), 1))
1746 return ((tree_low_cst (bit_position (prev_field), 1)
1747 + tree_low_cst (DECL_SIZE (prev_field), 1))
1748 % DECL_ALIGN (curr_field) != 0);
1750 /* If both the position and size of the previous field are multiples
1751 of the current field alignment, there cannot be any gap. */
1752 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1753 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1756 /* Fallback, return that there may be a potential gap */
1760 /* Returns a LABEL_DECL node for LABEL_NAME. */
/* NOTE(review): the return type line and the return of LABEL_DECL are
   missing from this excerpt.  */
1763 create_label_decl (tree label_name)
1765 tree label_decl = build_decl (input_location,
1766 LABEL_DECL, label_name, void_type_node);
1768 DECL_CONTEXT (label_decl) = current_function_decl;
1769 DECL_MODE (label_decl) = VOIDmode;
1770 DECL_SOURCE_LOCATION (label_decl) = input_location;
1775 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1776 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1777 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1778 PARM_DECL nodes chained through the TREE_CHAIN field).
1780 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1781 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
/* NOTE(review): several lines are missing from this excerpt, including the
   return type, parts of the two build_decl calls, the guard before the
   nested-inlining workaround, and the `if (asm_name)` guard before
   SET_DECL_ASSEMBLER_NAME.  */
1784 create_subprog_decl (tree subprog_name, tree asm_name,
1785 tree subprog_type, tree param_decl_list, bool inline_flag,
1786 bool public_flag, bool extern_flag,
1787 struct attrib *attr_list, Node_Id gnat_node)
1789 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1791 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1792 TREE_TYPE (subprog_type));
1794 /* If this is a non-inline function nested inside an inlined external
1795 function, we cannot honor both requests without cloning the nested
1796 function in the current unit since it is private to the other unit.
1797 We could inline the nested function as well but it's probably better
1798 to err on the side of too little inlining. */
1800 && current_function_decl
1801 && DECL_DECLARED_INLINE_P (current_function_decl)
1802 && DECL_EXTERNAL (current_function_decl))
1803 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1805 DECL_EXTERNAL (subprog_decl) = extern_flag;
1806 TREE_PUBLIC (subprog_decl) = public_flag;
1807 TREE_STATIC (subprog_decl) = 1;
1808 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1809 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1810 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1811 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1812 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1814 DECL_ARTIFICIAL (result_decl) = 1;
1815 DECL_IGNORED_P (result_decl) = 1;
/* TREE_ADDRESSABLE on the FUNCTION_TYPE encodes return by invisible
   reference (see create_subprog_type).  */
1816 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1817 DECL_RESULT (subprog_decl) = result_decl;
1821 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1823 /* The expand_main_function circuitry expects "main_identifier_node" to
1824 designate the DECL_NAME of the 'main' entry point, in turn expected
1825 to be declared as the "main" function literally by default. Ada
1826 program entry points are typically declared with a different name
1827 within the binder generated file, exported as 'main' to satisfy the
1828 system expectations. Force main_identifier_node in this case. */
1829 if (asm_name == main_identifier_node)
1830 DECL_NAME (subprog_decl) = main_identifier_node;
1833 /* Add this decl to the current binding level. */
1834 gnat_pushdecl (subprog_decl, gnat_node);
1836 process_attributes (subprog_decl, attr_list);
1838 /* Output the assembler code and/or RTL for the declaration. */
1839 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1841 return subprog_decl;
1844 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1845 body. This routine needs to be invoked before processing the declarations
1846 appearing in the subprogram. */
/* NOTE(review): the return type line, the declaration of PARAM_DECL and
   the gnat_pushlevel-style call implied by the "Enter a new binding
   level" comment are missing from this excerpt.  */
1849 begin_subprog_body (tree subprog_decl)
1853 announce_function (subprog_decl);
1855 current_function_decl = subprog_decl;
1857 /* Enter a new binding level and show that all the parameters belong to
/* Reparent every PARM_DECL onto the subprogram.  */
1861 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1862 param_decl = TREE_CHAIN (param_decl))
1863 DECL_CONTEXT (param_decl) = subprog_decl;
1865 make_decl_rtl (subprog_decl);
1867 /* We handle pending sizes via the elaboration of types, so we don't need to
1868 save them. This causes them to be marked as part of the outer function
1869 and then discarded. */
1870 get_pending_sizes ();
1873 /* Finish the definition of the current subprogram BODY and finalize it. */
/* NOTE(review): the return type line, the binding-level pop implied by the
   comment below, the early `return` for type_annotate_only, braces and an
   else before the nested-function branch are missing from this
   excerpt.  */
1876 end_subprog_body (tree body)
1878 tree fndecl = current_function_decl;
1880 /* Mark the BLOCK for this level as being for this function and pop the
1881 level. Since the vars in it are the parameters, clear them. */
1882 BLOCK_VARS (current_binding_level->block) = NULL_TREE;
1883 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1884 DECL_INITIAL (fndecl) = current_binding_level->block;
1887 /* We handle pending sizes via the elaboration of types, so we don't
1888 need to save them. */
1889 get_pending_sizes ();
1891 /* Mark the RESULT_DECL as being in this subprogram. */
1892 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1894 DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function as current.  */
1896 current_function_decl = DECL_CONTEXT (fndecl);
1898 /* We cannot track the location of errors past this point. */
1899 error_gnat_node = Empty;
1901 /* If we're only annotating types, don't actually compile this function. */
1902 if (type_annotate_only)
1905 /* Dump functions before gimplification. */
1906 dump_function (TDI_original, fndecl);
1908 /* ??? This special handling of nested functions is probably obsolete. */
1909 if (!DECL_CONTEXT (fndecl))
1910 cgraph_finalize_function (fndecl, false);
1912 /* Register this function with cgraph just far enough to get it
1913 added to our parent's nested function list. */
1914 (void) cgraph_node (fndecl);
1918 gnat_builtin_function (tree decl)
1920 gnat_pushdecl (decl, Empty);
1924 /* Return an integer type with the number of bits of precision given by
1925 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1926 it is a signed type. */
1929 gnat_type_for_size (unsigned precision, int unsignedp)
1934 if (precision <= 2 * MAX_BITS_PER_WORD
1935 && signed_and_unsigned_types[precision][unsignedp])
1936 return signed_and_unsigned_types[precision][unsignedp];
1939 t = make_unsigned_type (precision);
1941 t = make_signed_type (precision);
1943 if (precision <= 2 * MAX_BITS_PER_WORD)
1944 signed_and_unsigned_types[precision][unsignedp] = t;
1948 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1949 TYPE_NAME (t) = get_identifier (type_name);
1955 /* Likewise for floating-point types. */
1958 float_type_for_precision (int precision, enum machine_mode mode)
1963 if (float_types[(int) mode])
1964 return float_types[(int) mode];
1966 float_types[(int) mode] = t = make_node (REAL_TYPE);
1967 TYPE_PRECISION (t) = precision;
1970 gcc_assert (TYPE_MODE (t) == mode);
1973 sprintf (type_name, "FLOAT_%d", precision);
1974 TYPE_NAME (t) = get_identifier (type_name);
1980 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1981 an unsigned type; otherwise a signed type is returned. */
1984 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1986 if (mode == BLKmode)
1989 if (mode == VOIDmode)
1990 return void_type_node;
1992 if (COMPLEX_MODE_P (mode))
1995 if (SCALAR_FLOAT_MODE_P (mode))
1996 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1998 if (SCALAR_INT_MODE_P (mode))
1999 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2001 if (VECTOR_MODE_P (mode))
2003 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2004 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2006 return build_vector_type_for_mode (inner_type, mode);
2012 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2015 gnat_unsigned_type (tree type_node)
2017 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2019 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2021 type = copy_node (type);
2022 TREE_TYPE (type) = type_node;
2024 else if (TREE_TYPE (type_node)
2025 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2026 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2028 type = copy_node (type);
2029 TREE_TYPE (type) = TREE_TYPE (type_node);
2035 /* Return the signed version of a TYPE_NODE, a scalar type. */
2038 gnat_signed_type (tree type_node)
2040 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2042 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2044 type = copy_node (type);
2045 TREE_TYPE (type) = type_node;
2047 else if (TREE_TYPE (type_node)
2048 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2049 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2051 type = copy_node (type);
2052 TREE_TYPE (type) = TREE_TYPE (type_node);
2058 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2059 transparently converted to each other. */
2062 gnat_types_compatible_p (tree t1, tree t2)
2064 enum tree_code code;
2066 /* This is the default criterion. */
2067 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2070 /* We only check structural equivalence here. */
2071 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2074 /* Vector types are also compatible if they have the same number of subparts
2075 and the same form of (scalar) element type. */
2076 if (code == VECTOR_TYPE
2077 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2078 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2079 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2082 /* Array types are also compatible if they are constrained and have
2083 the same component type and the same domain. */
2084 if (code == ARRAY_TYPE
2085 && TREE_TYPE (t1) == TREE_TYPE (t2)
2086 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2087 || (TYPE_DOMAIN (t1)
2089 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2090 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2091 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2092 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2095 /* Padding record types are also compatible if they pad the same
2096 type and have the same constant size. */
2097 if (code == RECORD_TYPE
2098 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2099 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2100 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2106 /* EXP is an expression for the size of an object.  If this size contains
2107 discriminant references, replace them with the maximum (if MAX_P) or
2108 minimum (if !MAX_P) possible value of the discriminant. */
/* NOTE(review): this listing is missing several structural lines (braces,
   declarations, case labels and returns); the comments below describe only
   what the visible code establishes.  */
2111 max_size (tree exp, bool max_p)
2113 enum tree_code code = TREE_CODE (exp);
2114 tree type = TREE_TYPE (exp);
2116 switch (TREE_CODE_CLASS (code))
2118 case tcc_declaration:
/* For a call, first try to inline it; if that succeeds, recurse on the
   inlined expression.  Otherwise rebuild the call with each argument
   replaced by its max_size bound.  */
2123 if (code == CALL_EXPR)
2128 t = maybe_inline_call_in_expr (exp);
2130 return max_size (t, max_p);
2132 n = call_expr_nargs (exp);
2134 argarray = (tree *) alloca (n * sizeof (tree));
2135 for (i = 0; i < n; i++)
2136 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2137 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2142 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2143 modify.  Otherwise, we treat it like a variable. */
2144 if (!CONTAINS_PLACEHOLDER_P (exp))
/* Discriminant reference: substitute the extreme value of the referenced
   field's type; note the recursion always passes max_p = true here.  */
2147 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2149 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* A comparison is bounded by 1 (max) or 0 (min) since it yields 0/1.  */
2151 case tcc_comparison:
2152 return max_p ? size_one_node : size_zero_node;
2156 case tcc_expression:
2157 switch (TREE_CODE_LENGTH (code))
2160 if (code == NON_LVALUE_EXPR)
2161 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary operator: NEGATE_EXPR flips the direction of the bound.  */
2164 fold_build1 (code, type,
2165 max_size (TREE_OPERAND (exp, 0),
2166 code == NEGATE_EXPR ? !max_p : max_p));
2169 if (code == COMPOUND_EXPR)
2170 return max_size (TREE_OPERAND (exp, 1), max_p);
/* Binary operator: the RHS direction is flipped for MINUS_EXPR.  */
2173 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2174 tree rhs = max_size (TREE_OPERAND (exp, 1),
2175 code == MINUS_EXPR ? !max_p : max_p);
2177 /* Special-case wanting the maximum value of a MIN_EXPR.
2178 In that case, if one side overflows, return the other.
2179 sizetype is signed, but we know sizes are non-negative.
2180 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2181 overflowing and the RHS a variable. */
2184 && TREE_CODE (rhs) == INTEGER_CST
2185 && TREE_OVERFLOW (rhs))
2189 && TREE_CODE (lhs) == INTEGER_CST
2190 && TREE_OVERFLOW (lhs))
2192 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2193 && TREE_CODE (lhs) == INTEGER_CST
2194 && TREE_OVERFLOW (lhs)
2195 && !TREE_CONSTANT (rhs))
2198 return fold_build2 (code, type, lhs, rhs);
/* Ternary forms: a COND_EXPR's bound is the MAX/MIN of its two arms.  */
2202 if (code == SAVE_EXPR)
2204 else if (code == COND_EXPR)
2205 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2206 max_size (TREE_OPERAND (exp, 1), max_p),
2207 max_size (TREE_OPERAND (exp, 2), max_p));
2210 /* Other tree classes cannot happen. */
2218 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2219 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2220 Return a constructor for the template. */
/* NOTE(review): structural lines (braces, part of the for-header) are
   missing from this listing; comments describe the visible logic only.  */
2223 build_template (tree template_type, tree array_type, tree expr)
2225 tree template_elts = NULL_TREE;
2226 tree bound_list = NULL_TREE;
/* Strip padding / justified-modular wrappers to reach the actual array.  */
2229 while (TREE_CODE (array_type) == RECORD_TYPE
2230 && (TYPE_PADDING_P (array_type)
2231 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2232 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2234 if (TREE_CODE (array_type) == ARRAY_TYPE
2235 || (TREE_CODE (array_type) == INTEGER_TYPE
2236 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2237 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2239 /* First make the list for a CONSTRUCTOR for the template. Go down the
2240 field list of the template instead of the type chain because this
2241 array might be an Ada array of arrays and we can't tell where the
2242 nested arrays stop being the underlying object. */
/* The template's fields come in (low bound, high bound) pairs, hence the
   double TREE_CHAIN advance per iteration.  */
2244 for (field = TYPE_FIELDS (template_type); field;
2246 ? (bound_list = TREE_CHAIN (bound_list))
2247 : (array_type = TREE_TYPE (array_type))),
2248 field = TREE_CHAIN (TREE_CHAIN (field)))
2250 tree bounds, min, max;
2252 /* If we have a bound list, get the bounds from there. Likewise
2253 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2254 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2255 This will give us a maximum range. */
2257 bounds = TREE_VALUE (bound_list);
2258 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2259 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2260 else if (expr && TREE_CODE (expr) == PARM_DECL
2261 && DECL_BY_COMPONENT_PTR_P (expr))
2262 bounds = TREE_TYPE (field);
/* Convert the bounds to the types of the template's low/high fields.  */
2266 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2267 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2269 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2270 substitute it from OBJECT. */
2271 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr)
2272 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Elements are consed in reverse; nreverse below restores field order.  */
2274 template_elts = tree_cons (TREE_CHAIN (field), max,
2275 tree_cons (field, min, template_elts));
2278 return gnat_build_constructor (template_type, nreverse (template_elts));
2281 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2282 descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
2283 type contains in its DECL_INITIAL the expression to use when a constructor
2284 is made for the type.  GNAT_ENTITY is an entity used to print out an error
2285 message if the mechanism cannot be applied to an object of that type and
2286 also for the name. */
/* NOTE(review): many structural lines (braces, case labels, declarations)
   are missing from this listing; comments below cover the visible code.  */
2289 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2291 tree record_type = make_node (RECORD_TYPE);
2292 tree pointer32_type;
2293 tree field_list = 0;
2302 /* If TYPE is an unconstrained array, use the underlying array type. */
2303 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2304 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2306 /* If this is an array, compute the number of dimensions in the array,
2307 get the index types, and point to the inner type. */
2308 if (TREE_CODE (type) != ARRAY_TYPE)
2311 for (ndim = 1, inner_type = type;
2312 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2313 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2314 ndim++, inner_type = TREE_TYPE (inner_type))
2317 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores index types in reverse dimension order,
   except for the NCA mechanisms.  */
2319 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2320 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2321 for (i = ndim - 1, inner_type = type;
2323 i--, inner_type = TREE_TYPE (inner_type))
2324 idx_arr[i] = TYPE_DOMAIN (inner_type);
2326 for (i = 0, inner_type = type;
2328 i++, inner_type = TREE_TYPE (inner_type))
2329 idx_arr[i] = TYPE_DOMAIN (inner_type);
2331 /* Now get the DTYPE value. */
/* DTYPE is selected from the GCC type's code, bitsize and signedness;
   VAX floating-point types are distinguished via TYPE_DIGITS_VALUE.  */
2332 switch (TREE_CODE (type))
2337 if (TYPE_VAX_FLOATING_POINT_P (type))
2338 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2351 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2354 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2357 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2360 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2363 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2366 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2372 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2376 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2377 && TYPE_VAX_FLOATING_POINT_P (type))
2378 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2390 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2401 /* Get the CLASS value. */
2404 case By_Descriptor_A:
2405 case By_Short_Descriptor_A:
2408 case By_Descriptor_NCA:
2409 case By_Short_Descriptor_NCA:
2412 case By_Descriptor_SB:
2413 case By_Short_Descriptor_SB:
2417 case By_Short_Descriptor:
2418 case By_Descriptor_S:
2419 case By_Short_Descriptor_S:
2425 /* Make the type for a descriptor for VMS.  The first four fields are the
2426 same for all types. */
/* Common header: LENGTH (16 bits), DTYPE (8), CLASS (8), then POINTER.  */
2428 = chainon (field_list,
2429 make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1),
2432 ((mech == By_Descriptor_A
2433 || mech == By_Short_Descriptor_A)
2434 ? inner_type : type)));
2436 = chainon (field_list,
2437 make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2438 record_type, size_int (dtype)));
2440 = chainon (field_list,
2441 make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2442 record_type, size_int (klass)));
2444 /* Of course this will crash at run time if the address space is not
2445 within the low 32 bits, but there is nothing else we can do. */
2446 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2449 = chainon (field_list,
2450 make_descriptor_field ("POINTER", pointer32_type, record_type,
2451 build_unary_op (ADDR_EXPR,
2453 build0 (PLACEHOLDER_EXPR,
/* Class-specific tail: S descriptors are complete; SB adds its bounds;
   A/NCA add scale, digits, flags, dim count, size, coefficients, bounds.  */
2459 case By_Short_Descriptor:
2460 case By_Descriptor_S:
2461 case By_Short_Descriptor_S:
2464 case By_Descriptor_SB:
2465 case By_Short_Descriptor_SB:
2467 = chainon (field_list,
2468 make_descriptor_field
2469 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2470 TREE_CODE (type) == ARRAY_TYPE
2471 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2473 = chainon (field_list,
2474 make_descriptor_field
2475 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2476 TREE_CODE (type) == ARRAY_TYPE
2477 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2480 case By_Descriptor_A:
2481 case By_Short_Descriptor_A:
2482 case By_Descriptor_NCA:
2483 case By_Short_Descriptor_NCA:
2484 field_list = chainon (field_list,
2485 make_descriptor_field ("SCALE",
2486 gnat_type_for_size (8, 1),
2490 field_list = chainon (field_list,
2491 make_descriptor_field ("DIGITS",
2492 gnat_type_for_size (8, 1),
2497 = chainon (field_list,
2498 make_descriptor_field
2499 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2500 size_int ((mech == By_Descriptor_NCA ||
2501 mech == By_Short_Descriptor_NCA)
2503 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2504 : (TREE_CODE (type) == ARRAY_TYPE
2505 && TYPE_CONVENTION_FORTRAN_P (type)
2508 field_list = chainon (field_list,
2509 make_descriptor_field ("DIMCT",
2510 gnat_type_for_size (8, 1),
2514 field_list = chainon (field_list,
2515 make_descriptor_field ("ARSIZE",
2516 gnat_type_for_size (32, 1),
2518 size_in_bytes (type)));
2520 /* Now build a pointer to the 0,0,0... element. */
2521 tem = build0 (PLACEHOLDER_EXPR, type);
2522 for (i = 0, inner_type = type; i < ndim;
2523 i++, inner_type = TREE_TYPE (inner_type))
2524 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2525 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2526 NULL_TREE, NULL_TREE);
2529 = chainon (field_list,
2530 make_descriptor_field
2532 build_pointer_type_for_mode (inner_type, SImode, false),
2535 build_pointer_type_for_mode (inner_type, SImode,
2539 /* Next come the addressing coefficients. */
/* Each coefficient is the running product of the extents of the
   preceding dimensions (extent = max - min + 1).  */
2540 tem = size_one_node;
2541 for (i = 0; i < ndim; i++)
2545 = size_binop (MULT_EXPR, tem,
2546 size_binop (PLUS_EXPR,
2547 size_binop (MINUS_EXPR,
2548 TYPE_MAX_VALUE (idx_arr[i]),
2549 TYPE_MIN_VALUE (idx_arr[i])),
2552 fname[0] = ((mech == By_Descriptor_NCA ||
2553 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2554 fname[1] = '0' + i, fname[2] = 0;
2556 = chainon (field_list,
2557 make_descriptor_field (fname,
2558 gnat_type_for_size (32, 1),
2559 record_type, idx_length));
2561 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2565 /* Finally here are the bounds. */
2566 for (i = 0; i < ndim; i++)
2570 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2572 = chainon (field_list,
2573 make_descriptor_field
2574 (fname, gnat_type_for_size (32, 1), record_type,
2575 TYPE_MIN_VALUE (idx_arr[i])));
2579 = chainon (field_list,
2580 make_descriptor_field
2581 (fname, gnat_type_for_size (32, 1), record_type,
2582 TYPE_MAX_VALUE (idx_arr[i])));
2587 post_error ("unsupported descriptor type for &", gnat_entity);
2590 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2591 finish_record_type (record_type, field_list, 0, false);
2595 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2596 descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
2597 type contains in its DECL_INITIAL the expression to use when a constructor
2598 is made for the type.  GNAT_ENTITY is an entity used to print out an error
2599 message if the mechanism cannot be applied to an object of that type and
2600 also for the name. */
/* NOTE(review): 64-bit sibling of build_vms_descriptor32; this listing is
   missing structural lines, so comments cover only the visible code.  */
2603 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2605 tree record64_type = make_node (RECORD_TYPE);
2606 tree pointer64_type;
2607 tree field_list64 = 0;
2616 /* If TYPE is an unconstrained array, use the underlying array type. */
2617 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2618 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2620 /* If this is an array, compute the number of dimensions in the array,
2621 get the index types, and point to the inner type. */
2622 if (TREE_CODE (type) != ARRAY_TYPE)
2625 for (ndim = 1, inner_type = type;
2626 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2627 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2628 ndim++, inner_type = TREE_TYPE (inner_type))
2631 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* As in the 32-bit variant, Fortran-convention arrays (except NCA)
   record their index types in reverse dimension order.  */
2633 if (mech != By_Descriptor_NCA
2634 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2635 for (i = ndim - 1, inner_type = type;
2637 i--, inner_type = TREE_TYPE (inner_type))
2638 idx_arr[i] = TYPE_DOMAIN (inner_type);
2640 for (i = 0, inner_type = type;
2642 i++, inner_type = TREE_TYPE (inner_type))
2643 idx_arr[i] = TYPE_DOMAIN (inner_type);
2645 /* Now get the DTYPE value. */
/* Same DTYPE encoding as the 32-bit variant (bitsize/signedness based).  */
2646 switch (TREE_CODE (type))
2651 if (TYPE_VAX_FLOATING_POINT_P (type))
2652 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2665 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2668 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2671 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2674 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2677 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2680 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2686 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2690 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2691 && TYPE_VAX_FLOATING_POINT_P (type))
2692 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2704 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2715 /* Get the CLASS value. */
2718 case By_Descriptor_A:
2721 case By_Descriptor_NCA:
2724 case By_Descriptor_SB:
2728 case By_Descriptor_S:
2734 /* Make the type for a 64-bit descriptor for VMS.  The first six fields
2735 are the same for all types. */
/* 64-bit header: MBO (must-be-one), DTYPE, CLASS, MBMO (must-be-minus-one),
   LENGTH (64 bits), then POINTER.  */
2737 = chainon (field_list64,
2738 make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2739 record64_type, size_int (1)));
2741 = chainon (field_list64,
2742 make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2743 record64_type, size_int (dtype)));
2745 = chainon (field_list64,
2746 make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2747 record64_type, size_int (klass)));
2749 = chainon (field_list64,
2750 make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2751 record64_type, ssize_int (-1)));
2753 = chainon (field_list64,
2754 make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2756 size_in_bytes (mech == By_Descriptor_A
2757 ? inner_type : type)));
2759 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2762 = chainon (field_list64,
2763 make_descriptor_field ("POINTER", pointer64_type,
2765 build_unary_op (ADDR_EXPR,
2767 build0 (PLACEHOLDER_EXPR,
/* Class-specific tail, parallel to the 32-bit variant but with 64-bit
   bound/coefficient fields and an extra MBZ field.  */
2773 case By_Descriptor_S:
2776 case By_Descriptor_SB:
2778 = chainon (field_list64,
2779 make_descriptor_field
2780 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2781 TREE_CODE (type) == ARRAY_TYPE
2782 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2784 = chainon (field_list64,
2785 make_descriptor_field
2786 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2787 TREE_CODE (type) == ARRAY_TYPE
2788 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2791 case By_Descriptor_A:
2792 case By_Descriptor_NCA:
2793 field_list64 = chainon (field_list64,
2794 make_descriptor_field ("SCALE",
2795 gnat_type_for_size (8, 1),
2799 field_list64 = chainon (field_list64,
2800 make_descriptor_field ("DIGITS",
2801 gnat_type_for_size (8, 1),
2806 = chainon (field_list64,
2807 make_descriptor_field
2808 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2809 size_int (mech == By_Descriptor_NCA
2811 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2812 : (TREE_CODE (type) == ARRAY_TYPE
2813 && TYPE_CONVENTION_FORTRAN_P (type)
2816 field_list64 = chainon (field_list64,
2817 make_descriptor_field ("DIMCT",
2818 gnat_type_for_size (8, 1),
2822 field_list64 = chainon (field_list64,
2823 make_descriptor_field ("MBZ",
2824 gnat_type_for_size (32, 1),
2827 field_list64 = chainon (field_list64,
2828 make_descriptor_field ("ARSIZE",
2829 gnat_type_for_size (64, 1),
2831 size_in_bytes (type)));
2833 /* Now build a pointer to the 0,0,0... element. */
2834 tem = build0 (PLACEHOLDER_EXPR, type);
2835 for (i = 0, inner_type = type; i < ndim;
2836 i++, inner_type = TREE_TYPE (inner_type))
2837 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2838 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2839 NULL_TREE, NULL_TREE);
2842 = chainon (field_list64,
2843 make_descriptor_field
2845 build_pointer_type_for_mode (inner_type, DImode, false),
2848 build_pointer_type_for_mode (inner_type, DImode,
2852 /* Next come the addressing coefficients. */
/* Running product of the extents of the preceding dimensions.  */
2853 tem = size_one_node;
2854 for (i = 0; i < ndim; i++)
2858 = size_binop (MULT_EXPR, tem,
2859 size_binop (PLUS_EXPR,
2860 size_binop (MINUS_EXPR,
2861 TYPE_MAX_VALUE (idx_arr[i]),
2862 TYPE_MIN_VALUE (idx_arr[i])),
2865 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2866 fname[1] = '0' + i, fname[2] = 0;
2868 = chainon (field_list64,
2869 make_descriptor_field (fname,
2870 gnat_type_for_size (64, 1),
2871 record64_type, idx_length));
2873 if (mech == By_Descriptor_NCA)
2877 /* Finally here are the bounds. */
2878 for (i = 0; i < ndim; i++)
2882 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2884 = chainon (field_list64,
2885 make_descriptor_field
2886 (fname, gnat_type_for_size (64, 1), record64_type,
2887 TYPE_MIN_VALUE (idx_arr[i])));
2891 = chainon (field_list64,
2892 make_descriptor_field
2893 (fname, gnat_type_for_size (64, 1), record64_type,
2894 TYPE_MAX_VALUE (idx_arr[i])));
2899 post_error ("unsupported descriptor type for &", gnat_entity);
2902 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
2903 finish_record_type (record64_type, field_list64, 0, false);
2904 return record64_type;
2907 /* Utility routine for above code to make a field. */
/* Builds a FIELD_DECL named NAME of TYPE inside REC_TYPE and stashes
   INITIAL in its DECL_INITIAL; the descriptor builders above read that
   DECL_INITIAL back when constructing descriptor values.  NOTE(review):
   the trailing create_field_decl arguments are elided in this listing.  */
2910 make_descriptor_field (const char *name, tree type,
2911 tree rec_type, tree initial)
2914 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
2917 DECL_INITIAL (field) = initial;
2921 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
2922 regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
2923 which the VMS descriptor is passed. */
/* NOTE(review): structural lines are missing from this listing; the
   field-offset arithmetic below depends on the 64-bit descriptor layout
   produced by build_vms_descriptor (MBO, DTYPE, CLASS, MBMO, LENGTH,
   POINTER, ...).  */
2926 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2928 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2929 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2930 /* The CLASS field is the 3rd field in the descriptor. */
2931 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2932 /* The POINTER field is the 6th field in the descriptor. */
2933 tree pointer = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
2935 /* Retrieve the value of the POINTER field. */
2937 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* For a thin pointer target, the descriptor's data pointer is enough.  */
2939 if (POINTER_TYPE_P (gnu_type))
2940 return convert (gnu_type, gnu_expr64);
2942 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
2944 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2945 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2946 tree template_type = TREE_TYPE (p_bounds_type);
2947 tree min_field = TYPE_FIELDS (template_type);
2948 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2949 tree template_tree, template_addr, aflags, dimct, t, u;
2950 /* See the head comment of build_vms_descriptor. */
2951 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
2952 tree lfield, ufield;
2954 /* Convert POINTER to the pointer-to-array type. */
2955 gnu_expr64 = convert (p_array_type, gnu_expr64);
/* Dispatch on the descriptor CLASS recorded at build time.  */
2959 case 1: /* Class S */
2960 case 15: /* Class SB */
2961 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
2962 t = TREE_CHAIN (TREE_CHAIN (klass));
2963 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2964 t = tree_cons (min_field,
2965 convert (TREE_TYPE (min_field), integer_one_node),
2966 tree_cons (max_field,
2967 convert (TREE_TYPE (max_field), t),
2969 template_tree = gnat_build_constructor (template_type, t);
2970 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
2972 /* For class S, we are done. */
2976 /* Test that we really have a SB descriptor, like DEC Ada. */
2977 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
2978 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
2979 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
2980 /* If so, there is already a template in the descriptor and
2981 it is located right after the POINTER field.  The fields are
2982 64bits so they must be repacked. */
2983 t = TREE_CHAIN (pointer);
2984 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2985 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
2988 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2990 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
2992 /* Build the template in the form of a constructor. */
2993 t = tree_cons (TYPE_FIELDS (template_type), lfield,
2994 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
2995 ufield, NULL_TREE));
2996 template_tree = gnat_build_constructor (template_type, t);
2998 /* Otherwise use the {1, LENGTH} template we build above. */
2999 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3000 build_unary_op (ADDR_EXPR, p_bounds_type,
3005 case 4: /* Class A */
3006 /* The AFLAGS field is the 3rd field after the pointer in the
3008 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3009 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3010 /* The DIMCT field is the next field in the descriptor after
3013 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3014 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3015 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == FL_COEFF | FL_BOUNDS mask tested against AFLAGS below.  */
3016 u = build_int_cst (TREE_TYPE (aflags), 192);
3017 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3018 build_binary_op (NE_EXPR, boolean_type_node,
3020 convert (TREE_TYPE (dimct),
3022 build_binary_op (NE_EXPR, boolean_type_node,
3023 build2 (BIT_AND_EXPR,
3027 /* There is already a template in the descriptor and it is located
3028 in block 3.  The fields are 64bits so they must be repacked. */
3029 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3031 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3032 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3035 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3037 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3039 /* Build the template in the form of a constructor. */
3040 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3041 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3042 ufield, NULL_TREE));
3043 template_tree = gnat_build_constructor (template_type, t);
/* Guard the template with the validity check built in U above.  */
3044 template_tree = build3 (COND_EXPR, template_type, u,
3045 build_call_raise (CE_Length_Check_Failed, Empty,
3046 N_Raise_Constraint_Error),
3049 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3052 case 10: /* Class NCA */
3054 post_error ("unsupported descriptor type for &", gnat_subprog);
3055 template_addr = integer_zero_node;
3059 /* Build the fat pointer in the form of a constructor. */
3060 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3061 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3062 template_addr, NULL_TREE));
3063 return gnat_build_constructor (gnu_type, t);
3070 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3071 regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
3072 which the VMS descriptor is passed. */
/* NOTE(review): 32-bit sibling of convert_vms_descriptor64; offsets here
   match the layout built by build_vms_descriptor32 (LENGTH, DTYPE, CLASS,
   POINTER, ...).  Structural lines are elided in this listing.  */
3075 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3077 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3078 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3079 /* The CLASS field is the 3rd field in the descriptor. */
3080 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3081 /* The POINTER field is the 4th field in the descriptor. */
3082 tree pointer = TREE_CHAIN (klass);
3084 /* Retrieve the value of the POINTER field. */
3086 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* A thin pointer target only needs the descriptor's data pointer.  */
3088 if (POINTER_TYPE_P (gnu_type))
3089 return convert (gnu_type, gnu_expr32);
3091 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3093 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3094 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3095 tree template_type = TREE_TYPE (p_bounds_type);
3096 tree min_field = TYPE_FIELDS (template_type);
3097 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3098 tree template_tree, template_addr, aflags, dimct, t, u;
3099 /* See the head comment of build_vms_descriptor. */
3100 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3102 /* Convert POINTER to the pointer-to-array type. */
3103 gnu_expr32 = convert (p_array_type, gnu_expr32);
/* Dispatch on the descriptor CLASS recorded at build time.  */
3107 case 1: /* Class S */
3108 case 15: /* Class SB */
3109 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3110 t = TYPE_FIELDS (desc_type);
3111 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3112 t = tree_cons (min_field,
3113 convert (TREE_TYPE (min_field), integer_one_node),
3114 tree_cons (max_field,
3115 convert (TREE_TYPE (max_field), t),
3117 template_tree = gnat_build_constructor (template_type, t);
3118 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3120 /* For class S, we are done. */
3124 /* Test that we really have a SB descriptor, like DEC Ada. */
3125 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3126 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3127 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3128 /* If so, there is already a template in the descriptor and
3129 it is located right after the POINTER field. */
3130 t = TREE_CHAIN (pointer);
3132 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3133 /* Otherwise use the {1, LENGTH} template we build above. */
3134 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3135 build_unary_op (ADDR_EXPR, p_bounds_type,
3140 case 4: /* Class A */
3141 /* The AFLAGS field is the 7th field in the descriptor. */
3142 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3143 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3144 /* The DIMCT field is the 8th field in the descriptor. */
3146 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3147 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3148 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == FL_COEFF | FL_BOUNDS mask tested against AFLAGS below.  */
3149 u = build_int_cst (TREE_TYPE (aflags), 192);
3150 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3151 build_binary_op (NE_EXPR, boolean_type_node,
3153 convert (TREE_TYPE (dimct),
3155 build_binary_op (NE_EXPR, boolean_type_node,
3156 build2 (BIT_AND_EXPR,
3160 /* There is already a template in the descriptor and it is
3161 located at the start of block 3 (12th field). */
3162 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3164 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* Guard the template with the validity check built in U above.  */
3165 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3166 build_call_raise (CE_Length_Check_Failed, Empty,
3167 N_Raise_Constraint_Error),
3170 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3173 case 10: /* Class NCA */
3175 post_error ("unsupported descriptor type for &", gnat_subprog);
3176 template_addr = integer_zero_node;
3180 /* Build the fat pointer in the form of a constructor. */
3181 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3182 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3183 template_addr, NULL_TREE));
3185 return gnat_build_constructor (gnu_type, t);
3192 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3193 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3194 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3195 VMS descriptor is passed. */
3198 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3199 Entity_Id gnat_subprog)
/* NOTE(review): this excerpt is missing physical lines (the return-type line,
   braces, and the "is64bit" assignment head); code below is byte-identical to
   what is visible.  The routine dispatches between the 32-bit and 64-bit VMS
   descriptor conversions, deciding at run time via the MBO/MBMO fields.  */
3201 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
/* Dereference the descriptor pointer to get at its fields.  */
3202 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
/* First field of the descriptor record; its name discriminates the layout.  */
3203 tree mbo = TYPE_FIELDS (desc_type);
3204 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
/* MBMO is the 4th field of a 64-bit descriptor.  */
3205 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3206 tree is64bit, gnu_expr32, gnu_expr64;
3208 /* If the field name is not MBO, it must be 32-bit and no alternate.
3209 Otherwise primary must be 64-bit and alternate 32-bit. */
3210 if (strcmp (mbostr, "MBO") != 0)
3211 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3213 /* Build the test for 64-bit descriptor.  A 64-bit descriptor has MBO == 1
   and MBMO == -1 by construction; the comparison below checks both.  */
3214 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3215 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
/* NOTE(review): the "is64bit" LHS line is not visible in this excerpt;
   the RHS below is the conjunction of the two field tests.  */
3217 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3218 build_binary_op (EQ_EXPR, boolean_type_node,
3219 convert (integer_type_node, mbo),
3221 build_binary_op (EQ_EXPR, boolean_type_node,
3222 convert (integer_type_node, mbmo),
3223 integer_minus_one_node));
3225 /* Build the 2 possible end results.  The 32-bit conversion is done on the
   expression viewed through the alternate (32-bit) pointer type.  */
3226 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3227 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3228 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
/* Select between the two conversions at run time.  */
3230 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3233 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3234 and the GNAT node GNAT_SUBPROG. */
3237 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
/* NOTE(review): excerpt is missing lines (return type, braces, the "gnu_body"
   declaration, the loop termination condition, and some call arguments); code
   below is byte-identical to what is visible.  The stub translates descriptor
   parameters to by-reference ones and forwards the call to GNU_SUBPROG.  */
3239 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3240 tree gnu_stub_param, gnu_arg_types, gnu_param;
3241 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
/* Actual arguments collected for the forwarded call.  */
3243 VEC(tree,gc) *gnu_param_vec = NULL;
3245 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3247 begin_subprog_body (gnu_stub_decl);
3250 start_stmt_group ();
3252 /* Loop over the parameters of the stub and translate any of them
3253 passed by descriptor into a by reference one. */
3254 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3255 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3257 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3258 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3260 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
/* Descriptor parameter: convert to the target (by-reference) type,
   using the recorded 32-bit alternate pointer type.  */
3262 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3264 DECL_PARM_ALT_TYPE (gnu_stub_param),
/* Non-descriptor parameter: pass through unchanged.  */
3267 gnu_param = gnu_stub_param;
3269 VEC_safe_push (tree, gc, gnu_param_vec, gnu_param);
/* NOTE(review): "gnu_body" is assigned here but its declaration is not
   visible in this excerpt — presumably declared earlier in the function.  */
3272 gnu_body = end_stmt_group ();
3274 /* Invoke the internal subprogram. */
3275 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3277 gnu_subprog_call = build_call_vec (TREE_TYPE (gnu_subprog_type),
3278 gnu_subprog_addr, gnu_param_vec);
3280 /* Propagate the return value, if any.  A void result is just appended as a
   statement; otherwise a RETURN_EXPR is built on the stub's DECL_RESULT.  */
3281 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3282 append_to_statement_list (gnu_subprog_call, &gnu_body);
3284 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3290 allocate_struct_function (gnu_stub_decl, false);
3291 end_subprog_body (gnu_body);
3294 /* Build a type to be used to represent an aliased object whose nominal type
3295 is an unconstrained array. This consists of a RECORD_TYPE containing a
3296 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3297 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3298 an arbitrary unconstrained object. Use NAME as the name of the record.
3299 DEBUG_INFO_P is true if we need to write debug information for the type. */
3302 build_unc_object_type (tree template_type, tree object_type, tree name,
/* NOTE(review): the trailing parameter line (presumably "bool debug_info_p)"),
   the return type, braces, and the LHS lines of the two field declarations
   are not visible in this excerpt; code below is byte-identical to what is
   visible.  Builds a RECORD_TYPE { BOUNDS : template; ARRAY : object } used
   to represent an aliased unconstrained-array object.  */
3305 tree type = make_node (RECORD_TYPE);
/* BOUNDS field: holds the template (bounds) part.  */
3307 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3308 NULL_TREE, NULL_TREE, 0, 1);
/* ARRAY field: holds the data part.  */
3310 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3311 NULL_TREE, NULL_TREE, 0, 1);
3313 TYPE_NAME (type) = name;
/* Mark the record as containing a template so other code can detect it.  */
3314 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3315 TREE_CHAIN (template_field) = array_field;
3316 finish_record_type (type, template_field, 0, true);
3318 /* Declare it now since it will never be declared otherwise. This is
3319 necessary to ensure that its subtrees are properly marked. */
3320 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3325 /* Same, taking a thin or fat pointer type instead of a template type. */
3328 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3329 tree name, bool debug_info_p)
/* NOTE(review): the return type, braces, the "template_type" declaration head
   and the "return" keyword are not visible in this excerpt.  This wrapper
   extracts the template type from a thin or fat pointer and delegates to
   build_unc_object_type.  */
3333 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Fat pointer: template type is behind the second field's pointer type.
   Thin pointer: template type is the first field of the designated record.  */
3336 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3337 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3338 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3341 build_unc_object_type (template_type, object_type, name, debug_info_p);
3344 /* Shift the component offsets within an unconstrained object TYPE to make it
3345 suitable for use as a designated type for thin pointers. */
3348 shift_unc_components_for_thin_pointers (tree type)
/* NOTE(review): return type and braces are not visible in this excerpt.
   TYPE is expected to be the BOUNDS+ARRAY record built by
   build_unc_object_type — the two TYPE_FIELDS accesses below rely on
   exactly that field order.  */
3350 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3351 allocated past the BOUNDS template. The designated type is adjusted to
3352 have ARRAY at position zero and the template at a negative offset, so
3353 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3355 tree bounds_field = TYPE_FIELDS (type);
3356 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* Place BOUNDS at minus its own size, i.e. immediately before ARRAY.  */
3358 DECL_FIELD_OFFSET (bounds_field)
3359 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* And put ARRAY at offset zero, both byte and bit parts.  */
3361 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3362 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3365 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3366 In the normal case this is just two adjustments, but we have more to
3367 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3370 update_pointer_to (tree old_type, tree new_type)
/* NOTE(review): this excerpt is missing physical lines throughout (return
   type, braces, the declaration of T, several "return;" statements, and the
   heads of two update_pointer_to calls); code below is byte-identical to
   what is visible.  The routine redirects every pointer/reference type that
   designated OLD_TYPE so that it designates NEW_TYPE instead, with special
   handling when NEW_TYPE is an unconstrained array (fat-pointer record).  */
3372 tree ptr = TYPE_POINTER_TO (old_type);
3373 tree ref = TYPE_REFERENCE_TO (old_type);
3376 /* If this is the main variant, process all the other variants first. */
3377 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3378 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3379 update_pointer_to (t, new_type);
3381 /* If no pointers and no references, we are done. */
3385 /* Merge the old type qualifiers in the new type.
3387 Each old variant has qualifiers for specific reasons, and the new
3388 designated type as well. Each set of qualifiers represents useful
3389 information grabbed at some point, and merging the two simply unifies
3390 these inputs into the final type description.
3392 Consider for instance a volatile type frozen after an access to constant
3393 type designating it; after the designated type's freeze, we get here with
3394 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3395 when the access type was processed. We will make a volatile and readonly
3396 designated type, because that's what it really is.
3398 We might also get here for a non-dummy OLD_TYPE variant with different
3399 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3400 to private record type elaboration (see the comments around the call to
3401 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3402 the qualifiers in those cases too, to avoid accidentally discarding the
3403 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
/* NOTE(review): the LHS of this assignment (presumably "new_type") is not
   visible in this excerpt.  */
3405 = build_qualified_type (new_type,
3406 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3408 /* If old type and new type are identical, there is nothing to do. */
3409 if (old_type == new_type)
3412 /* Otherwise, first handle the simple case. */
3413 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3415 tree new_ptr, new_ref;
3417 /* If pointer or reference already points to new type, nothing to do.
3418 This can happen as update_pointer_to can be invoked multiple times
3419 on the same couple of types because of the type variants. */
3420 if ((ptr && TREE_TYPE (ptr) == new_type)
3421 || (ref && TREE_TYPE (ref) == new_type))
3424 /* Chain PTR and its variants at the end.  Walk to the tail of NEW_TYPE's
   existing pointer chain, then splice PTR's chain after it; otherwise PTR
   becomes NEW_TYPE's pointer chain directly.  */
3425 new_ptr = TYPE_POINTER_TO (new_type);
3428 while (TYPE_NEXT_PTR_TO (new_ptr))
3429 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3430 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3433 TYPE_POINTER_TO (new_type) = ptr;
3435 /* Now adjust them. */
3436 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3437 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3438 TREE_TYPE (t) = new_type;
3440 /* Chain REF and its variants at the end.  Same splicing as for pointers,
   but on the reference-type chain.  */
3441 new_ref = TYPE_REFERENCE_TO (new_type);
3444 while (TYPE_NEXT_REF_TO (new_ref))
3445 new_ref = TYPE_NEXT_REF_TO (new_ref);
3446 TYPE_NEXT_REF_TO (new_ref) = ref;
3449 TYPE_REFERENCE_TO (new_type) = ref;
3451 /* Now adjust them. */
3452 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3453 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3454 TREE_TYPE (t) = new_type;
3457 /* Now deal with the unconstrained array case. In this case the pointer
3458 is actually a record where both fields are pointers to dummy nodes.
3459 Turn them into pointers to the correct types using update_pointer_to. */
3462 tree new_ptr = TYPE_MAIN_VARIANT (TYPE_POINTER_TO (new_type));
3463 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3464 tree array_field, bounds_field, new_ref, last;
3466 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3468 /* If PTR already points to new type, nothing to do. This can happen
3469 since update_pointer_to can be invoked multiple times on the same
3470 couple of types because of the type variants. */
3471 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3474 array_field = TYPE_FIELDS (ptr);
3475 bounds_field = TREE_CHAIN (array_field);
3477 /* Make pointers to the dummy template point to the real template. */
/* NOTE(review): the head of this recursive update_pointer_to call is not
   visible in this excerpt.  */
3479 (TREE_TYPE (TREE_TYPE (bounds_field)),
3480 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3482 /* The references to the template bounds present in the array type use
3483 the bounds field of NEW_PTR through a PLACEHOLDER_EXPR. Since we
3484 are going to merge PTR in NEW_PTR, we must rework these references
3485 to use the bounds field of PTR instead. */
3486 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3487 build0 (PLACEHOLDER_EXPR, new_ptr),
3488 bounds_field, NULL_TREE);
3490 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3491 to the dummy array point to it. */
3493 (TREE_TYPE (TREE_TYPE (array_field)),
3494 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3495 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3497 /* Merge PTR in NEW_PTR: both fields now belong to NEW_PTR, and every
   variant of NEW_PTR shares PTR's field list.  */
3498 DECL_FIELD_CONTEXT (array_field) = new_ptr;
3499 DECL_FIELD_CONTEXT (bounds_field) = new_ptr;
3500 for (t = new_ptr; t; last = t, t = TYPE_NEXT_VARIANT (t))
3501 TYPE_FIELDS (t) = TYPE_FIELDS (ptr);
3503 /* Chain PTR and its variants at the end. */
3504 TYPE_NEXT_VARIANT (last) = TYPE_MAIN_VARIANT (ptr);
3506 /* Now adjust them. */
3507 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3509 TYPE_MAIN_VARIANT (t) = new_ptr;
3510 SET_TYPE_UNCONSTRAINED_ARRAY (t, new_type);
3513 /* And show the original pointer NEW_PTR to the debugger. This is the
3514 counterpart of the equivalent processing in gnat_pushdecl when the
3515 unconstrained array type is frozen after access types to it. */
3516 if (TYPE_NAME (ptr) && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL)
3518 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3519 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3522 /* Now handle updating the allocation record, what the thin pointer
3523 points to. Update all pointers from the old record into the new
3524 one, update the type of the array field, and recompute the size. */
3525 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3526 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3527 = TREE_TYPE (TREE_TYPE (array_field));
3529 /* The size recomputation needs to account for alignment constraints, so
3530 we let layout_type work it out. This will reset the field offsets to
3531 what they would be in a regular record, so we shift them back to what
3532 we want them to be for a thin pointer designated type afterwards. */
3533 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = NULL_TREE;
3534 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = NULL_TREE;
3535 TYPE_SIZE (new_obj_rec) = NULL_TREE;
3536 layout_type (new_obj_rec);
3537 shift_unc_components_for_thin_pointers (new_obj_rec);
3539 /* We are done, at last. */
3540 rest_of_record_type_compilation (ptr);
3544 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3545 unconstrained one. This involves making or finding a template. */
3548 convert_to_fat_pointer (tree type, tree expr)
/* NOTE(review): return type, braces, the "template_tree" declaration and the
   "return" keywords are not visible in this excerpt; code below is
   byte-identical to what is visible.  TYPE is a fat pointer type whose first
   field is the array pointer and second field the bounds pointer — the
   TYPE_FIELDS accesses below rely on that order.  */
3550 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3551 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3552 tree etype = TREE_TYPE (expr);
3555 /* If EXPR is null, make a fat pointer that contains null pointers to the
3556 template and array. */
3557 if (integer_zerop (expr))
3559 gnat_build_constructor
3561 tree_cons (TYPE_FIELDS (type),
3562 convert (p_array_type, expr),
3563 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3564 convert (build_pointer_type (template_type),
3568 /* If EXPR is a thin pointer, make template and data from the record. */
3569 else if (TYPE_IS_THIN_POINTER_P (etype))
3571 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Protect EXPR against multiple evaluation: it is used for both the
   template and the data reference below.  */
3573 expr = gnat_protect_expr (expr);
/* Strip a top-level ADDR_EXPR to reference the record directly; otherwise
   dereference the thin pointer.  */
3574 if (TREE_CODE (expr) == ADDR_EXPR)
3575 expr = TREE_OPERAND (expr, 0);
3577 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the designated record is the template, second the data.  */
3579 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3580 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3581 build_component_ref (expr, NULL_TREE,
3582 TREE_CHAIN (fields), false));
3585 /* Otherwise, build the constructor for the template. */
3587 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3589 /* The final result is a constructor for the fat pointer.
3591 If EXPR is an argument of a foreign convention subprogram, the type it
3592 points to is directly the component type. In this case, the expression
3593 type may not match the corresponding FIELD_DECL type at this point, so we
3594 call "convert" here to fix that up if necessary. This type consistency is
3595 required, for instance because it ensures that possible later folding of
3596 COMPONENT_REFs against this constructor always yields something of the
3597 same type as the initial reference.
3599 Note that the call to "build_template" above is still fine because it
3600 will only refer to the provided TEMPLATE_TYPE in this case. */
3602 gnat_build_constructor
3604 tree_cons (TYPE_FIELDS (type),
3605 convert (p_array_type, expr),
3606 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3607 build_unary_op (ADDR_EXPR, NULL_TREE,
3612 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3613 is something that is a fat pointer, so convert to it first if EXPR
3614 is not already a fat pointer. */
3617 convert_to_thin_pointer (tree type, tree expr)
/* NOTE(review): return type, braces, the "expr" assignment head and the
   trailing "return expr;" are not visible in this excerpt.  Converts EXPR to
   the thin pointer type TYPE, going through a fat pointer first when EXPR is
   not already one.  */
3619 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
/* Normalize: first convert EXPR to the fat pointer type of TYPE's
   unconstrained array.  */
3621 = convert_to_fat_pointer
3622 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3624 /* We get the pointer to the data and use a NOP_EXPR to make it the
   thin pointer (first field of the fat pointer is the array pointer).  */
3626 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3628 expr = build1 (NOP_EXPR, type, expr);
3633 /* Create an expression whose value is that of EXPR,
3634 converted to type TYPE. The TREE_TYPE of the value
3635 is always TYPE. This function implements all reasonable
3636 conversions; callers should filter out those that are
3637 not permitted by the language being compiled. */
3640 convert (tree type, tree expr)
3642 tree etype = TREE_TYPE (expr);
3643 enum tree_code ecode = TREE_CODE (etype);
3644 enum tree_code code = TREE_CODE (type);
3646 /* If the expression is already of the right type, we are done. */
3650 /* If both input and output have padding and are of variable size, do this
3651 as an unchecked conversion. Likewise if one is a mere variant of the
3652 other, so we avoid a pointless unpad/repad sequence. */
3653 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3654 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3655 && (!TREE_CONSTANT (TYPE_SIZE (type))
3656 || !TREE_CONSTANT (TYPE_SIZE (etype))
3657 || gnat_types_compatible_p (type, etype)
3658 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3659 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3662 /* If the output type has padding, convert to the inner type and make a
3663 constructor to build the record, unless a variable size is involved. */
3664 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3666 /* If we previously converted from another type and our type is
3667 of variable size, remove the conversion to avoid the need for
3668 variable-sized temporaries. Likewise for a conversion between
3669 original and packable version. */
3670 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3671 && (!TREE_CONSTANT (TYPE_SIZE (type))
3672 || (ecode == RECORD_TYPE
3673 && TYPE_NAME (etype)
3674 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3675 expr = TREE_OPERAND (expr, 0);
3677 /* If we are just removing the padding from expr, convert the original
3678 object if we have variable size in order to avoid the need for some
3679 variable-sized temporaries. Likewise if the padding is a variant
3680 of the other, so we avoid a pointless unpad/repad sequence. */
3681 if (TREE_CODE (expr) == COMPONENT_REF
3682 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3683 && (!TREE_CONSTANT (TYPE_SIZE (type))
3684 || gnat_types_compatible_p (type,
3685 TREE_TYPE (TREE_OPERAND (expr, 0)))
3686 || (ecode == RECORD_TYPE
3687 && TYPE_NAME (etype)
3688 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3689 return convert (type, TREE_OPERAND (expr, 0));
3691 /* If the inner type is of self-referential size and the expression type
3692 is a record, do this as an unchecked conversion. But first pad the
3693 expression if possible to have the same size on both sides. */
3694 if (ecode == RECORD_TYPE
3695 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3697 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3698 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3699 false, false, false, true), expr);
3700 return unchecked_convert (type, expr, false);
3703 /* If we are converting between array types with variable size, do the
3704 final conversion as an unchecked conversion, again to avoid the need
3705 for some variable-sized temporaries. If valid, this conversion is
3706 very likely purely technical and without real effects. */
3707 if (ecode == ARRAY_TYPE
3708 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3709 && !TREE_CONSTANT (TYPE_SIZE (etype))
3710 && !TREE_CONSTANT (TYPE_SIZE (type)))
3711 return unchecked_convert (type,
3712 convert (TREE_TYPE (TYPE_FIELDS (type)),
3717 gnat_build_constructor (type,
3718 tree_cons (TYPE_FIELDS (type),
3720 (TYPE_FIELDS (type)),
3725 /* If the input type has padding, remove it and convert to the output type.
3726 The conditions ordering is arranged to ensure that the output type is not
3727 a padding type here, as it is not clear whether the conversion would
3728 always be correct if this was to happen. */
3729 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3733 /* If we have just converted to this padded type, just get the
3734 inner expression. */
3735 if (TREE_CODE (expr) == CONSTRUCTOR
3736 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3737 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3738 == TYPE_FIELDS (etype))
3740 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3742 /* Otherwise, build an explicit component reference. */
3745 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3747 return convert (type, unpadded);
3750 /* If the input is a biased type, adjust first. */
3751 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3752 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3753 fold_convert (TREE_TYPE (etype),
3755 TYPE_MIN_VALUE (etype)));
3757 /* If the input is a justified modular type, we need to extract the actual
3758 object before converting it to any other type with the exceptions of an
3759 unconstrained array or of a mere type variant. It is useful to avoid the
3760 extraction and conversion in the type variant case because it could end
3761 up replacing a VAR_DECL expr by a constructor and we might be about the
3762 take the address of the result. */
3763 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3764 && code != UNCONSTRAINED_ARRAY_TYPE
3765 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3766 return convert (type, build_component_ref (expr, NULL_TREE,
3767 TYPE_FIELDS (etype), false));
3769 /* If converting to a type that contains a template, convert to the data
3770 type and then build the template. */
3771 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3773 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3775 /* If the source already has a template, get a reference to the
3776 associated array only, as we are going to rebuild a template
3777 for the target type anyway. */
3778 expr = maybe_unconstrained_array (expr);
3781 gnat_build_constructor
3783 tree_cons (TYPE_FIELDS (type),
3784 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3785 obj_type, NULL_TREE),
3786 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3787 convert (obj_type, expr), NULL_TREE)));
3790 /* There are some special cases of expressions that we process
3792 switch (TREE_CODE (expr))
3798 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3799 conversion in gnat_expand_expr. NULL_EXPR does not represent
3800 an actual value, so no conversion is needed. */
3801 expr = copy_node (expr);
3802 TREE_TYPE (expr) = type;
3806 /* If we are converting a STRING_CST to another constrained array type,
3807 just make a new one in the proper type. */
3808 if (code == ecode && AGGREGATE_TYPE_P (etype)
3809 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3810 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3812 expr = copy_node (expr);
3813 TREE_TYPE (expr) = type;
3819 /* If we are converting a VECTOR_CST to a mere variant type, just make
3820 a new one in the proper type. */
3821 if (code == ecode && gnat_types_compatible_p (type, etype))
3823 expr = copy_node (expr);
3824 TREE_TYPE (expr) = type;
3829 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3830 a new one in the proper type. */
3831 if (code == ecode && gnat_types_compatible_p (type, etype))
3833 expr = copy_node (expr);
3834 TREE_TYPE (expr) = type;
3838 /* Likewise for a conversion between original and packable version, or
3839 conversion between types of the same size and with the same list of
3840 fields, but we have to work harder to preserve type consistency. */
3842 && code == RECORD_TYPE
3843 && (TYPE_NAME (type) == TYPE_NAME (etype)
3844 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3847 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3848 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3849 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3850 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3851 unsigned HOST_WIDE_INT idx;
3854 /* Whether we need to clear TREE_CONSTANT et al. on the output
3855 constructor when we convert in place. */
3856 bool clear_constant = false;
3858 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3860 constructor_elt *elt;
3861 /* We expect only simple constructors. */
3862 if (!SAME_FIELD_P (index, efield))
3864 /* The field must be the same. */
3865 if (!SAME_FIELD_P (efield, field))
3867 elt = VEC_quick_push (constructor_elt, v, NULL);
3869 elt->value = convert (TREE_TYPE (field), value);
3871 /* If packing has made this field a bitfield and the input
3872 value couldn't be emitted statically any more, we need to
3873 clear TREE_CONSTANT on our output. */
3875 && TREE_CONSTANT (expr)
3876 && !CONSTRUCTOR_BITFIELD_P (efield)
3877 && CONSTRUCTOR_BITFIELD_P (field)
3878 && !initializer_constant_valid_for_bitfield_p (value))
3879 clear_constant = true;
3881 efield = TREE_CHAIN (efield);
3882 field = TREE_CHAIN (field);
3885 /* If we have been able to match and convert all the input fields
3886 to their output type, convert in place now. We'll fallback to a
3887 view conversion downstream otherwise. */
3890 expr = copy_node (expr);
3891 TREE_TYPE (expr) = type;
3892 CONSTRUCTOR_ELTS (expr) = v;
3894 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3899 /* Likewise for a conversion between array type and vector type with a
3900 compatible representative array. */
3901 else if (code == VECTOR_TYPE
3902 && ecode == ARRAY_TYPE
3903 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3906 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3907 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3908 VEC(constructor_elt,gc) *v;
3909 unsigned HOST_WIDE_INT ix;
3912 /* Build a VECTOR_CST from a *constant* array constructor. */
3913 if (TREE_CONSTANT (expr))
3915 bool constant_p = true;
3917 /* Iterate through elements and check if all constructor
3918 elements are *_CSTs. */
3919 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3920 if (!CONSTANT_CLASS_P (value))
3927 return build_vector_from_ctor (type,
3928 CONSTRUCTOR_ELTS (expr));
3931 /* Otherwise, build a regular vector constructor. */
3932 v = VEC_alloc (constructor_elt, gc, len);
3933 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3935 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3936 elt->index = NULL_TREE;
3939 expr = copy_node (expr);
3940 TREE_TYPE (expr) = type;
3941 CONSTRUCTOR_ELTS (expr) = v;
3946 case UNCONSTRAINED_ARRAY_REF:
3947 /* Convert this to the type of the inner array by getting the address of
3948 the array from the template. */
3949 expr = TREE_OPERAND (expr, 0);
3950 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3951 build_component_ref (expr, NULL_TREE,
3955 etype = TREE_TYPE (expr);
3956 ecode = TREE_CODE (etype);
3959 case VIEW_CONVERT_EXPR:
3961 /* GCC 4.x is very sensitive to type consistency overall, and view
3962 conversions thus are very frequent. Even though just "convert"ing
3963 the inner operand to the output type is fine in most cases, it
3964 might expose unexpected input/output type mismatches in special
3965 circumstances so we avoid such recursive calls when we can. */
3966 tree op0 = TREE_OPERAND (expr, 0);
3968 /* If we are converting back to the original type, we can just
3969 lift the input conversion. This is a common occurrence with
3970 switches back-and-forth amongst type variants. */
3971 if (type == TREE_TYPE (op0))
3974 /* Otherwise, if we're converting between two aggregate or vector
3975 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
3976 target type in place or to just convert the inner expression. */
3977 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3978 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
3980 /* If we are converting between mere variants, we can just
3981 substitute the VIEW_CONVERT_EXPR in place. */
3982 if (gnat_types_compatible_p (type, etype))
3983 return build1 (VIEW_CONVERT_EXPR, type, op0);
3985 /* Otherwise, we may just bypass the input view conversion unless
3986 one of the types is a fat pointer, which is handled by
3987 specialized code below which relies on exact type matching. */
3988 else if (!TYPE_IS_FAT_POINTER_P (type)
3989 && !TYPE_IS_FAT_POINTER_P (etype))
3990 return convert (type, op0);
3999 /* Check for converting to a pointer to an unconstrained array. */
4000 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4001 return convert_to_fat_pointer (type, expr);
4003 /* If we are converting between two aggregate or vector types that are mere
4004 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4005 to a vector type from its representative array type. */
4006 else if ((code == ecode
4007 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4008 && gnat_types_compatible_p (type, etype))
4009 || (code == VECTOR_TYPE
4010 && ecode == ARRAY_TYPE
4011 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4013 return build1 (VIEW_CONVERT_EXPR, type, expr);
4015 /* If we are converting between tagged types, try to upcast properly. */
4016 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4017 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4019 tree child_etype = etype;
4021 tree field = TYPE_FIELDS (child_etype);
4022 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4023 return build_component_ref (expr, NULL_TREE, field, false);
4024 child_etype = TREE_TYPE (field);
4025 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4028 /* In all other cases of related types, make a NOP_EXPR. */
4029 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4030 return fold_convert (type, expr);
4035 return fold_build1 (CONVERT_EXPR, type, expr);
4038 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4039 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4040 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4041 return unchecked_convert (type, expr, false);
4042 else if (TYPE_BIASED_REPRESENTATION_P (type))
4043 return fold_convert (type,
4044 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4045 convert (TREE_TYPE (type), expr),
4046 TYPE_MIN_VALUE (type)));
4048 /* ... fall through ... */
4052 /* If we are converting an additive expression to an integer type
4053 with lower precision, be wary of the optimization that can be
4054 applied by convert_to_integer. There are 2 problematic cases:
4055 - if the first operand was originally of a biased type,
4056 because we could be recursively called to convert it
4057 to an intermediate type and thus rematerialize the
4058 additive operator endlessly,
4059 - if the expression contains a placeholder, because an
4060 intermediate conversion that changes the sign could
4061 be inserted and thus introduce an artificial overflow
4062 at compile time when the placeholder is substituted. */
4063 if (code == INTEGER_TYPE
4064 && ecode == INTEGER_TYPE
4065 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4066 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4068 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4070 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4071 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4072 || CONTAINS_PLACEHOLDER_P (expr))
4073 return build1 (NOP_EXPR, type, expr);
4076 return fold (convert_to_integer (type, expr));
4079 case REFERENCE_TYPE:
4080 /* If converting between two pointers to records denoting
4081 both a template and type, adjust if needed to account
4082 for any differing offsets, since one might be negative. */
4083 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4086 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4087 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4089 = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4090 expr = build1 (NOP_EXPR, type, expr);
4091 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4092 if (integer_zerop (byte_diff))
4095 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4096 fold (convert (sizetype, byte_diff)));
4099 /* If converting to a thin pointer, handle specially. */
4100 if (TYPE_IS_THIN_POINTER_P (type)
4101 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4102 return convert_to_thin_pointer (type, expr);
4104 /* If converting fat pointer to normal pointer, get the pointer to the
4105 array and then convert it. */
4106 else if (TYPE_IS_FAT_POINTER_P (etype))
4108 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4110 return fold (convert_to_pointer (type, expr));
4113 return fold (convert_to_real (type, expr));
4116 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4118 gnat_build_constructor
4119 (type, tree_cons (TYPE_FIELDS (type),
4120 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4123 /* ... fall through ... */
4126 /* In these cases, assume the front-end has validated the conversion.
4127 If the conversion is valid, it will be a bit-wise conversion, so
4128 it can be viewed as an unchecked conversion. */
4129 return unchecked_convert (type, expr, false);
   4132 /* This is either a conversion between a tagged type and some
4133 subtype, which we have to mark as a UNION_TYPE because of
4134 overlapping fields or a conversion of an Unchecked_Union. */
4135 return unchecked_convert (type, expr, false);
4137 case UNCONSTRAINED_ARRAY_TYPE:
4138 /* If the input is a VECTOR_TYPE, convert to the representative
4139 array type first. */
4140 if (ecode == VECTOR_TYPE)
4142 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4143 etype = TREE_TYPE (expr);
4144 ecode = TREE_CODE (etype);
4147 /* If EXPR is a constrained array, take its address, convert it to a
4148 fat pointer, and then dereference it. Likewise if EXPR is a
4149 record containing both a template and a constrained array.
4150 Note that a record representing a justified modular type
4151 always represents a packed constrained array. */
4152 if (ecode == ARRAY_TYPE
4153 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4154 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4155 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4158 (INDIRECT_REF, NULL_TREE,
4159 convert_to_fat_pointer (TREE_TYPE (type),
4160 build_unary_op (ADDR_EXPR,
4163 /* Do something very similar for converting one unconstrained
4164 array to another. */
4165 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4167 build_unary_op (INDIRECT_REF, NULL_TREE,
4168 convert (TREE_TYPE (type),
4169 build_unary_op (ADDR_EXPR,
4175 return fold (convert_to_complex (type, expr));
4182 /* Remove all conversions that are done in EXP. This includes converting
4183 from a padded type or to a justified modular type. If TRUE_ADDRESS
4184 is true, always return the address of the containing object even if
4185 the address is not bit-aligned. */
remove_conversions (tree exp, bool true_address)
  /* Recursively strip conversion-like wrappers from EXP; see the comment
     just above for the TRUE_ADDRESS semantics.  */
  switch (TREE_CODE (exp))
      /* A constructor for a justified modular record wraps a single value:
	 descend into it (the TRUE_ADDRESS guard is not visible in this
	 excerpt -- see the enclosing condition).  */
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
	  remove_conversions (VEC_index (constructor_elt,
					 CONSTRUCTOR_ELTS (exp), 0)->value,
      /* A reference whose operand has a padding type is itself a
	 conversion to strip.  NOTE(review): the case label for this test
	 lies outside the visible excerpt.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
	return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    /* Plain view conversions and non-lvalue wrappers are always peeled.  */
    case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4218 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4219 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4220 likewise return an expression pointing to the underlying array. */
maybe_unconstrained_array (tree exp)
  /* See the comment above: rewrite EXP so it refers to the underlying
     array when its type is an UNCONSTRAINED_ARRAY_TYPE or contains a
     template.  */
  enum tree_code code = TREE_CODE (exp);

  switch (TREE_CODE (TREE_TYPE (exp)))
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	  /* Start from the fat pointer and dereference its array field.  */
	  new_exp = TREE_OPERAND (exp, 0);
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (new_exp, NULL_TREE,
						   (TREE_TYPE (new_exp)),
	  /* Propagate readonly-ness from the original reference.  */
	  TREE_READONLY (new_exp) = TREE_READONLY (exp);

      /* For a NULL_EXPR, rebuild it with the type of the underlying
	 array so downstream code sees the expected type.  */
      else if (code == NULL_EXPR)
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_PADDING_P (TREE_TYPE (exp)))
	  new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
	      /* Extract the array part of the template+array record.  */
	      build_component_ref (new_exp, NULL_TREE,
				   (TYPE_FIELDS (TREE_TYPE (new_exp))),
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	  /* Second field follows the template and holds the array data.  */
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))),
4278 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4279 TYPE_REPRESENTATIVE_ARRAY. */
maybe_vector_array (tree exp)
  /* Convert EXP to its TYPE_REPRESENTATIVE_ARRAY when it is a vector;
     any other expression is passed through unchanged.  */
  tree etype = TREE_TYPE (exp);

  if (VECTOR_TYPE_P (etype))
    exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4292 /* Return true if EXPR is an expression that can be folded as an operand
4293 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
can_fold_for_view_convert_p (tree expr)
  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)

  t1 = TREE_TYPE (expr);
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))

  /* Only fold conversions that preserve both precisions, i.e. the GCC
     precision and (via rm_size, presumably the Ada RM_Size -- confirm)
     the Ada precision.  */
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4321 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4322 If NOTRUNC_P is true, truncation operations should be suppressed.
4324 Special care is required with (source or target) integral types whose
4325 precision is not equal to their size, to make sure we fetch or assign
4326 the value bits whose location might depend on the endianness, e.g.
4328 Rmsize : constant := 8;
4329 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4331 type Bit_Array is array (1 .. Rmsize) of Boolean;
4332 pragma Pack (Bit_Array);
4334 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4336 Value : Int := 2#1000_0001#;
4337 Vbits : Bit_Array := To_Bit_Array (Value);
4339 we expect the 8 bits at Vbits'Address to always contain Value, while
4340 their original location depends on the endianness, at Value'Address
4341 on a little-endian architecture but not on a big-endian one. */
unchecked_convert (tree type, tree expr, bool notrunc_p)
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */

  /* If both types are integral, just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
      /* A biased source must first be viewed as its unbiased variant so
	 the normal conversion machinery does not rebias it.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);

      /* Likewise convert to the unbiased variant of the target, then
	 reinterpret the bits as the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	expr = convert (type, expr);

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record that contains an
     object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type, rec_type,
				      NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);

  /* Similarly if we are converting from an integral type whose precision
     is not equal to its size: wrap the value in a one-field record and
     unchecked convert that record to the target.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
				      NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
    expr = convert (type, expr);

      /* Default case: strip any unconstrained wrapper, then reinterpret
	 the bits via VIEW_CONVERT_EXPR (folded only when safe, see
	 can_fold_for_view_convert_p).  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
	= gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
	/* Shift amount: number of padding bits above the RM size.  */
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
	/* Left shift then arithmetic/logical right shift performs the
	   sign- or zero-extension in BASE_TYPE.  */
	build_binary_op (RSHIFT_EXPR, base_type,
			 build_binary_op (LSHIFT_EXPR, base_type,
					  convert (base_type, expr),

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
    TREE_CONSTANT (expr) = 0;
4501 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4502 the latter being a record type as predicated by Is_Record_Type. */
tree_code_for_record_type (Entity_Id gnat_type)
  /* Inspect the component list of the base type's declaration.  */
  Node_Id component_list
    = Component_List (Type_Definition
		      (Implementation_Base_Type (gnat_type))));

  /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
     we have a non-discriminant field outside a variant.  In either case,
     it's a RECORD_TYPE.  */
  if (!Is_Unchecked_Union (gnat_type))

  /* Scan the non-pragma items looking for a plain component, i.e. a
     non-discriminant field outside any variant part.  */
  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
4529 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4530 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4531 according to the presence of an alignment clause on the type or, if it
4532 is an array, on the component type. */
is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
  gnat_type = Underlying_Type (gnat_type);

  /* Start from an alignment clause on the type itself, if any.  */
  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
      /* For an array, look at the component type; a clause on it also
	 counts as an alignment clause.  */
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
	*align_clause = true;

  /* Only 64-bit floating-point (component) types qualify.  */
  if (!Is_Floating_Point_Type (gnat_type))

  if (UI_To_Int (Esize (gnat_type)) != 64)
4557 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4558 size is greater or equal to 64 bits, or an array of such a type. Set
4559 ALIGN_CLAUSE according to the presence of an alignment clause on the
4560 type or, if it is an array, on the component type. */
is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
  gnat_type = Underlying_Type (gnat_type);

  /* Start from an alignment clause on the type itself, if any.  */
  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
      /* For an array, look at the component type; a clause on it also
	 counts as an alignment clause.  */
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
	*align_clause = true;

  /* Only scalar (component) types of at least 64 bits qualify.  */
  if (!Is_Scalar_Type (gnat_type))

  if (UI_To_Int (Esize (gnat_type)) < 64)
4585 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4586 component of an aggregate type. */
type_for_nonaliased_component_p (tree gnu_type)
  /* If the type is passed by reference, we may have pointers to the
     component so it cannot be made non-aliased.  */
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))

  /* We used to say that any component of aggregate type is aliased
     because the front-end may take 'Reference of it.  The front-end
     has been enhanced in the meantime so as to use a renaming instead
     in most cases, but the back-end can probably take the address of
     such a component too so we go for the conservative stance.

     For instance, we might need the address of any array type, even
     if normally passed by copy, to construct a fat pointer if the
     component is used as an actual for an unconstrained formal.

     Likewise for record types: even if a specific record subtype is
     passed by copy, the parent type might be passed by ref (e.g. if
     it's of variable size) and we might take the address of a child
     component to pass to a parent formal.  We have no way to check
     for such conditions here.  */
  if (AGGREGATE_TYPE_P (gnu_type))
4617 /* Perform final processing on global variables. */
gnat_write_global_declarations (void)
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_finalize_compilation_unit ();

  /* Emit debug info for all global declarations.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
				  VEC_length (tree, global_decls));
4631 /* ************************************************************************
4632 * * GCC builtins support *
4633 * ************************************************************************ */
4635 /* The general scheme is fairly simple:
4637 For each builtin function/type to be declared, gnat_install_builtins calls
4638 internal facilities which eventually get to gnat_push_decl, which in turn
4639 tracks the so declared builtin function decls in the 'builtin_decls' global
4640 datastructure. When an Intrinsic subprogram declaration is processed, we
4641 search this global datastructure to retrieve the associated BUILT_IN DECL
4644 /* Search the chain of currently available builtin declarations for a node
4645 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4646 found, if any, or NULL_TREE otherwise. */
builtin_decl_for (tree name)
  /* Linear scan over the registered builtin decls; NAME is an
     IDENTIFIER_NODE (see comment above) compared by node identity.  */
  for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
    if (DECL_NAME (decl) == name)
4660 /* The code below eventually exposes gnat_install_builtins, which declares
4661 the builtin types and functions we might need, either internally or as
4662 user accessible facilities.
4664 ??? This is a first implementation shot, still in rough shape. It is
4665 heavily inspired from the "C" family implementation, with chunks copied
4666 verbatim from there.
4668 Two obvious TODO candidates are
4669 o Use a more efficient name/decl mapping scheme
4670 o Devise a middle-end infrastructure to avoid having to copy
4671 pieces between front-ends. */
4673 /* ----------------------------------------------------------------------- *
4674 * BUILTIN ELEMENTARY TYPES *
4675 * ----------------------------------------------------------------------- */
4677 /* Standard data types to be used in builtin argument declarations. */
4681 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4683 CTI_CONST_STRING_TYPE,
4688 static tree c_global_trees[CTI_MAX];
4690 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4691 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4692 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
 4694 /* ??? In addition to some attribute handlers, we currently don't support a
4695 (small) number of builtin-types, which in turns inhibits support for a
4696 number of builtin functions. */
4697 #define wint_type_node void_type_node
4698 #define intmax_type_node void_type_node
4699 #define uintmax_type_node void_type_node
4701 /* Build the void_list_node (void_type_node having been created). */
build_void_list_node (void)
  /* Single-element TREE_LIST holding void_type_node; used below to
     terminate builtin function argument lists (see def_fn_type).  */
  tree t = build_tree_list (NULL_TREE, void_type_node);
4710 /* Used to help initialize the builtin-types.def table. When a type of
4711 the correct size doesn't exist, use error_mark_node instead of NULL.
 4712 The latter results in segfaults even when a decl using the type doesn't
builtin_type_for_size (int size, bool unsignedp)
  /* Map to error_mark_node rather than NULL when no type of the
     requested size exists (see comment above).  */
  tree type = gnat_type_for_size (size, unsignedp);
  return type ? type : error_mark_node;
4722 /* Build/push the elementary type decls that builtin functions/types
install_builtin_elementary_types (void)
  /* Elementary types referenced by the builtin declarations below.  */
  signed_size_type_node = gnat_signed_type (size_type_node);
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *" for string-taking builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
4738 /* ----------------------------------------------------------------------- *
4739 * BUILTIN FUNCTION TYPES *
4740 * ----------------------------------------------------------------------- */
4742 /* Now, builtin function types per se. */
4746 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4747 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4748 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4749 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4750 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4751 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4752 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4753 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4754 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4755 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4756 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4757 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4758 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4759 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4760 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4762 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4763 #include "builtin-types.def"
4764 #undef DEF_PRIMITIVE_TYPE
4765 #undef DEF_FUNCTION_TYPE_0
4766 #undef DEF_FUNCTION_TYPE_1
4767 #undef DEF_FUNCTION_TYPE_2
4768 #undef DEF_FUNCTION_TYPE_3
4769 #undef DEF_FUNCTION_TYPE_4
4770 #undef DEF_FUNCTION_TYPE_5
4771 #undef DEF_FUNCTION_TYPE_6
4772 #undef DEF_FUNCTION_TYPE_7
4773 #undef DEF_FUNCTION_TYPE_VAR_0
4774 #undef DEF_FUNCTION_TYPE_VAR_1
4775 #undef DEF_FUNCTION_TYPE_VAR_2
4776 #undef DEF_FUNCTION_TYPE_VAR_3
4777 #undef DEF_FUNCTION_TYPE_VAR_4
4778 #undef DEF_FUNCTION_TYPE_VAR_5
4779 #undef DEF_POINTER_TYPE
4783 typedef enum c_builtin_type builtin_type;
4785 /* A temporary array used in communication with def_fn_type. */
4786 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4788 /* A helper function for install_builtin_types. Build function type
4789 for DEF with return type RET and N arguments. If VAR is true, then the
4790 function should be variadic after those N arguments.
4792 Takes special care not to ICE if any of the types involved are
4793 error_mark_node, which indicates that said type is not in fact available
4794 (see builtin_type_for_size). In which case the function type as a whole
4795 should be error_mark_node. */
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
  tree args = NULL, t;

  /* Collect the N argument types from the va_list; bail out (leaving
     builtin_types[def] as error_mark_node) if any is unavailable.  */
  for (i = 0; i < n; ++i)
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
      args = tree_cons (NULL_TREE, t, args);

  /* Arguments were consed in reverse; restore order and, for a
     non-variadic function, terminate the list with void_list_node.  */
  args = nreverse (args);
    args = chainon (args, void_list_node);

  t = builtin_types[ret];
  if (t == error_mark_node)
  t = build_function_type (t, args);
  builtin_types[def] = t;
4828 /* Build the builtin function types and install them in the builtin_types
4829 array for later use in builtin function decls. */
install_builtin_function_types (void)
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* Array-typed va_list decays: pass/reference it via a pointer to the
     element type; otherwise pass by value and reference it directly.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);

  /* Expand builtin-types.def once with each DEF_* macro building the
     corresponding entry of builtin_types via def_fn_type.  */
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  /* Sentinel entry.  */
  builtin_types[(int) BT_LAST] = NULL_TREE;
4902 /* ----------------------------------------------------------------------- *
4903 * BUILTIN ATTRIBUTES *
4904 * ----------------------------------------------------------------------- */
/* Enumerate the attribute building blocks defined in builtin-attrs.def;
   the parallel built_in_attributes array below holds the corresponding
   trees, filled in by install_builtin_attributes.  */
enum built_in_attribute
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST

static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
install_builtin_attributes (void)
  /* Fill in the built_in_attributes array by expanding
     builtin-attrs.def a second time, now building trees.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
4944 /* Handle a "const" attribute; arguments as in
4945 struct attribute_spec.handler. */
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
			tree ARG_UNUSED (args), int ARG_UNUSED (flags),
  /* On a function decl, "const" is recorded via TREE_READONLY;
     anything else drops the attribute.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
    *no_add_attrs = true;
4960 /* Handle a "nothrow" attribute; arguments as in
4961 struct attribute_spec.handler. */
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
			  tree ARG_UNUSED (args), int ARG_UNUSED (flags),
  /* On a function decl, "nothrow" is recorded via TREE_NOTHROW;
     anything else drops the attribute.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
    *no_add_attrs = true;
4976 /* Handle a "pure" attribute; arguments as in
4977 struct attribute_spec.handler. */
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
  /* On a function decl, "pure" is recorded via DECL_PURE_P; otherwise
     warn and drop the attribute.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* ??? TODO: Support types.  */
    warning (OPT_Wattributes, "%qs attribute ignored",
	     IDENTIFIER_POINTER (name));
    *no_add_attrs = true;
4996 /* Handle a "no vops" attribute; arguments as in
4997 struct attribute_spec.handler. */
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
  /* "no vops" only ever appears on builtin function decls we create
     ourselves, hence the assert rather than a warning.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
5009 /* Helper for nonnull attribute handling; fetch the operand number
5010 from the attribute argument list. */
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
  /* Verify the arg number is a constant that fits in a HOST_WIDE_INT
     (high part must be zero).  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)

  *valp = TREE_INT_CST_LOW (arg_num_expr);
5024 /* Handle the "nonnull" attribute. */
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
      if (!TYPE_ARG_TYPES (type))
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;

      /* Walk the prototype's argument list to the requested position.  */
      argument = TYPE_ARG_TYPES (type);
	  for (ck_num = 1; ; ck_num++)
	      if (!argument || ck_num == arg_num)
	      argument = TREE_CHAIN (argument);

	  || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;

      if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
	  error ("nonnull argument references non-pointer operand "
		 "(argument %lu, operand %lu)",
		 (unsigned long) attr_arg_num, (unsigned long) arg_num);
	  *no_add_attrs = true;
5095 /* Handle a "sentinel" attribute. */
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
  tree params = TYPE_ARG_TYPES (*node);

      /* A prototype with named arguments is required.  */
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;

      /* Advance to the last entry to test for variadicity: a void
	 terminator means the function is NOT variadic.  */
      while (TREE_CHAIN (params))
	params = TREE_CHAIN (params);

      if (VOID_TYPE_P (TREE_VALUE (params)))
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;

      /* The optional attribute argument gives the sentinel position;
	 it must be a non-negative integer constant.  */
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;

      if (tree_int_cst_lt (position, integer_zero_node))
	  warning (0, "requested position is less than zero");
	  *no_add_attrs = true;
5146 /* Handle a "noreturn" attribute; arguments as in
5147 struct attribute_spec.handler. */
5150 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5151 int ARG_UNUSED (flags), bool *no_add_attrs)
5153 tree type = TREE_TYPE (*node);
5155 /* See FIXME comment in c_common_attribute_table. */
5156 if (TREE_CODE (*node) == FUNCTION_DECL)
5157 TREE_THIS_VOLATILE (*node) = 1;
5158 else if (TREE_CODE (type) == POINTER_TYPE
5159 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5161 = build_pointer_type
5162 (build_type_variant (TREE_TYPE (type),
5163 TYPE_READONLY (TREE_TYPE (type)), 1));
5166 warning (OPT_Wattributes, "%qs attribute ignored",
5167 IDENTIFIER_POINTER (name));
5168 *no_add_attrs = true;
5174 /* Handle a "malloc" attribute; arguments as in
5175 struct attribute_spec.handler. */
5178 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5179 int ARG_UNUSED (flags), bool *no_add_attrs)
5181 if (TREE_CODE (*node) == FUNCTION_DECL
5182 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5183 DECL_IS_MALLOC (*node) = 1;
5186 warning (OPT_Wattributes, "%qs attribute ignored",
5187 IDENTIFIER_POINTER (name));
5188 *no_add_attrs = true;
5194 /* Fake handler for attributes we don't properly support. */
5197 fake_attribute_handler (tree * ARG_UNUSED (node),
5198 tree ARG_UNUSED (name),
5199 tree ARG_UNUSED (args),
5200 int ARG_UNUSED (flags),
5201 bool * ARG_UNUSED (no_add_attrs))
5206 /* Handle a "type_generic" attribute. */
5209 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5210 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5211 bool * ARG_UNUSED (no_add_attrs))
5215 /* Ensure we have a function type. */
5216 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5218 params = TYPE_ARG_TYPES (*node);
5219 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5220 params = TREE_CHAIN (params);
5222 /* Ensure we have a variadic function. */
5223 gcc_assert (!params);
5228 /* Handle a "vector_size" attribute; arguments as in
5229 struct attribute_spec.handler. */
5232 handle_vector_size_attribute (tree *node, tree name, tree args,
5233 int ARG_UNUSED (flags),
5236 unsigned HOST_WIDE_INT vecsize, nunits;
5237 enum machine_mode orig_mode;
5238 tree type = *node, new_type, size;
5240 *no_add_attrs = true;
5242 size = TREE_VALUE (args);
5244 if (!host_integerp (size, 1))
5246 warning (OPT_Wattributes, "%qs attribute ignored",
5247 IDENTIFIER_POINTER (name));
5251 /* Get the vector size (in bytes). */
5252 vecsize = tree_low_cst (size, 1);
5254 /* We need to provide for vector pointers, vector arrays, and
5255 functions returning vectors. For example:
5257 __attribute__((vector_size(16))) short *foo;
5259 In this case, the mode is SI, but the type being modified is
5260 HI, so we need to look further. */
5262 while (POINTER_TYPE_P (type)
5263 || TREE_CODE (type) == FUNCTION_TYPE
5264 || TREE_CODE (type) == METHOD_TYPE
5265 || TREE_CODE (type) == ARRAY_TYPE
5266 || TREE_CODE (type) == OFFSET_TYPE)
5267 type = TREE_TYPE (type);
5269 /* Get the mode of the type being modified. */
5270 orig_mode = TYPE_MODE (type);
5272 if ((!INTEGRAL_TYPE_P (type)
5273 && !SCALAR_FLOAT_TYPE_P (type)
5274 && !FIXED_POINT_TYPE_P (type))
5275 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5276 && GET_MODE_CLASS (orig_mode) != MODE_INT
5277 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5278 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5279 || TREE_CODE (type) == BOOLEAN_TYPE)
5281 error ("invalid vector type for attribute %qs",
5282 IDENTIFIER_POINTER (name));
5286 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5288 error ("vector size not an integral multiple of component size");
5294 error ("zero vector size");
5298 /* Calculate how many units fit in the vector. */
5299 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5300 if (nunits & (nunits - 1))
5302 error ("number of components of the vector not a power of two");
5306 new_type = build_vector_type (type, nunits);
5308 /* Build back pointers if needed. */
5309 *node = reconstruct_complex_type (*node, new_type);
5314 /* Handle a "vector_type" attribute; arguments as in
5315 struct attribute_spec.handler. */
5318 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5319 int ARG_UNUSED (flags),
5322 /* Vector representative type and size. */
5323 tree rep_type = *node;
5324 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5327 /* Vector size in bytes and number of units. */
5328 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5330 /* Vector element type and mode. */
5332 enum machine_mode elem_mode;
5334 *no_add_attrs = true;
5336 /* Get the representative array type, possibly nested within a
5337 padding record e.g. for alignment purposes. */
5339 if (TYPE_IS_PADDING_P (rep_type))
5340 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5342 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5344 error ("attribute %qs applies to array types only",
5345 IDENTIFIER_POINTER (name));
5349 /* Silently punt on variable sizes. We can't make vector types for them,
5350 need to ignore them on front-end generated subtypes of unconstrained
5351 bases, and this attribute is for binding implementors, not end-users, so
5352 we should never get there from legitimate explicit uses. */
5354 if (!host_integerp (rep_size, 1))
5357 /* Get the element type/mode and check this is something we know
5358 how to make vectors of. */
5360 elem_type = TREE_TYPE (rep_type);
5361 elem_mode = TYPE_MODE (elem_type);
5363 if ((!INTEGRAL_TYPE_P (elem_type)
5364 && !SCALAR_FLOAT_TYPE_P (elem_type)
5365 && !FIXED_POINT_TYPE_P (elem_type))
5366 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5367 && GET_MODE_CLASS (elem_mode) != MODE_INT
5368 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5369 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5371 error ("invalid element type for attribute %qs",
5372 IDENTIFIER_POINTER (name));
5376 /* Sanity check the vector size and element type consistency. */
5378 vec_bytes = tree_low_cst (rep_size, 1);
5380 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5382 error ("vector size not an integral multiple of component size");
5388 error ("zero vector size");
5392 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5393 if (vec_units & (vec_units - 1))
5395 error ("number of components of the vector not a power of two");
5399 /* Build the vector type and replace. */
5401 *node = build_vector_type (elem_type, vec_units);
5402 rep_name = TYPE_NAME (rep_type);
5403 if (TREE_CODE (rep_name) == TYPE_DECL)
5404 rep_name = DECL_NAME (rep_name);
5405 TYPE_NAME (*node) = rep_name;
5406 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5411 /* ----------------------------------------------------------------------- *
5412 * BUILTIN FUNCTIONS *
5413 * ----------------------------------------------------------------------- */
5415 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5416 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5417 if nonansi_p and flag_no_nonansi_builtin. */
5420 def_builtin_1 (enum built_in_function fncode,
5422 enum built_in_class fnclass,
5423 tree fntype, tree libtype,
5424 bool both_p, bool fallback_p,
5425 bool nonansi_p ATTRIBUTE_UNUSED,
5426 tree fnattrs, bool implicit_p)
5429 const char *libname;
5431 /* Preserve an already installed decl. It most likely was setup in advance
5432 (e.g. as part of the internal builtins) for specific reasons. */
5433 if (built_in_decls[(int) fncode] != NULL_TREE)
5436 gcc_assert ((!both_p && !fallback_p)
5437 || !strncmp (name, "__builtin_",
5438 strlen ("__builtin_")));
5440 libname = name + strlen ("__builtin_");
5441 decl = add_builtin_function (name, fntype, fncode, fnclass,
5442 (fallback_p ? libname : NULL),
5445 /* ??? This is normally further controlled by command-line options
5446 like -fno-builtin, but we don't have them for Ada. */
5447 add_builtin_function (libname, libtype, fncode, fnclass,
5450 built_in_decls[(int) fncode] = decl;
5452 implicit_built_in_decls[(int) fncode] = decl;
/* ISO C dialect flags referenced by the COND field of builtins.def
   entries.  Ada has no -std= machinery, so they are permanently 0 and
   the corresponding C9x-only builtins are simply not installed.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5458 /* Install what the common builtins.def offers. */
5461 install_builtin_functions (void)
5463 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5464 NONANSI_P, ATTRS, IMPLICIT, COND) \
5466 def_builtin_1 (ENUM, NAME, CLASS, \
5467 builtin_types[(int) TYPE], \
5468 builtin_types[(int) LIBTYPE], \
5469 BOTH_P, FALLBACK_P, NONANSI_P, \
5470 built_in_attributes[(int) ATTRS], IMPLICIT);
5471 #include "builtins.def"
/* ----------------------------------------------------------------------- *
 *                    BUILTIN FUNCTIONS INSTALLATION                       *
 * ----------------------------------------------------------------------- */
5479 /* Install the builtin functions we might need. */
5482 gnat_install_builtins (void)
5484 install_builtin_elementary_types ();
5485 install_builtin_function_types ();
5486 install_builtin_attributes ();
5488 /* Install builtins used by generic middle-end pieces first. Some of these
5489 know about internal specificities and control attributes accordingly, for
5490 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5491 the generic definition from builtins.def. */
5492 build_common_builtin_nodes ();
5494 /* Now, install the target specific builtins, such as the AltiVec family on
5495 ppc, and the common set as exposed by builtins.def. */
5496 targetm.init_builtins ();
5497 install_builtin_functions ();
5500 #include "gt-ada-utils.h"
5501 #include "gtype-ada.h"