1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2010, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
40 #include "langhooks.h"
41 #include "pointer-set.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
62 #ifndef MAX_BITS_PER_WORD
63 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common C front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
107 { "const", 0, 0, true, false, false, handle_const_attribute },
108 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
109 { "pure", 0, 0, true, false, false, handle_pure_attribute },
110 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
111 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
112 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
113 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
114 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
115 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
117 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
118 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
119 { "may_alias", 0, 0, false, true, false, NULL },
121 /* ??? format and format_arg are heavy and not supported, which actually
122 prevents support for stdio builtins, which we however declare as part
123 of the common builtins.def contents. */
124 { "format", 3, 3, false, true, true, fake_attribute_handler },
125 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Associates a GNAT tree node to a GCC tree node. It is used in
131 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
132 of `save_gnu_tree' for more info. */
133 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
135 #define GET_GNU_TREE(GNAT_ENTITY) \
136 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
138 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
139 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
141 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
142 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
144 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
145 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
147 #define GET_DUMMY_NODE(GNAT_ENTITY) \
148 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
150 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
151 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
153 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
154 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
156 /* This variable keeps a table for types for each precision so that we only
157 allocate each of them once. Signed and unsigned types are kept separate.
159 Note that these types are only used when fold-const requests something
160 special. Perhaps we should NOT share these types; we'll see how it goes. */
162 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
164 /* Likewise for float types, but record these by mode. */
165 static GTY(()) tree float_types[NUM_MACHINE_MODES];
167 /* For each binding contour we allocate a binding_level structure to indicate
168 the binding depth. */
170 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
171 /* The binding level containing this one (the enclosing binding level). */
172 struct gnat_binding_level *chain;
173 /* The BLOCK node for this level. */
175 /* If nonzero, the setjmp buffer that needs to be updated for any
176 variable-sized definition within this context. */
180 /* The binding level currently in effect. */
181 static GTY(()) struct gnat_binding_level *current_binding_level;
183 /* A chain of gnat_binding_level structures awaiting reuse. */
184 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
186 /* An array of global declarations. */
187 static GTY(()) VEC(tree,gc) *global_decls;
189 /* An array of builtin function declarations. */
190 static GTY(()) VEC(tree,gc) *builtin_decls;
192 /* An array of global renaming pointers. */
193 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
195 /* A chain of unused BLOCK nodes. */
196 static GTY((deletable)) tree free_block_chain;
198 static tree merge_sizes (tree, tree, tree, bool, bool);
199 static tree compute_related_constant (tree, tree);
200 static tree split_plus (tree, tree *);
201 static tree float_type_for_precision (int, enum machine_mode);
202 static tree convert_to_fat_pointer (tree, tree);
203 static tree convert_to_thin_pointer (tree, tree);
204 static tree make_descriptor_field (const char *,tree, tree, tree);
205 static bool potential_alignment_gap (tree, tree, tree);
207 /* Initialize the association of GNAT nodes to GCC trees. */
210 init_gnat_to_gnu (void)
212 associate_gnat_to_gnu
213 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
216 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
217 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
218 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
220 If GNU_DECL is zero, a previous association is to be reset. */
223 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
225 /* Check that GNAT_ENTITY is not already defined and that it is being set
226 to something which is a decl. Raise gigi 401 if not. Usually, this
227 means GNAT_ENTITY is defined twice, but occasionally is due to some
229 gcc_assert (!(gnu_decl
230 && (PRESENT_GNU_TREE (gnat_entity)
231 || (!no_check && !DECL_P (gnu_decl)))));
233 SET_GNU_TREE (gnat_entity, gnu_decl);
236 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
237 Return the ..._DECL node that was associated with it. If there is no tree
238 node associated with GNAT_ENTITY, abort.
240 In some cases, such as delayed elaboration or expressions that need to
241 be elaborated only once, GNAT_ENTITY is really not an entity. */
244 get_gnu_tree (Entity_Id gnat_entity)
246 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
247 return GET_GNU_TREE (gnat_entity);
250 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
253 present_gnu_tree (Entity_Id gnat_entity)
255 return PRESENT_GNU_TREE (gnat_entity);
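/* A minimal usage sketch of the association primitives above (assuming
   gnat_to_gnu_entity from decl.c with its usual (entity, expr, definition)
   profile; this is illustrative only, not code from this unit):

     tree gnu_decl;
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       gnu_decl = gnat_to_gnu_entity (gnat_entity, NULL_TREE, 0);

   Calling save_gnu_tree (gnat_entity, NULL_TREE, false) resets a previous
   association, as described above.  */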
258 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
261 init_dummy_type (void)
264 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
267 /* Make a dummy type corresponding to GNAT_TYPE. */
270 make_dummy_type (Entity_Id gnat_type)
272 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
275 /* If there is an equivalent type, get its underlying type. */
276 if (Present (gnat_underlying))
277 gnat_underlying = Underlying_Type (gnat_underlying);
279 /* If there was no equivalent type (can only happen when just annotating
280 types) or underlying type, go back to the original type. */
281 if (No (gnat_underlying))
282 gnat_underlying = gnat_type;
284 /* If there is already a dummy type, use that one. Else make one. */
285 if (PRESENT_DUMMY_NODE (gnat_underlying))
286 return GET_DUMMY_NODE (gnat_underlying);
288 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make an ENUMERAL_TYPE. */
290 gnu_type = make_node (Is_Record_Type (gnat_underlying)
291 ? tree_code_for_record_type (gnat_underlying)
293 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
294 TYPE_DUMMY_P (gnu_type) = 1;
295 TYPE_STUB_DECL (gnu_type)
296 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
297 if (Is_By_Reference_Type (gnat_type))
298 TREE_ADDRESSABLE (gnu_type) = 1;
300 SET_DUMMY_NODE (gnat_underlying, gnu_type);
305 /* Return nonzero if we are currently in the global binding level. */
308 global_bindings_p (void)
310 return ((force_global || !current_function_decl) ? -1 : 0);
313 /* Enter a new binding level. */
316 gnat_pushlevel (void)
318 struct gnat_binding_level *newlevel = NULL;
320 /* Reuse a struct for this binding level, if there is one. */
321 if (free_binding_level)
323 newlevel = free_binding_level;
324 free_binding_level = free_binding_level->chain;
328 = (struct gnat_binding_level *)
329 ggc_alloc (sizeof (struct gnat_binding_level));
331 /* Use a free BLOCK, if any; otherwise, allocate one. */
332 if (free_block_chain)
334 newlevel->block = free_block_chain;
335 free_block_chain = BLOCK_CHAIN (free_block_chain);
336 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
339 newlevel->block = make_node (BLOCK);
341 /* Point the BLOCK we just made to its parent. */
342 if (current_binding_level)
343 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
345 BLOCK_VARS (newlevel->block) = NULL_TREE;
346 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
347 TREE_USED (newlevel->block) = 1;
349 /* Add this level to the front of the chain (stack) of active levels. */
350 newlevel->chain = current_binding_level;
351 newlevel->jmpbuf_decl = NULL_TREE;
352 current_binding_level = newlevel;
355 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
356 and point FNDECL to this BLOCK. */
359 set_current_block_context (tree fndecl)
361 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
362 DECL_INITIAL (fndecl) = current_binding_level->block;
363 set_block_for_group (current_binding_level->block);
366 /* Set the jmpbuf_decl for the current binding level to DECL. */
369 set_block_jmpbuf_decl (tree decl)
371 current_binding_level->jmpbuf_decl = decl;
374 /* Get the jmpbuf_decl, if any, for the current binding level. */
377 get_block_jmpbuf_decl (void)
379 return current_binding_level->jmpbuf_decl;
382 /* Exit a binding level. Set any BLOCK into the current code group. */
387 struct gnat_binding_level *level = current_binding_level;
388 tree block = level->block;
390 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
391 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
393 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
394 are no variables, free the block and merge its subblocks into those of its
395 parent block. Otherwise, add the block to the list of its parent. */
396 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
398 else if (BLOCK_VARS (block) == NULL_TREE)
400 BLOCK_SUBBLOCKS (level->chain->block)
401 = chainon (BLOCK_SUBBLOCKS (block),
402 BLOCK_SUBBLOCKS (level->chain->block));
403 BLOCK_CHAIN (block) = free_block_chain;
404 free_block_chain = block;
408 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
409 BLOCK_SUBBLOCKS (level->chain->block) = block;
410 TREE_USED (block) = 1;
411 set_block_for_group (block);
414 /* Free this binding structure. */
415 current_binding_level = level->chain;
416 level->chain = free_binding_level;
417 free_binding_level = level;
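/* A sketch of how the level primitives above are typically paired by callers
   (start_stmt_group/end_stmt_group are assumed to come from trans.c and are
   not defined in this unit):

     start_stmt_group ();
     gnat_pushlevel ();
     ... gnat_pushdecl the local objects, add statements ...
     gnat_poplevel ();
     gnu_result = end_stmt_group ();

   The BLOCK created by gnat_pushlevel is attached to the enclosing statement
   group via set_block_for_group if it is kept.  */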
421 /* Records a ..._DECL node DECL as belonging to the current lexical scope
422 and uses GNAT_NODE for location information and propagating flags. */
425 gnat_pushdecl (tree decl, Node_Id gnat_node)
427 /* If this decl is public external or at toplevel, there is no context.
428 But PARM_DECLs always go in the level of their function. */
429 if (TREE_CODE (decl) != PARM_DECL
430 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
431 || global_bindings_p ()))
432 DECL_CONTEXT (decl) = 0;
435 DECL_CONTEXT (decl) = current_function_decl;
437 /* Functions imported in another function are not really nested.
438 For really nested functions mark them initially as needing
439 a static chain for uses of that flag before unnesting;
440 lower_nested_functions will then recompute it. */
441 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
442 DECL_STATIC_CHAIN (decl) = 1;
445 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
447 /* Set the location of DECL and emit a declaration for it. */
448 if (Present (gnat_node))
449 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
450 add_decl_expr (decl, gnat_node);
452 /* Put the declaration on the list. The list of declarations is in reverse
453 order. The list will be reversed later. Put global variables in the
454 globals list and builtin functions in a dedicated list to speed up
455 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
456 the list, as they will cause trouble with the debugger and aren't needed anyway. */
458 if (TREE_CODE (decl) != TYPE_DECL
459 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
461 if (global_bindings_p ())
463 VEC_safe_push (tree, gc, global_decls, decl);
465 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
466 VEC_safe_push (tree, gc, builtin_decls, decl);
470 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
471 BLOCK_VARS (current_binding_level->block) = decl;
475 /* For the declaration of a type, set its name if it either is not already
476 set or if the previous type name was not derived from a source name.
477 We'd rather have the type named with a real name and all the pointer
478 types to the same object have the same POINTER_TYPE node. Code in the
479 equivalent function of c-decl.c makes a copy of the type node here, but
480 that may cause us trouble with incomplete types. We make an exception
481 for fat pointer types because the compiler automatically builds them
482 for unconstrained array types and the debugger uses them to represent
483 both these and pointers to these. */
484 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
486 tree t = TREE_TYPE (decl);
488 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
490 else if (TYPE_IS_FAT_POINTER_P (t))
492 tree tt = build_variant_type_copy (t);
493 TYPE_NAME (tt) = decl;
494 TREE_USED (tt) = TREE_USED (t);
495 TREE_TYPE (decl) = tt;
496 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
497 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
499 DECL_ORIGINAL_TYPE (decl) = t;
501 DECL_ARTIFICIAL (decl) = 0;
503 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
508 /* Propagate the name to all the variants. This is needed for
509 the type qualifiers machinery to work properly. */
511 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
512 TYPE_NAME (t) = decl;
516 /* Do little here. Set up the standard declarations later after the
517 front end has been run. */
520 gnat_init_decl_processing (void)
522 build_common_tree_nodes (true, true);
524 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
525 corresponding to the width of Pmode. In most cases when ptr_mode
526 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
527 But we get far better code using the width of Pmode. */
528 size_type_node = gnat_type_for_mode (Pmode, 0);
529 set_sizetype (size_type_node);
531 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
532 boolean_type_node = make_unsigned_type (8);
533 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
534 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
535 build_int_cst (boolean_type_node, 1));
536 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
538 build_common_tree_nodes_2 (0);
539 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
541 ptr_void_type_node = build_pointer_type (void_type_node);
544 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
547 record_builtin_type (const char *name, tree type)
549 tree type_decl = build_decl (input_location,
550 TYPE_DECL, get_identifier (name), type);
552 gnat_pushdecl (type_decl, Empty);
554 if (debug_hooks->type_decl)
555 debug_hooks->type_decl (type_decl, false);
558 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
559 finish constructing the record or union type. If REP_LEVEL is zero, this
560 record has no representation clause and so will be entirely laid out here.
561 If REP_LEVEL is one, this record has a representation clause and has been
562 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
563 this record is derived from a parent record and thus inherits its layout;
564 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
565 we need to write debug information about this type. */
568 finish_record_type (tree record_type, tree field_list, int rep_level,
571 enum tree_code code = TREE_CODE (record_type);
572 tree name = TYPE_NAME (record_type);
573 tree ada_size = bitsize_zero_node;
574 tree size = bitsize_zero_node;
575 bool had_size = TYPE_SIZE (record_type) != 0;
576 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
577 bool had_align = TYPE_ALIGN (record_type) != 0;
580 TYPE_FIELDS (record_type) = field_list;
582 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
583 generate debug info and have a parallel type. */
584 if (name && TREE_CODE (name) == TYPE_DECL)
585 name = DECL_NAME (name);
586 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
588 /* Globally initialize the record first. If this is a rep'ed record,
589 that just means some initializations; otherwise, lay out the record. */
592 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
595 TYPE_SIZE_UNIT (record_type) = size_zero_node;
598 TYPE_SIZE (record_type) = bitsize_zero_node;
600 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
601 out just like a UNION_TYPE, since the size will be fixed. */
602 else if (code == QUAL_UNION_TYPE)
607 /* Ensure there isn't a size already set. There can be one in an error
608 case where there is a rep clause but all fields have errors and
609 no longer have a position. */
610 TYPE_SIZE (record_type) = 0;
611 layout_type (record_type);
614 /* At this point, the position and size of each field is known. It was
615 either set before entry by a rep clause, or by laying out the type above.
617 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
618 to compute the Ada size; the GCC size and alignment (for rep'ed records
619 that are not padding types); and the mode (for rep'ed records). We also
620 clear the DECL_BIT_FIELD indication for the cases we know have not been
621 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
623 if (code == QUAL_UNION_TYPE)
624 field_list = nreverse (field_list);
626 for (field = field_list; field; field = TREE_CHAIN (field))
628 tree type = TREE_TYPE (field);
629 tree pos = bit_position (field);
630 tree this_size = DECL_SIZE (field);
633 if ((TREE_CODE (type) == RECORD_TYPE
634 || TREE_CODE (type) == UNION_TYPE
635 || TREE_CODE (type) == QUAL_UNION_TYPE)
636 && !TYPE_FAT_POINTER_P (type)
637 && !TYPE_CONTAINS_TEMPLATE_P (type)
638 && TYPE_ADA_SIZE (type))
639 this_ada_size = TYPE_ADA_SIZE (type);
641 this_ada_size = this_size;
643 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
644 if (DECL_BIT_FIELD (field)
645 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
647 unsigned int align = TYPE_ALIGN (type);
649 /* In the general case, type alignment is required. */
650 if (value_factor_p (pos, align))
652 /* The enclosing record type must be sufficiently aligned.
653 Otherwise, if no alignment was specified for it and it
654 has been laid out already, bump its alignment to the
655 desired one if this is compatible with its size. */
656 if (TYPE_ALIGN (record_type) >= align)
658 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
659 DECL_BIT_FIELD (field) = 0;
663 && value_factor_p (TYPE_SIZE (record_type), align))
665 TYPE_ALIGN (record_type) = align;
666 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
667 DECL_BIT_FIELD (field) = 0;
671 /* In the non-strict alignment case, only byte alignment is required. */
672 if (!STRICT_ALIGNMENT
673 && DECL_BIT_FIELD (field)
674 && value_factor_p (pos, BITS_PER_UNIT))
675 DECL_BIT_FIELD (field) = 0;
678 /* If we still have DECL_BIT_FIELD set at this point, we know that the
679 field is technically not addressable. Except that it can actually
680 be addressed if it is BLKmode and happens to be properly aligned. */
681 if (DECL_BIT_FIELD (field)
682 && !(DECL_MODE (field) == BLKmode
683 && value_factor_p (pos, BITS_PER_UNIT)))
684 DECL_NONADDRESSABLE_P (field) = 1;
686 /* A type must be as aligned as its most aligned field that is not
687 a bit-field. But this is already enforced by layout_type. */
688 if (rep_level > 0 && !DECL_BIT_FIELD (field))
689 TYPE_ALIGN (record_type)
690 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
695 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
696 size = size_binop (MAX_EXPR, size, this_size);
699 case QUAL_UNION_TYPE:
701 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
702 this_ada_size, ada_size);
703 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
708 /* Since we know here that all fields are sorted in order of
709 increasing bit position, the size of the record is one
710 higher than the ending bit of the last field processed
711 unless we have a rep clause, since in that case we might
712 have a field outside a QUAL_UNION_TYPE that has a higher ending
713 position. So use a MAX in that case. Also, if this field is a
714 QUAL_UNION_TYPE, we need to take into account the previous size in
715 the case of empty variants. */
717 = merge_sizes (ada_size, pos, this_ada_size,
718 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
720 = merge_sizes (size, pos, this_size,
721 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
729 if (code == QUAL_UNION_TYPE)
730 nreverse (field_list);
734 /* If this is a padding record, we never want to make the size smaller
735 than what was specified in it, if any. */
736 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
737 size = TYPE_SIZE (record_type);
739 /* Now set any of the values we've just computed that apply. */
740 if (!TYPE_FAT_POINTER_P (record_type)
741 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
742 SET_TYPE_ADA_SIZE (record_type, ada_size);
746 tree size_unit = had_size_unit
747 ? TYPE_SIZE_UNIT (record_type)
749 size_binop (CEIL_DIV_EXPR, size,
751 unsigned int align = TYPE_ALIGN (record_type);
753 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
754 TYPE_SIZE_UNIT (record_type)
755 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
757 compute_record_mode (record_type);
762 rest_of_record_type_compilation (record_type);
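/* A hedged sketch of the expected calling sequence for a simple record with
   two integer fields and no rep clause (the field names are made up, and the
   last argument is assumed to be the DEBUG_INFO_P flag documented above):

     tree gnu_rec = make_node (RECORD_TYPE);
     tree f1 = create_field_decl (get_identifier ("F1"), integer_type_node,
                                  gnu_rec, 0, NULL_TREE, NULL_TREE, 0);
     tree f2 = create_field_decl (get_identifier ("F2"), integer_type_node,
                                  gnu_rec, 0, NULL_TREE, NULL_TREE, 0);
     TREE_CHAIN (f1) = f2;
     finish_record_type (gnu_rec, f1, 0, true);

   With rep_level == 0 the record is laid out here; with rep_level == 1 the
   caller must already have positioned the fields.  */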
765 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
766 associated with it. It need not be invoked directly in most cases since
767 finish_record_type takes care of doing so, but this can be necessary if
768 a parallel type is to be attached to the record type. */
771 rest_of_record_type_compilation (tree record_type)
773 tree field_list = TYPE_FIELDS (record_type);
775 enum tree_code code = TREE_CODE (record_type);
776 bool var_size = false;
778 for (field = field_list; field; field = TREE_CHAIN (field))
780 /* We need to make an XVE/XVU record if any field has variable size,
781 whether or not the record does. For example, if we have a union,
782 it may be that all fields, rounded up to the alignment, have the
783 same size, in which case we'll use that size. But the debug
784 output routines (except Dwarf2) won't be able to output the fields,
785 so we need to make the special record. */
786 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
787 /* If a field has a non-constant qualifier, the record will have
788 variable size too. */
789 || (code == QUAL_UNION_TYPE
790 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
797 /* If this record is of variable size, rename it so that the
798 debugger knows it is and make a new, parallel, record
799 that tells the debugger how the record is laid out. See
800 exp_dbug.ads. But don't do this for records that are padding
801 since they confuse GDB. */
802 if (var_size && !TYPE_IS_PADDING_P (record_type))
805 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
806 ? UNION_TYPE : TREE_CODE (record_type));
807 tree orig_name = TYPE_NAME (record_type), new_name;
808 tree last_pos = bitsize_zero_node;
809 tree old_field, prev_old_field = NULL_TREE;
811 if (TREE_CODE (orig_name) == TYPE_DECL)
812 orig_name = DECL_NAME (orig_name);
815 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
817 TYPE_NAME (new_record_type) = new_name;
818 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
819 TYPE_STUB_DECL (new_record_type)
820 = create_type_stub_decl (new_name, new_record_type);
821 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
822 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
823 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
824 TYPE_SIZE_UNIT (new_record_type)
825 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
827 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
829 /* Now scan all the fields, replacing each field with a new
830 field corresponding to the new encoding. */
831 for (old_field = TYPE_FIELDS (record_type); old_field;
832 old_field = TREE_CHAIN (old_field))
834 tree field_type = TREE_TYPE (old_field);
835 tree field_name = DECL_NAME (old_field);
837 tree curpos = bit_position (old_field);
839 unsigned int align = 0;
842 /* See how the position was modified from the last position.
844 There are two basic cases we support: a value was added
845 to the last position or the last position was rounded to
846 a boundary and then something was added. Check for the
847 first case first. If not, see if there is any evidence
848 of rounding. If so, round the last position and try again.
851 If this is a union, the position can be taken as zero. */
853 /* Some computations depend on the shape of the position expression,
854 so strip conversions to make sure it's exposed. */
855 curpos = remove_conversions (curpos, true);
857 if (TREE_CODE (new_record_type) == UNION_TYPE)
858 pos = bitsize_zero_node, align = 0;
860 pos = compute_related_constant (curpos, last_pos);
862 if (!pos && TREE_CODE (curpos) == MULT_EXPR
863 && host_integerp (TREE_OPERAND (curpos, 1), 1))
865 tree offset = TREE_OPERAND (curpos, 0);
866 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
868 /* An offset which is a bitwise AND with a negative power of 2
869 means an alignment corresponding to this power of 2. */
870 offset = remove_conversions (offset, true);
871 if (TREE_CODE (offset) == BIT_AND_EXPR
872 && host_integerp (TREE_OPERAND (offset, 1), 0)
873 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
876 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
877 if (exact_log2 (pow) > 0)
881 pos = compute_related_constant (curpos,
882 round_up (last_pos, align));
884 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
885 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
886 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
887 && host_integerp (TREE_OPERAND
888 (TREE_OPERAND (curpos, 0), 1),
893 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
894 pos = compute_related_constant (curpos,
895 round_up (last_pos, align));
897 else if (potential_alignment_gap (prev_old_field, old_field,
900 align = TYPE_ALIGN (field_type);
901 pos = compute_related_constant (curpos,
902 round_up (last_pos, align));
905 /* If we can't compute a position, set it to zero.
907 ??? We really should abort here, but it's too much work
908 to get this correct for all cases. */
911 pos = bitsize_zero_node;
913 /* See if this type is variable-sized and make a pointer type
914 and indicate the indirection if so. Beware that the debug
915 back-end may adjust the position computed above according
916 to the alignment of the field type, i.e. the pointer type
917 in this case, if we don't preventively counter that. */
918 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
920 field_type = build_pointer_type (field_type);
921 if (align != 0 && TYPE_ALIGN (field_type) > align)
923 field_type = copy_node (field_type);
924 TYPE_ALIGN (field_type) = align;
929 /* Make a new field name, if necessary. */
930 if (var || align != 0)
935 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
936 align / BITS_PER_UNIT);
938 strcpy (suffix, "XVL");
940 field_name = concat_name (field_name, suffix);
943 new_field = create_field_decl (field_name, field_type,
945 DECL_SIZE (old_field), pos, 0);
946 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
947 TYPE_FIELDS (new_record_type) = new_field;
949 /* If old_field is a QUAL_UNION_TYPE, take its size as being
950 zero. The only time it's not the last field of the record
951 is when there are other components at fixed positions after
952 it (meaning there was a rep clause for every field) and we
953 want to be able to encode them. */
954 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
955 (TREE_CODE (TREE_TYPE (old_field))
958 : DECL_SIZE (old_field));
959 prev_old_field = old_field;
962 TYPE_FIELDS (new_record_type)
963 = nreverse (TYPE_FIELDS (new_record_type));
965 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
968 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
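/* For orientation, a rough summary of the renaming done above (see
   exp_dbug.ads for the authoritative description): a variable-sized record
   named "t" gets a parallel type "t___XVE" ("t___XVU" for a QUAL_UNION_TYPE);
   inside it, variable-sized fields become pointer-typed fields with an "XVL"
   suffix, and fields repositioned for alignment get an "XVA" suffix followed
   by the alignment in bytes.  */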
971 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
974 add_parallel_type (tree decl, tree parallel_type)
978 while (DECL_PARALLEL_TYPE (d))
979 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
981 SET_DECL_PARALLEL_TYPE (d, parallel_type);
984 /* Return the parallel type associated to a type, if any. */
987 get_parallel_type (tree type)
989 if (TYPE_STUB_DECL (type))
990 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
995 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
996 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
997 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
998 replace a value of zero with the old size. If HAS_REP is true, we take the
999 MAX of the end position of this field with LAST_SIZE. In all other cases,
1000 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1003 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1006 tree type = TREE_TYPE (last_size);
1009 if (!special || TREE_CODE (size) != COND_EXPR)
1011 new_size = size_binop (PLUS_EXPR, first_bit, size);
1013 new_size = size_binop (MAX_EXPR, last_size, new_size);
1017 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1018 integer_zerop (TREE_OPERAND (size, 1))
1019 ? last_size : merge_sizes (last_size, first_bit,
1020 TREE_OPERAND (size, 1),
1022 integer_zerop (TREE_OPERAND (size, 2))
1023 ? last_size : merge_sizes (last_size, first_bit,
1024 TREE_OPERAND (size, 2),
1027 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1028 when fed through substitute_in_expr) into thinking that a constant
1029 size is not constant. */
1030 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1031 new_size = TREE_OPERAND (new_size, 0);
1036 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1037 related by the addition of a constant. Return that constant if so. */
1040 compute_related_constant (tree op0, tree op1)
1042 tree op0_var, op1_var;
1043 tree op0_con = split_plus (op0, &op0_var);
1044 tree op1_con = split_plus (op1, &op1_var);
1045 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1047 if (operand_equal_p (op0_var, op1_var, 0))
1049 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1055 /* Utility function of above to split a tree IN, which may be a sum, into a
1056 constant part, which is returned, and a variable part, which is stored
1057 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of bitsizetype. */
1061 split_plus (tree in, tree *pvar)
1063 /* Strip NOPS in order to ease the tree traversal and maximize the
1064 potential for constant or plus/minus discovery. We need to be careful
1065 to always return and set *pvar to bitsizetype trees, but it's worth
1069 *pvar = convert (bitsizetype, in);
1071 if (TREE_CODE (in) == INTEGER_CST)
1073 *pvar = bitsize_zero_node;
1074 return convert (bitsizetype, in);
1076 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1078 tree lhs_var, rhs_var;
1079 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1080 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1082 if (lhs_var == TREE_OPERAND (in, 0)
1083 && rhs_var == TREE_OPERAND (in, 1))
1084 return bitsize_zero_node;
1086 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1087 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1090 return bitsize_zero_node;
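/* A small worked example of the two helpers above: with OP0 = N + 16 and
   OP1 = N + 8 (both in bitsizetype), split_plus returns 16 and 8 and sets
   *PVAR to N for both operands, so compute_related_constant returns the
   constant 8.  When the variable parts cannot be matched, no constant is
   returned.  */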
1093 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1094 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1095 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1096 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1097 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1098 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1099 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1100 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1101 invisible reference. */
1104 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1105 bool return_unconstrained_p, bool return_by_direct_ref_p,
1106 bool return_by_invisi_ref_p)
1108 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1109 the subprogram formal parameters. This list is generated by traversing
1110 the input list of PARM_DECL nodes. */
1111 tree param_type_list = NULL_TREE;
1114 for (t = param_decl_list; t; t = TREE_CHAIN (t))
1115 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1117 /* The list of the function parameter types has to be terminated by the void
1118 type to signal to the back-end that we are not dealing with a variable
1119 parameter subprogram, but that it has a fixed number of parameters. */
1120 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1122 /* The list of argument types has been created in reverse so reverse it. */
1123 param_type_list = nreverse (param_type_list);
1125 type = build_function_type (return_type, param_type_list);
1127 /* TYPE may have been shared since GCC hashes types. If it has a different
1128 CICO_LIST, make a copy. Likewise for the various flags. */
1129 if (TYPE_CI_CO_LIST (type) != cico_list
1130 || TYPE_RETURN_UNCONSTRAINED_P (type) != return_unconstrained_p
1131 || TYPE_RETURN_BY_DIRECT_REF_P (type) != return_by_direct_ref_p
1132 || TREE_ADDRESSABLE (type) != return_by_invisi_ref_p)
1134 type = copy_type (type);
1135 TYPE_CI_CO_LIST (type) = cico_list;
1136 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1137 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1138 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
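/* A hedged usage sketch: the FUNCTION_TYPE of a procedure taking two In
   parameters of type Integer and using no copy-in/copy-out mechanism could
   be built along these lines (parameter names are illustrative only):

     tree p1 = create_param_decl (get_identifier ("x"), integer_type_node,
                                  true);
     tree p2 = create_param_decl (get_identifier ("y"), integer_type_node,
                                  true);
     TREE_CHAIN (p1) = p2;
     tree fntype = create_subprog_type (void_type_node, p1, NULL_TREE,
                                        false, false, false);
*/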
1144 /* Return a copy of TYPE but safe to modify in any way. */
1147 copy_type (tree type)
1149 tree new_type = copy_node (type);
1151 /* Unshare the language-specific data. */
1152 if (TYPE_LANG_SPECIFIC (type))
1154 TYPE_LANG_SPECIFIC (new_type) = NULL;
1155 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1158 /* And the contents of the language-specific slot if needed. */
1159 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1160 && TYPE_RM_VALUES (type))
1162 TYPE_RM_VALUES (new_type) = NULL_TREE;
1163 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1164 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1165 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1168 /* copy_node clears this field instead of copying it, because it is
1169 aliased with TREE_CHAIN. */
1170 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1172 TYPE_POINTER_TO (new_type) = 0;
1173 TYPE_REFERENCE_TO (new_type) = 0;
1174 TYPE_MAIN_VARIANT (new_type) = new_type;
1175 TYPE_NEXT_VARIANT (new_type) = 0;
1180 /* Return a subtype of sizetype with range MIN to MAX and whose
1181 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1182 of the associated TYPE_DECL. */
1185 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1187 /* First build a type for the desired range. */
1188 tree type = build_index_2_type (min, max);
1190 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1191 if (TYPE_INDEX_TYPE (type) == index)
1194 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1195 no way of sharing these types, but that's only a small hole. */
1196 if (TYPE_INDEX_TYPE (type))
1197 type = copy_type (type);
1199 SET_TYPE_INDEX_TYPE (type, index);
1200 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1205 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1206 sizetype is used. */
1209 create_range_type (tree type, tree min, tree max)
1213 if (type == NULL_TREE)
1216 /* First build a type with the base range. */
1218 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1220 min = convert (type, min);
1221 max = convert (type, max);
1223 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1224 if (TYPE_RM_MIN_VALUE (range_type)
1225 && TYPE_RM_MAX_VALUE (range_type)
1226 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1227 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1230 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1231 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1232 range_type = copy_type (range_type);
1234 /* Then set the actual range. */
1235 SET_TYPE_RM_MIN_VALUE (range_type, min);
1236 SET_TYPE_RM_MAX_VALUE (range_type, max);
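/* For example, a signed 32-bit subtype constrained to 1 .. 10 could be
   obtained with (a sketch assuming gnat_type_for_size as declared in
   gigi.h):

     tree base = gnat_type_for_size (32, 0);
     tree rng = create_range_type (base, build_int_cst (base, 1),
                                   build_int_cst (base, 10));

   The TYPE_RM_MIN/MAX_VALUE bounds carry the Ada view of the range; the
   base range of the underlying type is what the code generator sees.  */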
1241 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1242 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving its data type. */
1246 create_type_stub_decl (tree type_name, tree type)
1248 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1249 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1250 emitted in DWARF. */
1251 tree type_decl = build_decl (input_location,
1252 TYPE_DECL, type_name, type);
1253 DECL_ARTIFICIAL (type_decl) = 1;
1257 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1258 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1259 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1260 true if we need to write debug information about this type. GNAT_NODE
1261 is used for the position of the decl. */
1264 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1265 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1267 enum tree_code code = TREE_CODE (type);
1268 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1271 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1272 gcc_assert (!TYPE_IS_DUMMY_P (type));
1274 /* If the type hasn't been named yet, we're naming it; preserve an existing
1275 TYPE_STUB_DECL that has been attached to it for some purpose. */
1276 if (!named && TYPE_STUB_DECL (type))
1278 type_decl = TYPE_STUB_DECL (type);
1279 DECL_NAME (type_decl) = type_name;
1282 type_decl = build_decl (input_location,
1283 TYPE_DECL, type_name, type);
1285 DECL_ARTIFICIAL (type_decl) = artificial_p;
1286 gnat_pushdecl (type_decl, gnat_node);
1287 process_attributes (type_decl, attr_list);
1289 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1290 This causes the name to be also viewed as a "tag" by the debug
1291 back-end, with the advantage that no DW_TAG_typedef is emitted
1292 for artificial "tagged" types in DWARF. */
1294 TYPE_STUB_DECL (type) = type_decl;
1296 /* Pass the type declaration to the debug back-end unless this is an
1297 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1298 type for which debugging information was not requested, or else an
1299 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1300 handled separately. And do not pass dummy types either. */
1301 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1302 DECL_IGNORED_P (type_decl) = 1;
1303 else if (code != ENUMERAL_TYPE
1304 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1305 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1306 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1307 && !(code == RECORD_TYPE
1309 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1310 rest_of_type_decl_compilation (type_decl);
1315 /* Return a VAR_DECL or CONST_DECL node.
1317 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1318 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1319 the GCC tree for an optional initial expression; NULL_TREE if none.
1321 CONST_FLAG is true if this variable is constant, in which case we might
1322 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1324 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1325 definition to be made visible outside of the current compilation unit, for
1326 instance variable definitions in a package specification.
1328 EXTERN_FLAG is true when processing an external variable declaration (as
1329 opposed to a definition: no storage is to be allocated for the variable).
1331 STATIC_FLAG is only relevant when not at top level. In that case
1332 it indicates whether to always allocate storage to the variable.
1334 GNAT_NODE is used for the position of the decl. */
1337 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1338 bool const_flag, bool public_flag, bool extern_flag,
1339 bool static_flag, bool const_decl_allowed_p,
1340 struct attrib *attr_list, Node_Id gnat_node)
1344 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1345 && (global_bindings_p () || static_flag
1346 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1347 : TREE_CONSTANT (var_init)));
1349 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1350 case the initializer may be used in-lieu of the DECL node (as done in
1351 Identifier_to_gnu). This is useful to prevent the need of elaboration
1352 code when an identifier for which such a decl is made is in turn used as
1353 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1354 but extra constraints apply to this choice (see below) and are not
1355 relevant to the distinction we wish to make. */
1356 bool constant_p = const_flag && init_const;
1358 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1359 and may be used for scalars in general but not for aggregates. */
1361 = build_decl (input_location,
1362 (constant_p && const_decl_allowed_p
1363 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1366 /* If this is external, throw away any initializations (they will be done
1367 elsewhere) unless this is a constant for which we would like to remain
1368 able to get the initializer. If we are defining a global here, leave a
1369 constant initialization and save any variable elaborations for the
1370 elaboration routine. If we are just annotating types, throw away the
1371 initialization if it isn't a constant. */
1372 if ((extern_flag && !constant_p)
1373 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1374 var_init = NULL_TREE;
1376 /* At the global level, an initializer requiring code to be generated
1377 produces elaboration statements. Check that such statements are allowed,
1378 that is, not violating a No_Elaboration_Code restriction. */
1379 if (global_bindings_p () && var_init != 0 && !init_const)
1380 Check_Elaboration_Code_Allowed (gnat_node);
1382 DECL_INITIAL (var_decl) = var_init;
1383 TREE_READONLY (var_decl) = const_flag;
1384 DECL_EXTERNAL (var_decl) = extern_flag;
1385 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1386 TREE_CONSTANT (var_decl) = constant_p;
1387 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1388 = TYPE_VOLATILE (type);
1390 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1391 try to fiddle with DECL_COMMON. However, on platforms that don't
1392 support global BSS sections, uninitialized global variables would
1393 go in DATA instead, thus increasing the size of the executable. */
1395 && TREE_CODE (var_decl) == VAR_DECL
1396 && TREE_PUBLIC (var_decl)
1397 && !have_global_bss_p ())
1398 DECL_COMMON (var_decl) = 1;
1400 /* If it's public and not external, always allocate storage for it.
1401 At the global binding level we need to allocate static storage for the
1402 variable if and only if it's not external. If we are not at the top level
1403 we allocate automatic storage unless requested not to. */
1404 TREE_STATIC (var_decl)
1405 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1407 /* For an external constant whose initializer is not absolute, do not emit
1408 debug info. In DWARF this would mean a global relocation in a read-only
1409 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1412 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1413 != null_pointer_node)
1414 DECL_IGNORED_P (var_decl) = 1;
1416 if (TREE_CODE (var_decl) == VAR_DECL)
1419 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1420 process_attributes (var_decl, attr_list);
1423 /* Add this decl to the current binding level. */
1424 gnat_pushdecl (var_decl, gnat_node);
1426 if (TREE_SIDE_EFFECTS (var_decl))
1427 TREE_ADDRESSABLE (var_decl) = 1;
1429 if (TREE_CODE (var_decl) != CONST_DECL)
1431 if (global_bindings_p ())
1432 rest_of_decl_compilation (var_decl, true, 0);
1435 expand_decl (var_decl);
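/* A minimal sketch: a global, statically allocated integer variable with a
   constant initializer might be created as follows (Empty being the usual
   GNAT_NODE when no source location applies):

     tree v = create_var_decl_1 (get_identifier ("counter"), NULL_TREE,
                                 integer_type_node, integer_zero_node,
                                 false, true, false, true, true,
                                 NULL, Empty);

   Here CONST_FLAG is false, PUBLIC_FLAG and STATIC_FLAG are true, and
   CONST_DECL_ALLOWED_P is true, matching the parameter order of the
   definition above.  */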
1440 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1443 aggregate_type_contains_array_p (tree type)
1445 switch (TREE_CODE (type))
1449 case QUAL_UNION_TYPE:
1452 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1453 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1454 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1467 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1468 its type and RECORD_TYPE is the type of the enclosing record. PACKED is
1469 1 if the enclosing record is packed, -1 if it has Component_Alignment of
1470 Storage_Unit. If SIZE is nonzero, it is the specified size of the field.
1471 If POS is nonzero, it is the bit position. If ADDRESSABLE is nonzero, it
1472 means we are allowed to take the address of the field; if it is negative,
1473 we should not make a bitfield, which is used by make_aligning_type. */
1476 create_field_decl (tree field_name, tree field_type, tree record_type,
1477 int packed, tree size, tree pos, int addressable)
1479 tree field_decl = build_decl (input_location,
1480 FIELD_DECL, field_name, field_type);
1482 DECL_CONTEXT (field_decl) = record_type;
1483 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1485 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1486 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1487 Likewise for an aggregate without specified position that contains an
1488 array, because in this case slices of variable length of this array
1489 must be handled by GCC and variable-sized objects need to be aligned
1490 to at least a byte boundary. */
1491 if (packed && (TYPE_MODE (field_type) == BLKmode
1493 && AGGREGATE_TYPE_P (field_type)
1494 && aggregate_type_contains_array_p (field_type))))
1495 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1497 /* If a size is specified, use it. Otherwise, if the record type is packed,
1498 compute a size to use, which may differ from the object's natural size.
1499 We always set a size in this case to trigger the checks for bitfield
1500 creation below, which is typically required when no position has been specified. */
1503 size = convert (bitsizetype, size);
1504 else if (packed == 1)
1506 size = rm_size (field_type);
1507 if (TYPE_MODE (field_type) == BLKmode)
1508 size = round_up (size, BITS_PER_UNIT);
1511 /* If ADDRESSABLE permits it, we make a bitfield when a size is specified,
1512 for two reasons: first, if the size differs from the natural size;
1513 second, if the alignment is insufficient. There are a number of
1514 ways the latter can be true.
1516 We never make a bitfield if the type of the field has a nonconstant size,
1517 because no such entity requiring bitfield operations should reach here.
1519 We do *preventively* make a bitfield when there might be the need for it
1520 but we don't have all the necessary information to decide, as is the case
1521 of a field with no specified position in a packed record.
1523 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1524 in layout_decl or finish_record_type to clear the bit_field indication if
1525 it is in fact not needed. */
1526 if (addressable >= 0
1528 && TREE_CODE (size) == INTEGER_CST
1529 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1530 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1531 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1533 || (TYPE_ALIGN (record_type) != 0
1534 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1536 DECL_BIT_FIELD (field_decl) = 1;
1537 DECL_SIZE (field_decl) = size;
1538 if (!packed && !pos)
1540 if (TYPE_ALIGN (record_type) != 0
1541 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1542 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1544 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1548 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1550 /* Bump the alignment if need be, either for bitfield/packing purposes or
1551 to satisfy the type requirements if no such consideration applies. When
1552 we get the alignment from the type, indicate if this is from an explicit
1553 user request, which prevents stor-layout from lowering it later on. */
1555 unsigned int bit_align
1556 = (DECL_BIT_FIELD (field_decl) ? 1
1557 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1559 if (bit_align > DECL_ALIGN (field_decl))
1560 DECL_ALIGN (field_decl) = bit_align;
1561 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1563 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1564 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1570 /* We need to pass in the alignment the DECL is known to have.
1571 This is the lowest-order bit set in POS, but no more than
1572 the alignment of the record, if one is specified. Note
1573 that an alignment of 0 is taken as infinite. */
1574 unsigned int known_align;
1576 if (host_integerp (pos, 1))
1577 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1579 known_align = BITS_PER_UNIT;
1581 if (TYPE_ALIGN (record_type)
1582 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1583 known_align = TYPE_ALIGN (record_type);
1585 layout_decl (field_decl, known_align);
1586 SET_DECL_OFFSET_ALIGN (field_decl,
1587 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1589 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1590 &DECL_FIELD_BIT_OFFSET (field_decl),
1591 DECL_OFFSET_ALIGN (field_decl), pos);
1594 /* In addition to what our caller says, claim the field is addressable if we
1595 know that its type is not suitable.
1597 The field may also be "technically" nonaddressable, meaning that even if
1598 we attempt to take the field's address we will actually get the address
1599 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1600 value we have at this point is not accurate enough, so we don't account
1601 for this here and let finish_record_type decide. */
1602 if (!addressable && !type_for_nonaliased_component_p (field_type))
1605 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1610 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1611 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1612 (either an In parameter or an address of a pass-by-ref parameter). */
1615 create_param_decl (tree param_name, tree param_type, bool readonly)
1617 tree param_decl = build_decl (input_location,
1618 PARM_DECL, param_name, param_type);
1620 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1621 can lead to various ABI violations. */
1622 if (targetm.calls.promote_prototypes (NULL_TREE)
1623 && INTEGRAL_TYPE_P (param_type)
1624 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1626 /* We have to be careful about biased types here. Make a subtype
1627 of integer_type_node with the proper biasing. */
1628 if (TREE_CODE (param_type) == INTEGER_TYPE
1629 && TYPE_BIASED_REPRESENTATION_P (param_type))
1632 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1633 TREE_TYPE (subtype) = integer_type_node;
1634 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1635 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1636 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1637 param_type = subtype;
1640 param_type = integer_type_node;
1643 DECL_ARG_TYPE (param_decl) = param_type;
1644 TREE_READONLY (param_decl) = readonly;
1648 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1651 process_attributes (tree decl, struct attrib *attr_list)
1653 for (; attr_list; attr_list = attr_list->next)
1654 switch (attr_list->type)
1656 case ATTR_MACHINE_ATTRIBUTE:
1657 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1659 ATTR_FLAG_TYPE_IN_PLACE);
1662 case ATTR_LINK_ALIAS:
1663 if (! DECL_EXTERNAL (decl))
1665 TREE_STATIC (decl) = 1;
1666 assemble_alias (decl, attr_list->name);
1670 case ATTR_WEAK_EXTERNAL:
1672 declare_weak (decl);
1674 post_error ("?weak declarations not supported on this target",
1675 attr_list->error_point);
1678 case ATTR_LINK_SECTION:
1679 if (targetm.have_named_sections)
1681 DECL_SECTION_NAME (decl)
1682 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1683 IDENTIFIER_POINTER (attr_list->name));
1684 DECL_COMMON (decl) = 0;
1687 post_error ("?section attributes are not supported for this target",
1688 attr_list->error_point);
1691 case ATTR_LINK_CONSTRUCTOR:
1692 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1693 TREE_USED (decl) = 1;
1696 case ATTR_LINK_DESTRUCTOR:
1697 DECL_STATIC_DESTRUCTOR (decl) = 1;
1698 TREE_USED (decl) = 1;
1701 case ATTR_THREAD_LOCAL_STORAGE:
1702 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1703 DECL_COMMON (decl) = 0;
1708 /* Record DECL as a global renaming pointer. */
1711 record_global_renaming_pointer (tree decl)
1713 gcc_assert (DECL_RENAMED_OBJECT (decl));
1714 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1717 /* Invalidate the global renaming pointers. */
1720 invalidate_global_renaming_pointers (void)
1725 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1726 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1728 VEC_free (tree, gc, global_renaming_pointers);
1731 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1735 value_factor_p (tree value, HOST_WIDE_INT factor)
1737 if (host_integerp (value, 1))
1738 return tree_low_cst (value, 1) % factor == 0;
1740 if (TREE_CODE (value) == MULT_EXPR)
1741 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1742 || value_factor_p (TREE_OPERAND (value, 1), factor));
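/* Example (editor's addition): with FACTOR == 8,

     value_factor_p (bitsize_int (24), 8)  => true   (24 % 8 == 0)
     value_factor_p (bitsize_int (20), 8)  => false  (20 % 8 != 0)

   and a non-constant VALUE such as a MULT_EXPR of N by bitsize_int (8) also
   returns true, because one operand of the product is known to be a multiple
   of 8 whatever the run-time value of N is.  */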
1747 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1748 unless we can prove these two fields are laid out in such a way that no gap
1749 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1750 is the distance in bits between the end of PREV_FIELD and the starting
1751 position of CURR_FIELD. It is ignored if null. */
1754 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1756 /* If this is the first field of the record, there cannot be any gap. */
1760 /* If the previous field is a union type, then return False: The only
1761 time when such a field is not the last field of the record is when
1762 there are other components at fixed positions after it (meaning there
1763 was a rep clause for every field), in which case we don't want the
1764 alignment constraint to override them. */
1765 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1768 /* If the distance between the end of prev_field and the beginning of
1769 curr_field is constant, then there is a gap if and only if the value
1770 of this constant is nonzero. */
1771 if (offset && host_integerp (offset, 1))
1772 return !integer_zerop (offset);
1774 /* If the size and position of the previous field are constant,
1775 then check the sum of this size and position. There will be a gap
1776 iff it is not a multiple of the current field alignment. */
1777 if (host_integerp (DECL_SIZE (prev_field), 1)
1778 && host_integerp (bit_position (prev_field), 1))
1779 return ((tree_low_cst (bit_position (prev_field), 1)
1780 + tree_low_cst (DECL_SIZE (prev_field), 1))
1781 % DECL_ALIGN (curr_field) != 0);
1783 /* If both the position and size of the previous field are multiples
1784 of the current field alignment, there cannot be any gap. */
1785 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1786 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1789 /* Fallback: return that there may be a potential gap. */
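/* Worked example (editor's addition): assume PREV_FIELD is a 1-byte
   component at bit position 0 and CURR_FIELD requires 32-bit alignment.
   Position (0) plus size (8) is 8, which is not a multiple of 32, so the
   function returns true: padding may be inserted before CURR_FIELD.  Had
   PREV_FIELD covered bits 0 .. 31, the sum would be 32 and the function
   would return false.  */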
1793 /* Returns a LABEL_DECL node for LABEL_NAME. */
1796 create_label_decl (tree label_name)
1798 tree label_decl = build_decl (input_location,
1799 LABEL_DECL, label_name, void_type_node);
1801 DECL_CONTEXT (label_decl) = current_function_decl;
1802 DECL_MODE (label_decl) = VOIDmode;
1803 DECL_SOURCE_LOCATION (label_decl) = input_location;
1808 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1809 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1810 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1811 PARM_DECL nodes chained through the TREE_CHAIN field).
1813 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1814 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1817 create_subprog_decl (tree subprog_name, tree asm_name,
1818 tree subprog_type, tree param_decl_list, bool inline_flag,
1819 bool public_flag, bool extern_flag,
1820 struct attrib *attr_list, Node_Id gnat_node)
1822 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1824 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1825 TREE_TYPE (subprog_type));
1827 /* If this is a non-inline function nested inside an inlined external
1828 function, we cannot honor both requests without cloning the nested
1829 function in the current unit since it is private to the other unit.
1830 We could inline the nested function as well but it's probably better
1831 to err on the side of too little inlining. */
1833 && current_function_decl
1834 && DECL_DECLARED_INLINE_P (current_function_decl)
1835 && DECL_EXTERNAL (current_function_decl))
1836 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1838 DECL_EXTERNAL (subprog_decl) = extern_flag;
1839 TREE_PUBLIC (subprog_decl) = public_flag;
1840 TREE_STATIC (subprog_decl) = 1;
1841 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1842 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1843 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1844 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1845 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1847 DECL_ARTIFICIAL (result_decl) = 1;
1848 DECL_IGNORED_P (result_decl) = 1;
1849 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1850 DECL_RESULT (subprog_decl) = result_decl;
1854 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1856 /* The expand_main_function circuitry expects "main_identifier_node" to
1857 designate the DECL_NAME of the 'main' entry point, in turn expected
1858 to be declared as the "main" function literally by default. Ada
1859 program entry points are typically declared with a different name
1860 within the binder-generated file, exported as 'main' to satisfy the
1861 system's expectations. Force main_identifier_node in this case. */
1862 if (asm_name == main_identifier_node)
1863 DECL_NAME (subprog_decl) = main_identifier_node;
1866 process_attributes (subprog_decl, attr_list);
1868 /* Add this decl to the current binding level. */
1869 gnat_pushdecl (subprog_decl, gnat_node);
1871 /* Output the assembler code and/or RTL for the declaration. */
1872 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1874 return subprog_decl;
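/* Minimal usage sketch (editor's addition, hypothetical identifiers): an
   imported C function taking one address argument and returning "int"
   could be declared along the lines of

     tree ftype = build_function_type_list (integer_type_node,
                                            ptr_void_type_node, NULL_TREE);
     tree fdecl = create_subprog_decl (get_identifier ("puts"), NULL_TREE,
                                       ftype, NULL_TREE,
                                       false, true, true, NULL, gnat_node);

   i.e. not inlined, public and external, with no parameter decls or
   attributes; GNAT_NODE here stands for whatever node carries the source
   location.  */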
1877 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1878 body. This routine needs to be invoked before processing the declarations
1879 appearing in the subprogram. */
1882 begin_subprog_body (tree subprog_decl)
1886 current_function_decl = subprog_decl;
1887 announce_function (subprog_decl);
1889 /* Enter a new binding level and show that all the parameters belong to
1893 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1894 param_decl = TREE_CHAIN (param_decl))
1895 DECL_CONTEXT (param_decl) = subprog_decl;
1897 make_decl_rtl (subprog_decl);
1899 /* We handle pending sizes via the elaboration of types, so we don't need to
1900 save them. This causes them to be marked as part of the outer function
1901 and then discarded. */
1902 get_pending_sizes ();
1905 /* Finish the definition of the current subprogram BODY and finalize it. */
1908 end_subprog_body (tree body)
1910 tree fndecl = current_function_decl;
1912 /* Mark the BLOCK for this level as being for this function and pop the
1913 level. Since the vars in it are the parameters, clear them. */
1914 BLOCK_VARS (current_binding_level->block) = NULL_TREE;
1915 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1916 DECL_INITIAL (fndecl) = current_binding_level->block;
1919 /* We handle pending sizes via the elaboration of types, so we don't
1920 need to save them. */
1921 get_pending_sizes ();
1923 /* Mark the RESULT_DECL as being in this subprogram. */
1924 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1926 DECL_SAVED_TREE (fndecl) = body;
1928 current_function_decl = DECL_CONTEXT (fndecl);
1931 /* We cannot track the location of errors past this point. */
1932 error_gnat_node = Empty;
1934 /* If we're only annotating types, don't actually compile this function. */
1935 if (type_annotate_only)
1938 /* Dump functions before gimplification. */
1939 dump_function (TDI_original, fndecl);
1941 /* ??? This special handling of nested functions is probably obsolete. */
1942 if (!DECL_CONTEXT (fndecl))
1943 cgraph_finalize_function (fndecl, false);
1945 /* Register this function with cgraph just far enough to get it
1946 added to our parent's nested function list. */
1947 (void) cgraph_node (fndecl);
1951 gnat_builtin_function (tree decl)
1953 gnat_pushdecl (decl, Empty);
1957 /* Return an integer type with the number of bits of precision given by
1958 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1959 it is a signed type. */
1962 gnat_type_for_size (unsigned precision, int unsignedp)
1967 if (precision <= 2 * MAX_BITS_PER_WORD
1968 && signed_and_unsigned_types[precision][unsignedp])
1969 return signed_and_unsigned_types[precision][unsignedp];
1972 t = make_unsigned_type (precision);
1974 t = make_signed_type (precision);
1976 if (precision <= 2 * MAX_BITS_PER_WORD)
1977 signed_and_unsigned_types[precision][unsignedp] = t;
1981 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1982 TYPE_NAME (t) = get_identifier (type_name);
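/* Example (editor's addition): gnat_type_for_size (32, 1) returns the
   cached 32-bit unsigned type, creating it on first use and giving it the
   name "UNSIGNED_32"; gnat_type_for_size (32, 0) likewise yields
   "SIGNED_32".  Only precisions up to 2 * MAX_BITS_PER_WORD are cached in
   signed_and_unsigned_types.  */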
1988 /* Likewise for floating-point types. */
1991 float_type_for_precision (int precision, enum machine_mode mode)
1996 if (float_types[(int) mode])
1997 return float_types[(int) mode];
1999 float_types[(int) mode] = t = make_node (REAL_TYPE);
2000 TYPE_PRECISION (t) = precision;
2003 gcc_assert (TYPE_MODE (t) == mode);
2006 sprintf (type_name, "FLOAT_%d", precision);
2007 TYPE_NAME (t) = get_identifier (type_name);
2013 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2014 an unsigned type; otherwise a signed type is returned. */
2017 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2019 if (mode == BLKmode)
2022 if (mode == VOIDmode)
2023 return void_type_node;
2025 if (COMPLEX_MODE_P (mode))
2028 if (SCALAR_FLOAT_MODE_P (mode))
2029 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2031 if (SCALAR_INT_MODE_P (mode))
2032 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2034 if (VECTOR_MODE_P (mode))
2036 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2037 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2039 return build_vector_type_for_mode (inner_type, mode);
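/* Example (editor's addition): on a target where SImode is 32 bits wide,
   gnat_type_for_mode (SImode, 1) reduces to gnat_type_for_size (32, 1),
   while a vector mode such as V4SImode is rebuilt as a vector of four such
   elements through build_vector_type_for_mode on the inner mode's type.  */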
2045 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2048 gnat_unsigned_type (tree type_node)
2050 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2052 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2054 type = copy_node (type);
2055 TREE_TYPE (type) = type_node;
2057 else if (TREE_TYPE (type_node)
2058 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2059 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2061 type = copy_node (type);
2062 TREE_TYPE (type) = TREE_TYPE (type_node);
2068 /* Return the signed version of a TYPE_NODE, a scalar type. */
2071 gnat_signed_type (tree type_node)
2073 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2075 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2077 type = copy_node (type);
2078 TREE_TYPE (type) = type_node;
2080 else if (TREE_TYPE (type_node)
2081 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2082 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2084 type = copy_node (type);
2085 TREE_TYPE (type) = TREE_TYPE (type_node);
2091 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2092 transparently converted to each other. */
2095 gnat_types_compatible_p (tree t1, tree t2)
2097 enum tree_code code;
2099 /* This is the default criterion. */
2100 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2103 /* We only check structural equivalence here. */
2104 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2107 /* Vector types are also compatible if they have the same number of subparts
2108 and the same form of (scalar) element type. */
2109 if (code == VECTOR_TYPE
2110 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2111 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2112 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2115 /* Array types are also compatible if they are constrained and have
2116 the same component type and the same domain. */
2117 if (code == ARRAY_TYPE
2118 && TREE_TYPE (t1) == TREE_TYPE (t2)
2119 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2120 || (TYPE_DOMAIN (t1)
2122 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2123 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2124 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2125 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2128 /* Padding record types are also compatible if they pad the same
2129 type and have the same constant size. */
2130 if (code == RECORD_TYPE
2131 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2132 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2133 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
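/* Example (editor's addition): two distinct constrained array subtypes with
   the same component type and identical constant bounds, say two views of
   "String (1 .. 10)", pass the ARRAY_TYPE test above and are therefore
   transparently convertible even though their main variants differ.  A
   padding record wrapping that array with the same constant size passes the
   RECORD_TYPE test as well.  */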
2139 /* EXP is an expression for the size of an object. If this size contains
2140 discriminant references, replace them with the maximum (if MAX_P) or
2141 minimum (if !MAX_P) possible value of the discriminant. */
2144 max_size (tree exp, bool max_p)
2146 enum tree_code code = TREE_CODE (exp);
2147 tree type = TREE_TYPE (exp);
2149 switch (TREE_CODE_CLASS (code))
2151 case tcc_declaration:
2156 if (code == CALL_EXPR)
2161 t = maybe_inline_call_in_expr (exp);
2163 return max_size (t, max_p);
2165 n = call_expr_nargs (exp);
2167 argarray = (tree *) alloca (n * sizeof (tree));
2168 for (i = 0; i < n; i++)
2169 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2170 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2175 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2176 modify. Otherwise, we treat it like a variable. */
2177 if (!CONTAINS_PLACEHOLDER_P (exp))
2180 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2182 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2184 case tcc_comparison:
2185 return max_p ? size_one_node : size_zero_node;
2189 case tcc_expression:
2190 switch (TREE_CODE_LENGTH (code))
2193 if (code == NON_LVALUE_EXPR)
2194 return max_size (TREE_OPERAND (exp, 0), max_p);
2197 fold_build1 (code, type,
2198 max_size (TREE_OPERAND (exp, 0),
2199 code == NEGATE_EXPR ? !max_p : max_p));
2202 if (code == COMPOUND_EXPR)
2203 return max_size (TREE_OPERAND (exp, 1), max_p);
2205 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2206 may provide a tighter bound on max_size. */
2207 if (code == MINUS_EXPR
2208 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2210 tree lhs = fold_build2 (MINUS_EXPR, type,
2211 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2212 TREE_OPERAND (exp, 1));
2213 tree rhs = fold_build2 (MINUS_EXPR, type,
2214 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2215 TREE_OPERAND (exp, 1));
2216 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2217 max_size (lhs, max_p),
2218 max_size (rhs, max_p));
2222 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2223 tree rhs = max_size (TREE_OPERAND (exp, 1),
2224 code == MINUS_EXPR ? !max_p : max_p);
2226 /* Special-case wanting the maximum value of a MIN_EXPR.
2227 In that case, if one side overflows, return the other.
2228 sizetype is signed, but we know sizes are non-negative.
2229 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2230 overflowing or the maximum possible value and the RHS
2234 && TREE_CODE (rhs) == INTEGER_CST
2235 && TREE_OVERFLOW (rhs))
2239 && TREE_CODE (lhs) == INTEGER_CST
2240 && TREE_OVERFLOW (lhs))
2242 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2243 && ((TREE_CODE (lhs) == INTEGER_CST
2244 && TREE_OVERFLOW (lhs))
2245 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2246 && !TREE_CONSTANT (rhs))
2249 return fold_build2 (code, type, lhs, rhs);
2253 if (code == SAVE_EXPR)
2255 else if (code == COND_EXPR)
2256 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2257 max_size (TREE_OPERAND (exp, 1), max_p),
2258 max_size (TREE_OPERAND (exp, 2), max_p));
2261 /* Other tree classes cannot happen. */
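/* Worked example (editor's addition): if the size of a discriminated record
   is "(D ? 8 : 4) + 16" bytes for a discriminant D, max_size with MAX_P true
   folds the COND_EXPR through MAX_EXPR and yields 24, while MAX_P false goes
   through MIN_EXPR and yields 20.  A direct reference to the discriminant is
   likewise replaced by a bound of its subtype via the tcc_reference case.  */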
2269 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2270 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2271 Return a constructor for the template. */
2274 build_template (tree template_type, tree array_type, tree expr)
2276 tree template_elts = NULL_TREE;
2277 tree bound_list = NULL_TREE;
2280 while (TREE_CODE (array_type) == RECORD_TYPE
2281 && (TYPE_PADDING_P (array_type)
2282 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2283 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2285 if (TREE_CODE (array_type) == ARRAY_TYPE
2286 || (TREE_CODE (array_type) == INTEGER_TYPE
2287 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2288 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2290 /* First make the list for a CONSTRUCTOR for the template. Go down the
2291 field list of the template instead of the type chain because this
2292 array might be an Ada array of arrays and we can't tell where the
2293 nested arrays stop being the underlying object. */
2295 for (field = TYPE_FIELDS (template_type); field;
2297 ? (bound_list = TREE_CHAIN (bound_list))
2298 : (array_type = TREE_TYPE (array_type))),
2299 field = TREE_CHAIN (TREE_CHAIN (field)))
2301 tree bounds, min, max;
2303 /* If we have a bound list, get the bounds from there. Likewise
2304 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2305 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2306 This will give us a maximum range. */
2308 bounds = TREE_VALUE (bound_list);
2309 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2310 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2311 else if (expr && TREE_CODE (expr) == PARM_DECL
2312 && DECL_BY_COMPONENT_PTR_P (expr))
2313 bounds = TREE_TYPE (field);
2317 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2318 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2320 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
2321 substitute it using EXPR. */
2322 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2323 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2325 template_elts = tree_cons (TREE_CHAIN (field), max,
2326 tree_cons (field, min, template_elts));
2329 return gnat_build_constructor (template_type, nreverse (template_elts));
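/* Example (editor's addition): for a one-dimensional array instantiated
   with bounds 1 .. N, the loop above pairs the template's two fields with
   those bounds and the constructor returned is the equivalent of the
   aggregate {1, N}, after any PLACEHOLDER_EXPR inside N has been
   substituted using EXPR.  A two-dimensional array yields four entries,
   {low1, high1, low2, high2}.  */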
2332 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify
2333 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2334 in the type contains in its DECL_INITIAL the expression to use when
2335 a constructor is made for the type. GNAT_ENTITY is an entity used
2336 to print out an error message if the mechanism cannot be applied to
2337 an object of that type, and also to give the descriptor its name. */
2340 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2342 tree record_type = make_node (RECORD_TYPE);
2343 tree pointer32_type;
2344 tree field_list = 0;
2353 /* If TYPE is an unconstrained array, use the underlying array type. */
2354 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2355 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2357 /* If this is an array, compute the number of dimensions in the array,
2358 get the index types, and point to the inner type. */
2359 if (TREE_CODE (type) != ARRAY_TYPE)
2362 for (ndim = 1, inner_type = type;
2363 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2364 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2365 ndim++, inner_type = TREE_TYPE (inner_type))
2368 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2370 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2371 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2372 for (i = ndim - 1, inner_type = type;
2374 i--, inner_type = TREE_TYPE (inner_type))
2375 idx_arr[i] = TYPE_DOMAIN (inner_type);
2377 for (i = 0, inner_type = type;
2379 i++, inner_type = TREE_TYPE (inner_type))
2380 idx_arr[i] = TYPE_DOMAIN (inner_type);
2382 /* Now get the DTYPE value. */
2383 switch (TREE_CODE (type))
2388 if (TYPE_VAX_FLOATING_POINT_P (type))
2389 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2402 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2405 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2408 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2411 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2414 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2417 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2423 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2427 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2428 && TYPE_VAX_FLOATING_POINT_P (type))
2429 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2441 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2452 /* Get the CLASS value. */
2455 case By_Descriptor_A:
2456 case By_Short_Descriptor_A:
2459 case By_Descriptor_NCA:
2460 case By_Short_Descriptor_NCA:
2463 case By_Descriptor_SB:
2464 case By_Short_Descriptor_SB:
2468 case By_Short_Descriptor:
2469 case By_Descriptor_S:
2470 case By_Short_Descriptor_S:
2476 /* Make the type for a descriptor for VMS. The first four fields
2477 are the same for all types. */
2480 = chainon (field_list,
2481 make_descriptor_field
2482 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2483 size_in_bytes ((mech == By_Descriptor_A ||
2484 mech == By_Short_Descriptor_A)
2485 ? inner_type : type)));
2487 field_list = chainon (field_list,
2488 make_descriptor_field ("DTYPE",
2489 gnat_type_for_size (8, 1),
2490 record_type, size_int (dtype)));
2491 field_list = chainon (field_list,
2492 make_descriptor_field ("CLASS",
2493 gnat_type_for_size (8, 1),
2494 record_type, size_int (klass)));
2496 /* Of course this will crash at run-time if the address space is not
2497 within the low 32 bits, but there is nothing else we can do. */
2498 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2501 = chainon (field_list,
2502 make_descriptor_field
2503 ("POINTER", pointer32_type, record_type,
2504 build_unary_op (ADDR_EXPR,
2506 build0 (PLACEHOLDER_EXPR, type))));
2511 case By_Short_Descriptor:
2512 case By_Descriptor_S:
2513 case By_Short_Descriptor_S:
2516 case By_Descriptor_SB:
2517 case By_Short_Descriptor_SB:
2519 = chainon (field_list,
2520 make_descriptor_field
2521 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2522 TREE_CODE (type) == ARRAY_TYPE
2523 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2525 = chainon (field_list,
2526 make_descriptor_field
2527 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2528 TREE_CODE (type) == ARRAY_TYPE
2529 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2532 case By_Descriptor_A:
2533 case By_Short_Descriptor_A:
2534 case By_Descriptor_NCA:
2535 case By_Short_Descriptor_NCA:
2536 field_list = chainon (field_list,
2537 make_descriptor_field ("SCALE",
2538 gnat_type_for_size (8, 1),
2542 field_list = chainon (field_list,
2543 make_descriptor_field ("DIGITS",
2544 gnat_type_for_size (8, 1),
2549 = chainon (field_list,
2550 make_descriptor_field
2551 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2552 size_int ((mech == By_Descriptor_NCA ||
2553 mech == By_Short_Descriptor_NCA)
2555 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2556 : (TREE_CODE (type) == ARRAY_TYPE
2557 && TYPE_CONVENTION_FORTRAN_P (type)
2560 field_list = chainon (field_list,
2561 make_descriptor_field ("DIMCT",
2562 gnat_type_for_size (8, 1),
2566 field_list = chainon (field_list,
2567 make_descriptor_field ("ARSIZE",
2568 gnat_type_for_size (32, 1),
2570 size_in_bytes (type)));
2572 /* Now build a pointer to the 0,0,0... element. */
2573 tem = build0 (PLACEHOLDER_EXPR, type);
2574 for (i = 0, inner_type = type; i < ndim;
2575 i++, inner_type = TREE_TYPE (inner_type))
2576 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2577 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2578 NULL_TREE, NULL_TREE);
2581 = chainon (field_list,
2582 make_descriptor_field
2584 build_pointer_type_for_mode (inner_type, SImode, false),
2587 build_pointer_type_for_mode (inner_type, SImode,
2591 /* Next come the addressing coefficients. */
2592 tem = size_one_node;
2593 for (i = 0; i < ndim; i++)
2597 = size_binop (MULT_EXPR, tem,
2598 size_binop (PLUS_EXPR,
2599 size_binop (MINUS_EXPR,
2600 TYPE_MAX_VALUE (idx_arr[i]),
2601 TYPE_MIN_VALUE (idx_arr[i])),
2604 fname[0] = ((mech == By_Descriptor_NCA ||
2605 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2606 fname[1] = '0' + i, fname[2] = 0;
2608 = chainon (field_list,
2609 make_descriptor_field (fname,
2610 gnat_type_for_size (32, 1),
2611 record_type, idx_length));
2613 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2617 /* Finally here are the bounds. */
2618 for (i = 0; i < ndim; i++)
2622 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2624 = chainon (field_list,
2625 make_descriptor_field
2626 (fname, gnat_type_for_size (32, 1), record_type,
2627 TYPE_MIN_VALUE (idx_arr[i])));
2631 = chainon (field_list,
2632 make_descriptor_field
2633 (fname, gnat_type_for_size (32, 1), record_type,
2634 TYPE_MAX_VALUE (idx_arr[i])));
2639 post_error ("unsupported descriptor type for &", gnat_entity);
2642 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2643 finish_record_type (record_type, field_list, 0, false);
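/* Illustrative layout (editor's addition): for an object passed with
   mechanism By_Descriptor_S, the record built above matches the classic
   8-byte VMS string descriptor

     LENGTH   16-bit size of the object in bytes
     DTYPE    8-bit data type code
     CLASS    8-bit class code (1 for class S)
     POINTER  32-bit address of the data

   with no bound or coefficient fields appended, since only the A and NCA
   classes carry them.  */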
2647 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify
2648 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2649 in the type contains in its DECL_INITIAL the expression to use when
2650 a constructor is made for the type. GNAT_ENTITY is an entity used
2651 to print out an error message if the mechanism cannot be applied to
2652 an object of that type, and also to give the descriptor its name. */
2655 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2657 tree record64_type = make_node (RECORD_TYPE);
2658 tree pointer64_type;
2659 tree field_list64 = 0;
2668 /* If TYPE is an unconstrained array, use the underlying array type. */
2669 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2670 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2672 /* If this is an array, compute the number of dimensions in the array,
2673 get the index types, and point to the inner type. */
2674 if (TREE_CODE (type) != ARRAY_TYPE)
2677 for (ndim = 1, inner_type = type;
2678 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2679 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2680 ndim++, inner_type = TREE_TYPE (inner_type))
2683 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2685 if (mech != By_Descriptor_NCA
2686 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2687 for (i = ndim - 1, inner_type = type;
2689 i--, inner_type = TREE_TYPE (inner_type))
2690 idx_arr[i] = TYPE_DOMAIN (inner_type);
2692 for (i = 0, inner_type = type;
2694 i++, inner_type = TREE_TYPE (inner_type))
2695 idx_arr[i] = TYPE_DOMAIN (inner_type);
2697 /* Now get the DTYPE value. */
2698 switch (TREE_CODE (type))
2703 if (TYPE_VAX_FLOATING_POINT_P (type))
2704 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2717 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2720 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2723 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2726 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2729 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2732 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2738 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2742 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2743 && TYPE_VAX_FLOATING_POINT_P (type))
2744 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2756 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2767 /* Get the CLASS value. */
2770 case By_Descriptor_A:
2773 case By_Descriptor_NCA:
2776 case By_Descriptor_SB:
2780 case By_Descriptor_S:
2786 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2787 are the same for all types. */
2789 field_list64 = chainon (field_list64,
2790 make_descriptor_field ("MBO",
2791 gnat_type_for_size (16, 1),
2792 record64_type, size_int (1)));
2794 field_list64 = chainon (field_list64,
2795 make_descriptor_field ("DTYPE",
2796 gnat_type_for_size (8, 1),
2797 record64_type, size_int (dtype)));
2798 field_list64 = chainon (field_list64,
2799 make_descriptor_field ("CLASS",
2800 gnat_type_for_size (8, 1),
2801 record64_type, size_int (klass)));
2803 field_list64 = chainon (field_list64,
2804 make_descriptor_field ("MBMO",
2805 gnat_type_for_size (32, 1),
2806 record64_type, ssize_int (-1)));
2809 = chainon (field_list64,
2810 make_descriptor_field
2811 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2812 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2814 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2817 = chainon (field_list64,
2818 make_descriptor_field
2819 ("POINTER", pointer64_type, record64_type,
2820 build_unary_op (ADDR_EXPR,
2822 build0 (PLACEHOLDER_EXPR, type))));
2827 case By_Descriptor_S:
2830 case By_Descriptor_SB:
2832 = chainon (field_list64,
2833 make_descriptor_field
2834 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2835 TREE_CODE (type) == ARRAY_TYPE
2836 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2838 = chainon (field_list64,
2839 make_descriptor_field
2840 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2841 TREE_CODE (type) == ARRAY_TYPE
2842 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2845 case By_Descriptor_A:
2846 case By_Descriptor_NCA:
2847 field_list64 = chainon (field_list64,
2848 make_descriptor_field ("SCALE",
2849 gnat_type_for_size (8, 1),
2853 field_list64 = chainon (field_list64,
2854 make_descriptor_field ("DIGITS",
2855 gnat_type_for_size (8, 1),
2860 = chainon (field_list64,
2861 make_descriptor_field
2862 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2863 size_int (mech == By_Descriptor_NCA
2865 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2866 : (TREE_CODE (type) == ARRAY_TYPE
2867 && TYPE_CONVENTION_FORTRAN_P (type)
2870 field_list64 = chainon (field_list64,
2871 make_descriptor_field ("DIMCT",
2872 gnat_type_for_size (8, 1),
2876 field_list64 = chainon (field_list64,
2877 make_descriptor_field ("MBZ",
2878 gnat_type_for_size (32, 1),
2881 field_list64 = chainon (field_list64,
2882 make_descriptor_field ("ARSIZE",
2883 gnat_type_for_size (64, 1),
2885 size_in_bytes (type)));
2887 /* Now build a pointer to the 0,0,0... element. */
2888 tem = build0 (PLACEHOLDER_EXPR, type);
2889 for (i = 0, inner_type = type; i < ndim;
2890 i++, inner_type = TREE_TYPE (inner_type))
2891 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2892 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2893 NULL_TREE, NULL_TREE);
2896 = chainon (field_list64,
2897 make_descriptor_field
2899 build_pointer_type_for_mode (inner_type, DImode, false),
2902 build_pointer_type_for_mode (inner_type, DImode,
2906 /* Next come the addressing coefficients. */
2907 tem = size_one_node;
2908 for (i = 0; i < ndim; i++)
2912 = size_binop (MULT_EXPR, tem,
2913 size_binop (PLUS_EXPR,
2914 size_binop (MINUS_EXPR,
2915 TYPE_MAX_VALUE (idx_arr[i]),
2916 TYPE_MIN_VALUE (idx_arr[i])),
2919 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2920 fname[1] = '0' + i, fname[2] = 0;
2922 = chainon (field_list64,
2923 make_descriptor_field (fname,
2924 gnat_type_for_size (64, 1),
2925 record64_type, idx_length));
2927 if (mech == By_Descriptor_NCA)
2931 /* Finally here are the bounds. */
2932 for (i = 0; i < ndim; i++)
2936 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2938 = chainon (field_list64,
2939 make_descriptor_field
2940 (fname, gnat_type_for_size (64, 1), record64_type,
2941 TYPE_MIN_VALUE (idx_arr[i])));
2945 = chainon (field_list64,
2946 make_descriptor_field
2947 (fname, gnat_type_for_size (64, 1), record64_type,
2948 TYPE_MAX_VALUE (idx_arr[i])));
2953 post_error ("unsupported descriptor type for &", gnat_entity);
2956 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
2957 finish_record_type (record64_type, field_list64, 0, false);
2958 return record64_type;
2961 /* Utility routine for the code above to make a descriptor field. */
2964 make_descriptor_field (const char *name, tree type,
2965 tree rec_type, tree initial)
2968 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2970 DECL_INITIAL (field) = initial;
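/* Usage note (editor's addition): the DECL_INITIAL stored here is not an
   ordinary initializer; it is the expression recorded for the field, e.g.

     field_list = chainon (field_list,
                           make_descriptor_field ("CLASS",
                                                  gnat_type_for_size (8, 1),
                                                  record_type,
                                                  size_int (klass)));

   which build_vms_descriptor* chains into the descriptor type and which
   convert_vms_descriptor* later reads back, for instance to recognize the
   descriptor class from the CLASS field.  */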
2974 /* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
2975 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
2976 which the VMS descriptor is passed. */
2979 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2981 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2982 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2983 /* The CLASS field is the 3rd field in the descriptor. */
2984 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2985 /* The POINTER field is the 6th field in the descriptor. */
2986 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
2988 /* Retrieve the value of the POINTER field. */
2990 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
2992 if (POINTER_TYPE_P (gnu_type))
2993 return convert (gnu_type, gnu_expr64);
2995 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
2997 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2998 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2999 tree template_type = TREE_TYPE (p_bounds_type);
3000 tree min_field = TYPE_FIELDS (template_type);
3001 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3002 tree template_tree, template_addr, aflags, dimct, t, u;
3003 /* See the head comment of build_vms_descriptor. */
3004 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3005 tree lfield, ufield;
3007 /* Convert POINTER to the type of the P_ARRAY field. */
3008 gnu_expr64 = convert (p_array_type, gnu_expr64);
3012 case 1: /* Class S */
3013 case 15: /* Class SB */
3014 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3015 t = TREE_CHAIN (TREE_CHAIN (klass));
3016 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3017 t = tree_cons (min_field,
3018 convert (TREE_TYPE (min_field), integer_one_node),
3019 tree_cons (max_field,
3020 convert (TREE_TYPE (max_field), t),
3022 template_tree = gnat_build_constructor (template_type, t);
3023 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3025 /* For class S, we are done. */
3029 /* Test that we really have an SB descriptor, as DEC Ada does. */
3030 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3031 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3032 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3033 /* If so, there is already a template in the descriptor and
3034 it is located right after the POINTER field. The fields are
3035 64 bits wide, so they must be repacked. */
3036 t = TREE_CHAIN (pointer64);
3037 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3038 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3041 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3043 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3045 /* Build the template in the form of a constructor. */
3046 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3047 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3048 ufield, NULL_TREE));
3049 template_tree = gnat_build_constructor (template_type, t);
3051 /* Otherwise use the {1, LENGTH} template we built above. */
3052 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3053 build_unary_op (ADDR_EXPR, p_bounds_type,
3058 case 4: /* Class A */
3059 /* The AFLAGS field is the 3rd field after the pointer in the
3061 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3062 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3063 /* The DIMCT field is the next field in the descriptor after
3066 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3067 /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3068 or if FL_COEFF or FL_BOUNDS is not set. */
3069 u = build_int_cst (TREE_TYPE (aflags), 192);
3070 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3071 build_binary_op (NE_EXPR, integer_type_node,
3073 convert (TREE_TYPE (dimct),
3075 build_binary_op (NE_EXPR, integer_type_node,
3076 build2 (BIT_AND_EXPR,
3080 /* There is already a template in the descriptor and it is located
3081 in block 3. The fields are 64 bits wide, so they must be repacked. */
3082 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3084 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3085 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3088 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3090 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3092 /* Build the template in the form of a constructor. */
3093 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3094 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3095 ufield, NULL_TREE));
3096 template_tree = gnat_build_constructor (template_type, t);
3097 template_tree = build3 (COND_EXPR, template_type, u,
3098 build_call_raise (CE_Length_Check_Failed, Empty,
3099 N_Raise_Constraint_Error),
3102 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3105 case 10: /* Class NCA */
3107 post_error ("unsupported descriptor type for &", gnat_subprog);
3108 template_addr = integer_zero_node;
3112 /* Build the fat pointer in the form of a constructor. */
3113 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3114 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3115 template_addr, NULL_TREE));
3116 return gnat_build_constructor (gnu_type, t);
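/* Example (editor's addition): when GNU_TYPE is a fat pointer, the value
   returned just above is the two-field aggregate

     { P_ARRAY  => POINTER field of the descriptor, converted to the
                   array pointer type,
       P_BOUNDS => address of the template built for the class }

   so an unconstrained array received through a 64-bit class S, SB or A
   descriptor can be used directly as a GNAT fat pointer.  */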
3123 /* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3124 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3125 which the VMS descriptor is passed. */
3128 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3130 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3131 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3132 /* The CLASS field is the 3rd field in the descriptor. */
3133 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3134 /* The POINTER field is the 4th field in the descriptor. */
3135 tree pointer = TREE_CHAIN (klass);
3137 /* Retrieve the value of the POINTER field. */
3139 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3141 if (POINTER_TYPE_P (gnu_type))
3142 return convert (gnu_type, gnu_expr32);
3144 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3146 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3147 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3148 tree template_type = TREE_TYPE (p_bounds_type);
3149 tree min_field = TYPE_FIELDS (template_type);
3150 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3151 tree template_tree, template_addr, aflags, dimct, t, u;
3152 /* See the head comment of build_vms_descriptor. */
3153 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3155 /* Convert POINTER to the type of the P_ARRAY field. */
3156 gnu_expr32 = convert (p_array_type, gnu_expr32);
3160 case 1: /* Class S */
3161 case 15: /* Class SB */
3162 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3163 t = TYPE_FIELDS (desc_type);
3164 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3165 t = tree_cons (min_field,
3166 convert (TREE_TYPE (min_field), integer_one_node),
3167 tree_cons (max_field,
3168 convert (TREE_TYPE (max_field), t),
3170 template_tree = gnat_build_constructor (template_type, t);
3171 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3173 /* For class S, we are done. */
3177 /* Test that we really have an SB descriptor, as DEC Ada does. */
3178 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3179 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3180 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3181 /* If so, there is already a template in the descriptor and
3182 it is located right after the POINTER field. */
3183 t = TREE_CHAIN (pointer);
3185 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3186 /* Otherwise use the {1, LENGTH} template we built above. */
3187 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3188 build_unary_op (ADDR_EXPR, p_bounds_type,
3193 case 4: /* Class A */
3194 /* The AFLAGS field is the 7th field in the descriptor. */
3195 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3196 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3197 /* The DIMCT field is the 8th field in the descriptor. */
3199 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3200 /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3201 or if FL_COEFF or FL_BOUNDS is not set. */
3202 u = build_int_cst (TREE_TYPE (aflags), 192);
3203 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3204 build_binary_op (NE_EXPR, integer_type_node,
3206 convert (TREE_TYPE (dimct),
3208 build_binary_op (NE_EXPR, integer_type_node,
3209 build2 (BIT_AND_EXPR,
3213 /* There is already a template in the descriptor and it is
3214 located at the start of block 3 (12th field). */