1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
47 #include "tree-dump.h"
48 #include "tree-inline.h"
49 #include "tree-iterator.h"
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
74 /* If nonzero, pretend we are allocating at global level. */
/* NOTE(review): the variable this comment documents (presumably a file-scope
   "force_global" flag, referenced below in global_bindings_p) is not visible
   in this excerpt -- confirm against the full source. */
77 /* The default alignment of "double" floating-point types, i.e. floating
78 point types whose size is equal to 64 bits, or 0 if this alignment is
79 not specifically capped. */
80 int double_float_alignment;
82 /* The default alignment of "double" or larger scalar types, i.e. scalar
83 types whose size is greater or equal to 64 bits, or 0 if this alignment
84 is not specifically capped. */
85 int double_scalar_alignment;
87 /* Tree nodes for the various types and decls we create. */
88 tree gnat_std_decls[(int) ADT_LAST];
90 /* Functions to call for each of the possible raise reasons. */
/* Indexed by the front end's reason codes, 0 .. LAST_REASON_CODE. */
91 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
93 /* Forward declarations for handlers of attributes. */
/* All handlers share the GCC attribute-handler signature:
   (node, name, args, flags, no_add_attrs).  */
94 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
101 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
102 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
103 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
105 /* Fake handler for attributes we don't properly support, typically because
106 they'd require dragging a lot of the common-c front-end circuitry. */
107 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
109 /* Table of machine-independent internal attributes for Ada. We support
110 this minimal set of attributes to accommodate the needs of builtins. */
/* NOTE(review): the initializer's opening and closing brace lines are elided
   in this excerpt -- confirm against the full source. */
111 const struct attribute_spec gnat_internal_attribute_table[] =
113 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
114 { "const", 0, 0, true, false, false, handle_const_attribute },
115 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute },
117 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
118 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
119 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
121 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
122 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
124 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
126 /* ??? format and format_arg are heavy and not supported, which actually
127 prevents support for stdio builtins, which we however declare as part
128 of the common builtins.def contents. */
129 { "format", 3, 3, false, true, true, fake_attribute_handler },
130 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
/* Sentinel entry terminating the table. */
132 { NULL, 0, 0, false, false, false, NULL }
135 /* Associates a GNAT tree node to a GCC tree node. It is used in
136 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
137 of `save_gnu_tree' for more info. */
/* Both tables below are indexed by (GNAT_ENTITY - First_Node_Id) and are
   sized by max_gnat_nodes for the garbage collector (GTY length).  */
138 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
140 #define GET_GNU_TREE(GNAT_ENTITY) \
141 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
143 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
144 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
146 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
147 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
149 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
150 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
152 #define GET_DUMMY_NODE(GNAT_ENTITY) \
153 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
155 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
156 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
158 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
159 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
161 /* This variable keeps a table for types for each precision so that we only
162 allocate each of them once. Signed and unsigned types are kept separate.
164 Note that these types are only used when fold-const requests something
165 special. Perhaps we should NOT share these types; we'll see how it
166 goes. */
167 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
169 /* Likewise for float types, but record these by mode. */
170 static GTY(()) tree float_types[NUM_MACHINE_MODES];
172 /* For each binding contour we allocate a binding_level structure to indicate
173 the binding depth. */
175 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
176 /* The binding level containing this one (the enclosing binding level). */
177 struct gnat_binding_level *chain;
178 /* The BLOCK node for this level. */
/* NOTE(review): the field declarations for the BLOCK ("block") and the setjmp
   buffer ("jmpbuf_decl") are elided in this excerpt; both are referenced by
   gnat_pushlevel/gnat_poplevel below -- confirm against the full source. */
180 /* If nonzero, the setjmp buffer that needs to be updated for any
181 variable-sized definition within this context. */
185 /* The binding level currently in effect. */
186 static GTY(()) struct gnat_binding_level *current_binding_level;
188 /* A chain of gnat_binding_level structures awaiting reuse. */
189 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
191 /* An array of global declarations. */
192 static GTY(()) VEC(tree,gc) *global_decls;
194 /* An array of builtin function declarations. */
195 static GTY(()) VEC(tree,gc) *builtin_decls;
197 /* An array of global renaming pointers. */
198 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
200 /* A chain of unused BLOCK nodes. */
201 static GTY((deletable)) tree free_block_chain;
/* Forward declarations for file-local helper functions defined below. */
203 static tree merge_sizes (tree, tree, tree, bool, bool);
204 static tree compute_related_constant (tree, tree);
205 static tree split_plus (tree, tree *);
206 static void gnat_gimplify_function (tree);
207 static tree float_type_for_precision (int, enum machine_mode);
208 static tree convert_to_fat_pointer (tree, tree);
209 static tree convert_to_thin_pointer (tree, tree);
210 static tree make_descriptor_field (const char *,tree, tree, tree);
211 static bool potential_alignment_gap (tree, tree, tree);
213 /* Initialize the association of GNAT nodes to GCC trees. */
/* NOTE(review): the return-type line and braces are elided in this excerpt.
   Allocates a zeroed, GC-tracked table of max_gnat_nodes tree slots. */
216 init_gnat_to_gnu (void)
218 associate_gnat_to_gnu
219 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
222 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
223 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
224 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
226 If GNU_DECL is zero, a previous association is to be reset. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
229 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
231 /* Check that GNAT_ENTITY is not already defined and that it is being set
232 to something which is a decl. Raise gigi 401 if not. Usually, this
233 means GNAT_ENTITY is defined twice, but occasionally is due to some
234 Gigi problem. */
235 gcc_assert (!(gnu_decl
236 && (PRESENT_GNU_TREE (gnat_entity)
237 || (!no_check && !DECL_P (gnu_decl)))));
239 SET_GNU_TREE (gnat_entity, gnu_decl);
242 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
243 Return the ..._DECL node that was associated with it. If there is no tree
244 node associated with GNAT_ENTITY, abort.
246 In some cases, such as delayed elaboration or expressions that need to
247 be elaborated only once, GNAT_ENTITY is really not an entity. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
250 get_gnu_tree (Entity_Id gnat_entity)
252 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
253 return GET_GNU_TREE (gnat_entity);
256 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
259 present_gnu_tree (Entity_Id gnat_entity)
261 return PRESENT_GNU_TREE (gnat_entity);
264 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
/* NOTE(review): the return-type line, braces, and the assignment's left-hand
   side (presumably dummy_node_table) are elided in this excerpt -- confirm
   against the full source. */
267 init_dummy_type (void)
270 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
273 /* Make a dummy type corresponding to GNAT_TYPE. */
/* NOTE(review): return-type line, braces and the local declaration of
   gnu_type are elided in this excerpt. */
276 make_dummy_type (Entity_Id gnat_type)
278 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
281 /* If there is an equivalent type, get its underlying type. */
282 if (Present (gnat_underlying))
283 gnat_underlying = Underlying_Type (gnat_underlying);
285 /* If there was no equivalent type (can only happen when just annotating
286 types) or underlying type, go back to the original type. */
287 if (No (gnat_underlying))
288 gnat_underlying = gnat_type;
290 /* If there is already a dummy type, use that one. Else make one. */
291 if (PRESENT_DUMMY_NODE (gnat_underlying))
292 return GET_DUMMY_NODE (gnat_underlying);
294 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
295 another kind of node (fallback elided in this excerpt). */
296 gnu_type = make_node (Is_Record_Type (gnat_underlying)
297 ? tree_code_for_record_type (gnat_underlying)
299 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
300 TYPE_DUMMY_P (gnu_type) = 1;
301 TYPE_STUB_DECL (gnu_type)
302 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
303 if (AGGREGATE_TYPE_P (gnu_type))
304 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache the dummy so later requests for the same entity reuse it. */
306 SET_DUMMY_NODE (gnat_underlying, gnu_type);
311 /* Return nonzero if we are currently in the global binding level. */
/* NOTE(review): return-type line and braces elided in this excerpt.
   Returns -1 (not 1) for "global"; callers treat it as a boolean. */
314 global_bindings_p (void)
316 return ((force_global || !current_function_decl) ? -1 : 0);
319 /* Enter a new binding level. */
/* NOTE(review): return-type line, braces and the "else" arms of the two
   reuse-or-allocate decisions are partially elided in this excerpt. */
322 gnat_pushlevel (void)
324 struct gnat_binding_level *newlevel = NULL;
326 /* Reuse a struct for this binding level, if there is one. */
327 if (free_binding_level)
329 newlevel = free_binding_level;
330 free_binding_level = free_binding_level->chain;
/* Otherwise allocate a fresh GC-managed binding level. */
334 = (struct gnat_binding_level *)
335 ggc_alloc (sizeof (struct gnat_binding_level));
337 /* Use a free BLOCK, if any; otherwise, allocate one. */
338 if (free_block_chain)
340 newlevel->block = free_block_chain;
341 free_block_chain = BLOCK_CHAIN (free_block_chain);
342 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
345 newlevel->block = make_node (BLOCK);
347 /* Point the BLOCK we just made to its parent. */
348 if (current_binding_level)
349 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
351 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
352 TREE_USED (newlevel->block) = 1;
354 /* Add this level to the front of the chain (stack) of levels that are
355 active. */
356 newlevel->chain = current_binding_level;
357 newlevel->jmpbuf_decl = NULL_TREE;
358 current_binding_level = newlevel;
361 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
362 and point FNDECL to this BLOCK. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
365 set_current_block_context (tree fndecl)
367 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
368 DECL_INITIAL (fndecl) = current_binding_level->block;
371 /* Set the jmpbuf_decl for the current binding level to DECL. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
374 set_block_jmpbuf_decl (tree decl)
376 current_binding_level->jmpbuf_decl = decl;
379 /* Get the jmpbuf_decl, if any, for the current binding level. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
382 get_block_jmpbuf_decl (void)
384 return current_binding_level->jmpbuf_decl;
387 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function's name/return-type lines and several braces are
   elided in this excerpt -- confirm against the full source. */
392 struct gnat_binding_level *level = current_binding_level;
393 tree block = level->block;
/* Lists were built front-insertion style; restore source order. */
395 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
396 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
398 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
399 are no variables free the block and merge its subblocks into those of its
400 parent block. Otherwise, add it to the list of its parent. */
401 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
403 else if (BLOCK_VARS (block) == NULL_TREE)
405 BLOCK_SUBBLOCKS (level->chain->block)
406 = chainon (BLOCK_SUBBLOCKS (block),
407 BLOCK_SUBBLOCKS (level->chain->block))
/* Recycle the now-empty BLOCK node for a later gnat_pushlevel. */
408 BLOCK_CHAIN (block) = free_block_chain;
409 free_block_chain = block;
413 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
414 BLOCK_SUBBLOCKS (level->chain->block) = block;
415 TREE_USED (block) = 1;
416 set_block_for_group (block);
419 /* Free this binding structure. */
420 current_binding_level = level->chain;
421 level->chain = free_binding_level;
422 free_binding_level = level;
426 /* Records a ..._DECL node DECL as belonging to the current lexical scope
427 and uses GNAT_NODE for location information and propagating flags. */
/* NOTE(review): return-type line, several braces and "else" keywords are
   elided in this excerpt -- confirm against the full source. */
430 gnat_pushdecl (tree decl, Node_Id gnat_node)
432 /* If this decl is public external or at toplevel, there is no context.
433 But PARM_DECLs always go in the level of its function. */
434 if (TREE_CODE (decl) != PARM_DECL
435 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
436 || global_bindings_p ()))
437 DECL_CONTEXT (decl) = 0;
440 DECL_CONTEXT (decl) = current_function_decl;
442 /* Functions imported in another function are not really nested. */
443 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
444 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings for decls with no GNAT node or with warnings off. */
447 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
449 /* Set the location of DECL and emit a declaration for it. */
450 if (Present (gnat_node))
451 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
452 add_decl_expr (decl, gnat_node);
454 /* Put the declaration on the list. The list of declarations is in reverse
455 order. The list will be reversed later. Put global variables in the
456 globals list and builtin functions in a dedicated list to speed up
457 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
458 the list, as they will cause trouble with the debugger and aren't needed
459 anyway. */
460 if (TREE_CODE (decl) != TYPE_DECL
461 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
463 if (global_bindings_p ())
465 VEC_safe_push (tree, gc, global_decls, decl);
467 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
468 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Non-global decls are chained onto the current binding level's BLOCK. */
472 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
473 BLOCK_VARS (current_binding_level->block) = decl;
477 /* For the declaration of a type, set its name if it either is not already
478 set or if the previous type name was not derived from a source name.
479 We'd rather have the type named with a real name and all the pointer
480 types to the same object have the same POINTER_TYPE node. Code in the
481 equivalent function of c-decl.c makes a copy of the type node here, but
482 that may cause us trouble with incomplete types. We make an exception
483 for fat pointer types because the compiler automatically builds them
484 for unconstrained array types and the debugger uses them to represent
485 both these and pointers to these. */
486 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
488 tree t = TREE_TYPE (decl);
490 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
492 else if (TYPE_FAT_POINTER_P (t))
494 tree tt = build_variant_type_copy (t);
495 TYPE_NAME (tt) = decl;
496 TREE_USED (tt) = TREE_USED (t);
497 TREE_TYPE (decl) = tt;
498 DECL_ORIGINAL_TYPE (decl) = t;
501 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
506 /* Propagate the name to all the variants. This is needed for
507 the type qualifiers machinery to work properly. */
509 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
510 TYPE_NAME (t) = decl;
514 /* Do little here. Set up the standard declarations later after the
515 front end has been run. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
518 gnat_init_decl_processing (void)
520 /* Make the binding_level structure for global names. */
521 current_function_decl = 0;
522 current_binding_level = 0;
523 free_binding_level = 0;
/* Build the shared GENERIC tree nodes (char/int types, sizes, etc.). */
526 build_common_tree_nodes (true, true);
528 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
529 corresponding to the width of Pmode. In most cases when ptr_mode
530 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
531 But we get far better code using the width of Pmode. */
532 size_type_node = gnat_type_for_mode (Pmode, 0);
533 set_sizetype (size_type_node);
535 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
536 boolean_type_node = make_unsigned_type (8);
537 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
538 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
539 build_int_cst (boolean_type_node, 1));
/* The RM size of boolean is 1 bit even though it occupies 8 bits. */
540 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
542 build_common_tree_nodes_2 (0);
543 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
545 ptr_void_type_node = build_pointer_type (void_type_node);
548 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
/* NOTE(review): return-type line and braces elided in this excerpt. */
551 record_builtin_type (const char *name, tree type)
553 tree type_decl = build_decl (input_location,
554 TYPE_DECL, get_identifier (name), type);
/* Empty: there is no GNAT source node behind a builtin type. */
556 gnat_pushdecl (type_decl, Empty);
/* Let the debug back-end know about the new type, if it cares. */
558 if (debug_hooks->type_decl)
559 debug_hooks->type_decl (type_decl, false);
562 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
563 finish constructing the record or union type. If REP_LEVEL is zero, this
564 record has no representation clause and so will be entirely laid out here.
565 If REP_LEVEL is one, this record has a representation clause and has been
566 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
567 this record is derived from a parent record and thus inherits its layout;
568 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
569 true, the record type is expected to be modified afterwards so it will
570 not be sent to the back-end for finalization. */
/* NOTE(review): the return-type line, many braces/else-arms and some local
   declarations (e.g. "field", "this_ada_size") are elided in this excerpt
   -- confirm against the full source before relying on the control flow. */
573 finish_record_type (tree record_type, tree fieldlist, int rep_level,
574 bool do_not_finalize)
576 enum tree_code code = TREE_CODE (record_type);
577 tree name = TYPE_NAME (record_type);
578 tree ada_size = bitsize_zero_node;
579 tree size = bitsize_zero_node;
580 bool had_size = TYPE_SIZE (record_type) != 0;
581 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
582 bool had_align = TYPE_ALIGN (record_type) != 0;
585 TYPE_FIELDS (record_type) = fieldlist;
587 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
588 generate debug info and have a parallel type. */
589 if (name && TREE_CODE (name) == TYPE_DECL)
590 name = DECL_NAME (name);
591 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
593 /* Globally initialize the record first. If this is a rep'ed record,
594 that just means some initializations; otherwise, layout the record. */
597 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
598 SET_TYPE_MODE (record_type, BLKmode);
601 TYPE_SIZE_UNIT (record_type) = size_zero_node;
603 TYPE_SIZE (record_type) = bitsize_zero_node;
605 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
606 out just like a UNION_TYPE, since the size will be fixed. */
607 else if (code == QUAL_UNION_TYPE)
612 /* Ensure there isn't a size already set. There can be in an error
613 case where there is a rep clause but all fields have errors and
614 no longer have a position. */
615 TYPE_SIZE (record_type) = 0;
616 layout_type (record_type);
619 /* At this point, the position and size of each field is known. It was
620 either set before entry by a rep clause, or by laying out the type above.
622 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
623 to compute the Ada size; the GCC size and alignment (for rep'ed records
624 that are not padding types); and the mode (for rep'ed records). We also
625 clear the DECL_BIT_FIELD indication for the cases we know have not been
626 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
628 if (code == QUAL_UNION_TYPE)
629 fieldlist = nreverse (fieldlist);
631 for (field = fieldlist; field; field = TREE_CHAIN (field))
633 tree type = TREE_TYPE (field);
634 tree pos = bit_position (field);
635 tree this_size = DECL_SIZE (field);
/* Prefer the Ada size of an inner record when one is recorded. */
638 if ((TREE_CODE (type) == RECORD_TYPE
639 || TREE_CODE (type) == UNION_TYPE
640 || TREE_CODE (type) == QUAL_UNION_TYPE)
641 && !TYPE_IS_FAT_POINTER_P (type)
642 && !TYPE_CONTAINS_TEMPLATE_P (type)
643 && TYPE_ADA_SIZE (type))
644 this_ada_size = TYPE_ADA_SIZE (type);
646 this_ada_size = this_size;
648 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
649 if (DECL_BIT_FIELD (field)
650 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
652 unsigned int align = TYPE_ALIGN (type);
654 /* In the general case, type alignment is required. */
655 if (value_factor_p (pos, align))
657 /* The enclosing record type must be sufficiently aligned.
658 Otherwise, if no alignment was specified for it and it
659 has been laid out already, bump its alignment to the
660 desired one if this is compatible with its size. */
661 if (TYPE_ALIGN (record_type) >= align)
663 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
664 DECL_BIT_FIELD (field) = 0;
668 && value_factor_p (TYPE_SIZE (record_type), align))
670 TYPE_ALIGN (record_type) = align;
671 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
672 DECL_BIT_FIELD (field) = 0;
676 /* In the non-strict alignment case, only byte alignment is
required. */
677 if (!STRICT_ALIGNMENT
678 && DECL_BIT_FIELD (field)
679 && value_factor_p (pos, BITS_PER_UNIT))
680 DECL_BIT_FIELD (field) = 0;
683 /* If we still have DECL_BIT_FIELD set at this point, we know that the
684 field is technically not addressable. Except that it can actually
685 be addressed if it is BLKmode and happens to be properly aligned. */
686 if (DECL_BIT_FIELD (field)
687 && !(DECL_MODE (field) == BLKmode
688 && value_factor_p (pos, BITS_PER_UNIT)))
689 DECL_NONADDRESSABLE_P (field) = 1;
691 /* A type must be as aligned as its most aligned field that is not
692 a bit-field. But this is already enforced by layout_type. */
693 if (rep_level > 0 && !DECL_BIT_FIELD (field))
694 TYPE_ALIGN (record_type)
695 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Accumulate sizes: unions take the MAX over members ... */
700 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
701 size = size_binop (MAX_EXPR, size, this_size);
/* ... qualified unions select per-variant via COND_EXPR ... */
704 case QUAL_UNION_TYPE:
706 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
707 this_ada_size, ada_size);
708 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
713 /* Since we know here that all fields are sorted in order of
714 increasing bit position, the size of the record is one
715 higher than the ending bit of the last field processed
716 unless we have a rep clause, since in that case we might
717 have a field outside a QUAL_UNION_TYPE that has a higher ending
718 position. So use a MAX in that case. Also, if this field is a
719 QUAL_UNION_TYPE, we need to take into account the previous size in
720 the case of empty variants. */
722 = merge_sizes (ada_size, pos, this_ada_size,
723 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
725 = merge_sizes (size, pos, this_size,
726 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Undo the earlier reversal so TYPE_FIELDS keeps source order. */
734 if (code == QUAL_UNION_TYPE)
735 nreverse (fieldlist);
737 /* If the type is discriminated, it can be used to access all its
738 constrained subtypes, so force structural equality checks. */
739 if (CONTAINS_PLACEHOLDER_P (size))
740 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
744 /* If this is a padding record, we never want to make the size smaller
745 than what was specified in it, if any. */
746 if (TREE_CODE (record_type) == RECORD_TYPE
747 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
748 size = TYPE_SIZE (record_type);
750 /* Now set any of the values we've just computed that apply. */
751 if (!TYPE_IS_FAT_POINTER_P (record_type)
752 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
753 SET_TYPE_ADA_SIZE (record_type, ada_size);
757 tree size_unit = had_size_unit
758 ? TYPE_SIZE_UNIT (record_type)
760 size_binop (CEIL_DIV_EXPR, size,
762 unsigned int align = TYPE_ALIGN (record_type);
/* Round the bit and byte sizes up to the alignment boundary. */
764 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
765 TYPE_SIZE_UNIT (record_type)
766 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
768 compute_record_mode (record_type);
772 if (!do_not_finalize)
773 rest_of_record_type_compilation (record_type);
776 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
777 the debug information associated with it. It need not be invoked
778 directly in most cases since finish_record_type takes care of doing
779 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
/* NOTE(review): the return-type line, many braces/else-arms and some local
   declarations (e.g. "field", "var", "new_field", "suffix") are elided in
   this excerpt -- confirm against the full source. */
782 rest_of_record_type_compilation (tree record_type)
784 tree fieldlist = TYPE_FIELDS (record_type);
786 enum tree_code code = TREE_CODE (record_type);
787 bool var_size = false;
789 for (field = fieldlist; field; field = TREE_CHAIN (field))
791 /* We need to make an XVE/XVU record if any field has variable size,
792 whether or not the record does. For example, if we have a union,
793 it may be that all fields, rounded up to the alignment, have the
794 same size, in which case we'll use that size. But the debug
795 output routines (except Dwarf2) won't be able to output the fields,
796 so we need to make the special record. */
797 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
798 /* If a field has a non-constant qualifier, the record will have
799 variable size too. */
800 || (code == QUAL_UNION_TYPE
801 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
808 /* If this record is of variable size, rename it so that the
809 debugger knows it is and make a new, parallel, record
810 that tells the debugger how the record is laid out. See
811 exp_dbug.ads. But don't do this for records that are padding
812 since they confuse GDB. */
814 && !(TREE_CODE (record_type) == RECORD_TYPE
815 && TYPE_IS_PADDING_P (record_type)))
/* The parallel record is a plain UNION_TYPE for qualified unions,
   otherwise the same kind of node as the original. */
818 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
819 ? UNION_TYPE : TREE_CODE (record_type));
820 tree orig_name = TYPE_NAME (record_type), new_name;
821 tree last_pos = bitsize_zero_node;
822 tree old_field, prev_old_field = NULL_TREE;
824 if (TREE_CODE (orig_name) == TYPE_DECL)
825 orig_name = DECL_NAME (orig_name);
828 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
830 TYPE_NAME (new_record_type) = new_name;
831 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
832 TYPE_STUB_DECL (new_record_type)
833 = create_type_stub_decl (new_name, new_record_type);
834 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
835 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
836 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
837 TYPE_SIZE_UNIT (new_record_type)
838 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
840 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
842 /* Now scan all the fields, replacing each field with a new
843 field corresponding to the new encoding. */
844 for (old_field = TYPE_FIELDS (record_type); old_field;
845 old_field = TREE_CHAIN (old_field))
847 tree field_type = TREE_TYPE (old_field);
848 tree field_name = DECL_NAME (old_field);
850 tree curpos = bit_position (old_field);
852 unsigned int align = 0;
855 /* See how the position was modified from the last position.
857 There are two basic cases we support: a value was added
858 to the last position or the last position was rounded to
859 a boundary and they something was added. Check for the
860 first case first. If not, see if there is any evidence
861 of rounding. If so, round the last position and try
864 If this is a union, the position can be taken as zero. */
866 /* Some computations depend on the shape of the position expression,
867 so strip conversions to make sure it's exposed. */
868 curpos = remove_conversions (curpos, true);
870 if (TREE_CODE (new_record_type) == UNION_TYPE)
871 pos = bitsize_zero_node, align = 0;
873 pos = compute_related_constant (curpos, last_pos);
875 if (!pos && TREE_CODE (curpos) == MULT_EXPR
876 && host_integerp (TREE_OPERAND (curpos, 1), 1))
878 tree offset = TREE_OPERAND (curpos, 0);
879 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
881 /* An offset which is a bitwise AND with a negative power of 2
882 means an alignment corresponding to this power of 2. */
883 offset = remove_conversions (offset, true);
884 if (TREE_CODE (offset) == BIT_AND_EXPR
885 && host_integerp (TREE_OPERAND (offset, 1), 0)
886 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
889 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
890 if (exact_log2 (pow) > 0)
894 pos = compute_related_constant (curpos,
895 round_up (last_pos, align));
897 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
898 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
899 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
900 && host_integerp (TREE_OPERAND
901 (TREE_OPERAND (curpos, 0), 1),
906 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
907 pos = compute_related_constant (curpos,
908 round_up (last_pos, align));
910 else if (potential_alignment_gap (prev_old_field, old_field,
913 align = TYPE_ALIGN (field_type);
914 pos = compute_related_constant (curpos,
915 round_up (last_pos, align));
918 /* If we can't compute a position, set it to zero.
920 ??? We really should abort here, but it's too much work
921 to get this correct for all cases. */
924 pos = bitsize_zero_node;
926 /* See if this type is variable-sized and make a pointer type
927 and indicate the indirection if so. Beware that the debug
928 back-end may adjust the position computed above according
929 to the alignment of the field type, i.e. the pointer type
930 in this case, if we don't preventively counter that. */
931 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
933 field_type = build_pointer_type (field_type);
934 if (align != 0 && TYPE_ALIGN (field_type) > align)
936 field_type = copy_node (field_type);
937 TYPE_ALIGN (field_type) = align;
942 /* Make a new field name, if necessary. */
/* Suffix encodings ("XV..." -- see exp_dbug.ads) tell the debugger
   how the field differs from the source-level one. */
943 if (var || align != 0)
948 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
949 align / BITS_PER_UNIT);
951 strcpy (suffix, "XVL");
953 field_name = concat_name (field_name, suffix);
956 new_field = create_field_decl (field_name, field_type,
958 DECL_SIZE (old_field), pos, 0);
/* Front-insert; the list is reversed after the loop. */
959 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
960 TYPE_FIELDS (new_record_type) = new_field;
962 /* If old_field is a QUAL_UNION_TYPE, take its size as being
963 zero. The only time it's not the last field of the record
964 is when there are other components at fixed positions after
965 it (meaning there was a rep clause for every field) and we
966 want to be able to encode them. */
967 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
968 (TREE_CODE (TREE_TYPE (old_field))
971 : DECL_SIZE (old_field));
972 prev_old_field = old_field;
975 TYPE_FIELDS (new_record_type)
976 = nreverse (TYPE_FIELDS (new_record_type));
978 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
981 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
984 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
987 add_parallel_type (tree decl, tree parallel_type)
/* NOTE(review): the declaration of D (presumably initialized from DECL) is
   elided in this extract -- confirm against the full file.  */
/* Walk to the end of the existing chain of parallel types; each link goes
   through the stub TYPE_DECL of the previous parallel type.  */
991 while (DECL_PARALLEL_TYPE (d))
992 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
/* Hook PARALLEL_TYPE onto the last decl of the chain.  */
994 SET_DECL_PARALLEL_TYPE (d, parallel_type);
997 /* Return the parallel type associated to a type, if any. */
1000 get_parallel_type (tree type)
/* The parallel type, when present, is recorded on the type's stub
   TYPE_DECL.  */
1002 if (TYPE_STUB_DECL (type))
1003 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
/* NOTE(review): the fall-through return for types without a stub decl is
   elided in this extract.  */
1008 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1009 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1010 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1011 replace a value of zero with the old size. If HAS_REP is true, we take the
1012 MAX of the end position of this field with LAST_SIZE. In all other cases,
1013 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1016 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1019 tree type = TREE_TYPE (last_size);
/* Ordinary field: the new size is FIRST_BIT + SIZE, possibly MAXed with the
   previous size when the record has a representation clause.  */
1022 if (!special || TREE_CODE (size) != COND_EXPR)
1024 new_size = size_binop (PLUS_EXPR, first_bit, size);
1026 new_size = size_binop (MAX_EXPR, last_size, new_size);
/* QUAL_UNION_TYPE variant: recurse on each arm of the COND_EXPR,
   substituting LAST_SIZE wherever an arm's size is zero.  */
1030 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1031 integer_zerop (TREE_OPERAND (size, 1))
1032 ? last_size : merge_sizes (last_size, first_bit,
1033 TREE_OPERAND (size, 1),
1035 integer_zerop (TREE_OPERAND (size, 2))
1036 ? last_size : merge_sizes (last_size, first_bit,
1037 TREE_OPERAND (size, 2),
1040 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1041 when fed through substitute_in_expr) into thinking that a constant
1042 size is not constant. */
1043 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1044 new_size = TREE_OPERAND (new_size, 0);
1049 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1050 related by the addition of a constant. Return that constant if so. */
1053 compute_related_constant (tree op0, tree op1)
1055 tree op0_var, op1_var;
/* Split each operand into a constant part and a variable part.  */
1056 tree op0_con = split_plus (op0, &op0_var);
1057 tree op1_con = split_plus (op1, &op1_var);
1058 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* If the variable parts are identical, the difference of the constant
   parts relates the operands; otherwise double-check by rebuilding OP0
   from OP1's variable part plus the candidate constant.  */
1060 if (operand_equal_p (op0_var, op1_var, 0))
1062 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
/* NOTE(review): the return statements of both branches and the final
   failure return are elided in this extract.  */
1068 /* Utility function of above to split a tree OP which may be a sum, into a
1069 constant part, which is returned, and a variable part, which is stored
1070 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1074 split_plus (tree in, tree *pvar)
1076 /* Strip NOPS in order to ease the tree traversal and maximize the
1077 potential for constant or plus/minus discovery. We need to be careful
1078 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part.  */
1082 *pvar = convert (bitsizetype, in);
/* A bare constant has no variable part.  */
1084 if (TREE_CODE (in) == INTEGER_CST)
1086 *pvar = bitsize_zero_node;
1087 return convert (bitsizetype, in);
/* For a sum or difference, split both operands recursively and recombine
   the variable and constant halves with the same operation.  */
1089 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1091 tree lhs_var, rhs_var;
1092 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1093 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side contained a constant: keep *PVAR as set above and report
   a zero constant part.  */
1095 if (lhs_var == TREE_OPERAND (in, 0)
1096 && rhs_var == TREE_OPERAND (in, 1))
1097 return bitsize_zero_node;
1099 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1100 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
/* Anything else: no constant part.  */
1103 return bitsize_zero_node;
1106 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1107 subprogram. If it is void_type_node, then we are dealing with a procedure,
1108 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1109 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1110 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1111 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1112 object. RETURNS_BY_REF is true if the function returns by reference.
1113 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1114 first parameter) the address of the place to copy its result. */
1117 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1118 bool returns_unconstrained, bool returns_by_ref,
1119 bool returns_by_target_ptr)
1121 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1122 the subprogram formal parameters. This list is generated by traversing the
1123 input list of PARM_DECL nodes. */
1124 tree param_type_list = NULL;
/* NOTE(review): the declarations of PARAM_DECL and TYPE are elided in this
   extract.  */
1128 for (param_decl = param_decl_list; param_decl;
1129 param_decl = TREE_CHAIN (param_decl))
1130 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1133 /* The list of the function parameter types has to be terminated by the void
1134 type to signal to the back-end that we are not dealing with a variable
1135 parameter subprogram, but that the subprogram has a fixed number of
1137 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1139 /* The list of argument types has been created in reverse
1141 param_type_list = nreverse (param_type_list);
1143 type = build_function_type (return_type, param_type_list);
1145 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1146 or the new type should, make a copy of TYPE. Likewise for
1147 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1148 if (TYPE_CI_CO_LIST (type) || cico_list
1149 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1150 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1151 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1152 type = copy_type (type);
/* Stamp the Ada-specific calling-convention flags on the (possibly
   copied) type.  */
1154 TYPE_CI_CO_LIST (type) = cico_list;
1155 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1156 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1157 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1161 /* Return a copy of TYPE but safe to modify in any way. */
1164 copy_type (tree type)
1166 tree new_type = copy_node (type);
1168 /* copy_node clears this field instead of copying it, because it is
1169 aliased with TREE_CHAIN. */
1170 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer and variant caches so that
   later modifications cannot leak into shared type nodes.  */
1172 TYPE_POINTER_TO (new_type) = 0;
1173 TYPE_REFERENCE_TO (new_type) = 0;
1174 TYPE_MAIN_VARIANT (new_type) = new_type;
1175 TYPE_NEXT_VARIANT (new_type) = 0;
1180 /* Return a subtype of sizetype with range MIN to MAX and whose
1181 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1182 of the associated TYPE_DECL. */
1185 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1187 /* First build a type for the desired range. */
1188 tree type = build_index_2_type (min, max);
1190 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1191 if (TYPE_INDEX_TYPE (type) == index)
1194 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1195 no way of sharing these types, but that's only a small hole. */
1196 if (TYPE_INDEX_TYPE (type))
1197 type = copy_type (type);
/* Record the index type and declare the result so debug info is emitted
   at GNAT_NODE's position.  */
1199 SET_TYPE_INDEX_TYPE (type, index);
1200 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1205 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1206 sizetype is used. */
1209 create_range_type (tree type, tree min, tree max)
1213 if (type == NULL_TREE)
1216 /* First build a type with the base range. */
1218 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type))
/* Convert the requested bounds to the base type before comparing or
   recording them.  */
1220 min = convert (type, min);
1221 max = convert (type, max);
1223 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1224 if (TYPE_RM_MIN_VALUE (range_type)
1225 && TYPE_RM_MAX_VALUE (range_type)
1226 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1227 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1230 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1231 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1232 range_type = copy_type (range_type);
1234 /* Then set the actual range. */
1235 SET_TYPE_RM_MIN_VALUE (range_type, min);
1236 SET_TYPE_RM_MAX_VALUE (range_type, max);
1241 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1242 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1246 create_type_stub_decl (tree type_name, tree type)
1248 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1249 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1250 emitted in DWARF. */
1251 tree type_decl = build_decl (input_location,
1252 TYPE_DECL, type_name, type);
1253 DECL_ARTIFICIAL (type_decl) = 1;
1257 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1258 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1259 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1260 true if we need to write debug information about this type. GNAT_NODE
1261 is used for the position of the decl. */
1264 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1265 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1267 enum tree_code code = TREE_CODE (type);
/* TYPE already carries a TYPE_DECL name iff it has been named before.  */
1268 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1271 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1272 gcc_assert (!TYPE_IS_DUMMY_P (type));
1274 /* If the type hasn't been named yet, we're naming it; preserve an existing
1275 TYPE_STUB_DECL that has been attached to it for some purpose. */
1276 if (!named && TYPE_STUB_DECL (type))
1278 type_decl = TYPE_STUB_DECL (type);
1279 DECL_NAME (type_decl) = type_name;
/* Otherwise build a fresh TYPE_DECL.  */
1282 type_decl = build_decl (input_location,
1283 TYPE_DECL, type_name, type);
1285 DECL_ARTIFICIAL (type_decl) = artificial_p;
1286 gnat_pushdecl (type_decl, gnat_node);
1287 process_attributes (type_decl, attr_list);
1289 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1290 This causes the name to be also viewed as a "tag" by the debug
1291 back-end, with the advantage that no DW_TAG_typedef is emitted
1292 for artificial "tagged" types in DWARF. */
1294 TYPE_STUB_DECL (type) = type_decl;
1296 /* Pass the type declaration to the debug back-end unless this is an
1297 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1298 type for which debugging information was not requested, or else an
1299 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1300 handled separately. And do not pass dummy types either. */
1301 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1302 DECL_IGNORED_P (type_decl) = 1;
1303 else if (code != ENUMERAL_TYPE
1304 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1305 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1306 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1307 && !(code == RECORD_TYPE
1309 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1310 rest_of_type_decl_compilation (type_decl);
1315 /* Return a VAR_DECL or CONST_DECL node.
1317 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1318 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1319 the GCC tree for an optional initial expression; NULL_TREE if none.
1321 CONST_FLAG is true if this variable is constant, in which case we might
1322 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1324 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1325 definition to be made visible outside of the current compilation unit, for
1326 instance variable definitions in a package specification.
1328 EXTERN_FLAG is true when processing an external variable declaration (as
1329 opposed to a definition: no storage is to be allocated for the variable).
1331 STATIC_FLAG is only relevant when not at top level. In that case
1332 it indicates whether to always allocate storage to the variable.
1334 GNAT_NODE is used for the position of the decl. */
1337 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1338 bool const_flag, bool public_flag, bool extern_flag,
1339 bool static_flag, bool const_decl_allowed_p,
1340 struct attrib *attr_list, Node_Id gnat_node)
/* NOTE(review): the start of the INIT_CONST computation (its declaration
   and first condition) is elided in this extract.  */
1344 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1345 && (global_bindings_p () || static_flag
1346 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1347 : TREE_CONSTANT (var_init)));
1349 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1350 case the initializer may be used in-lieu of the DECL node (as done in
1351 Identifier_to_gnu). This is useful to prevent the need of elaboration
1352 code when an identifier for which such a decl is made is in turn used as
1353 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1354 but extra constraints apply to this choice (see below) and are not
1355 relevant to the distinction we wish to make. */
1356 bool constant_p = const_flag && init_const;
1358 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1359 and may be used for scalars in general but not for aggregates. */
1361 = build_decl (input_location,
1362 (constant_p && const_decl_allowed_p
1363 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1366 /* If this is external, throw away any initializations (they will be done
1367 elsewhere) unless this is a constant for which we would like to remain
1368 able to get the initializer. If we are defining a global here, leave a
1369 constant initialization and save any variable elaborations for the
1370 elaboration routine. If we are just annotating types, throw away the
1371 initialization if it isn't a constant. */
1372 if ((extern_flag && !constant_p)
1373 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1374 var_init = NULL_TREE;
1376 /* At the global level, an initializer requiring code to be generated
1377 produces elaboration statements. Check that such statements are allowed,
1378 that is, not violating a No_Elaboration_Code restriction. */
1379 if (global_bindings_p () && var_init != 0 && ! init_const)
1380 Check_Elaboration_Code_Allowed (gnat_node);
1382 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1383 try to fiddle with DECL_COMMON. However, on platforms that don't
1384 support global BSS sections, uninitialized global variables would
1385 go in DATA instead, thus increasing the size of the executable. */
1387 && TREE_CODE (var_decl) == VAR_DECL
1388 && !have_global_bss_p ())
1389 DECL_COMMON (var_decl) = 1;
/* Fill in the standard DECL flags from the caller's arguments.  */
1390 DECL_INITIAL (var_decl) = var_init;
1391 TREE_READONLY (var_decl) = const_flag;
1392 DECL_EXTERNAL (var_decl) = extern_flag;
1393 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1394 TREE_CONSTANT (var_decl) = constant_p;
1395 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1396 = TYPE_VOLATILE (type);
1398 /* If it's public and not external, always allocate storage for it.
1399 At the global binding level we need to allocate static storage for the
1400 variable if and only if it's not external. If we are not at the top level
1401 we allocate automatic storage unless requested not to. */
1402 TREE_STATIC (var_decl)
1403 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1405 /* For an external constant whose initializer is not absolute, do not emit
1406 debug info. In DWARF this would mean a global relocation in a read-only
1407 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1410 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1411 != null_pointer_node)
1412 DECL_IGNORED_P (var_decl) = 1;
1414 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1415 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1417 process_attributes (var_decl, attr_list);
1419 /* Add this decl to the current binding level. */
1420 gnat_pushdecl (var_decl, gnat_node);
/* A volatile object must live in memory so its address can be taken.  */
1422 if (TREE_SIDE_EFFECTS (var_decl))
1423 TREE_ADDRESSABLE (var_decl) = 1;
/* Emit the declaration: globals go through rest_of_decl_compilation,
   locals are expanded in place.  */
1425 if (TREE_CODE (var_decl) != CONST_DECL)
1427 if (global_bindings_p ())
1428 rest_of_decl_compilation (var_decl, true, 0);
1431 expand_decl (var_decl);
1436 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1439 aggregate_type_contains_array_p (tree type)
1441 switch (TREE_CODE (type))
1445 case QUAL_UNION_TYPE:
/* Record/union case: recurse into every aggregate-typed field.  */
1448 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1449 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1450 && aggregate_type_contains_array_p (TREE_TYPE (field)))
/* NOTE(review): the other case labels (notably the ARRAY_TYPE case) and
   the returns are elided in this extract.  */
1463 /* Return a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1464 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1465 this field is in a record type with a "pragma pack". If SIZE is nonzero
1466 it is the specified size for this field. If POS is nonzero, it is the bit
1467 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1468 the address of this field for aliasing purposes. If it is negative, we
1469 should not make a bitfield, which is used by make_aligning_type. */
1472 create_field_decl (tree field_name, tree field_type, tree record_type,
1473 int packed, tree size, tree pos, int addressable)
1475 tree field_decl = build_decl (input_location,
1476 FIELD_DECL, field_name, field_type);
1478 DECL_CONTEXT (field_decl) = record_type;
1479 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1481 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1482 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1483 Likewise for an aggregate without specified position that contains an
1484 array, because in this case slices of variable length of this array
1485 must be handled by GCC and variable-sized objects need to be aligned
1486 to at least a byte boundary. */
1487 if (packed && (TYPE_MODE (field_type) == BLKmode
1489 && AGGREGATE_TYPE_P (field_type)
1490 && aggregate_type_contains_array_p (field_type))))
1491 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1493 /* If a size is specified, use it. Otherwise, if the record type is packed
1494 compute a size to use, which may differ from the object's natural size.
1495 We always set a size in this case to trigger the checks for bitfield
1496 creation below, which is typically required when no position has been
1499 size = convert (bitsizetype, size);
1500 else if (packed == 1)
1502 size = rm_size (field_type);
1504 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1506 if (TREE_CODE (size) == INTEGER_CST
1507 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1508 size = round_up (size, BITS_PER_UNIT);
1511 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1512 specified for two reasons: first if the size differs from the natural
1513 size. Second, if the alignment is insufficient. There are a number of
1514 ways the latter can be true.
1516 We never make a bitfield if the type of the field has a nonconstant size,
1517 because no such entity requiring bitfield operations should reach here.
1519 We do *preventively* make a bitfield when there might be the need for it
1520 but we don't have all the necessary information to decide, as is the case
1521 of a field with no specified position in a packed record.
1523 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1524 in layout_decl or finish_record_type to clear the bit_field indication if
1525 it is in fact not needed. */
1526 if (addressable >= 0
1528 && TREE_CODE (size) == INTEGER_CST
1529 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1530 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1531 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1533 || (TYPE_ALIGN (record_type) != 0
1534 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1536 DECL_BIT_FIELD (field_decl) = 1;
1537 DECL_SIZE (field_decl) = size;
/* Without packing or an explicit position, align the bitfield to its
   natural boundary: the record's alignment if smaller, else the
   field type's.  */
1538 if (!packed && !pos)
1540 if (TYPE_ALIGN (record_type) != 0
1541 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1542 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1544 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1548 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1550 /* Bump the alignment if need be, either for bitfield/packing purposes or
1551 to satisfy the type requirements if no such consideration applies. When
1552 we get the alignment from the type, indicate if this is from an explicit
1553 user request, which prevents stor-layout from lowering it later on. */
1555 unsigned int bit_align
1556 = (DECL_BIT_FIELD (field_decl) ? 1
1557 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1559 if (bit_align > DECL_ALIGN (field_decl))
1560 DECL_ALIGN (field_decl) = bit_align;
1561 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1563 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1564 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1570 /* We need to pass in the alignment the DECL is known to have.
1571 This is the lowest-order bit set in POS, but no more than
1572 the alignment of the record, if one is specified. Note
1573 that an alignment of 0 is taken as infinite. */
1574 unsigned int known_align;
/* POS & -POS isolates the lowest set bit, i.e. the largest power of two
   dividing POS.  */
1576 if (host_integerp (pos, 1))
1577 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1579 known_align = BITS_PER_UNIT;
1581 if (TYPE_ALIGN (record_type)
1582 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1583 known_align = TYPE_ALIGN (record_type);
1585 layout_decl (field_decl, known_align);
1586 SET_DECL_OFFSET_ALIGN (field_decl,
1587 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1589 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1590 &DECL_FIELD_BIT_OFFSET (field_decl),
1591 DECL_OFFSET_ALIGN (field_decl), pos);
1594 /* In addition to what our caller says, claim the field is addressable if we
1595 know that its type is not suitable.
1597 The field may also be "technically" nonaddressable, meaning that even if
1598 we attempt to take the field's address we will actually get the address
1599 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1600 value we have at this point is not accurate enough, so we don't account
1601 for this here and let finish_record_type decide. */
1602 if (!addressable && !type_for_nonaliased_component_p (field_type))
1605 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1610 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1611 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1612 (either an In parameter or an address of a pass-by-ref parameter). */
1615 create_param_decl (tree param_name, tree param_type, bool readonly)
1617 tree param_decl = build_decl (input_location,
1618 PARM_DECL, param_name, param_type);
1620 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1621 can lead to various ABI violations. */
1622 if (targetm.calls.promote_prototypes (NULL_TREE)
1623 && INTEGRAL_TYPE_P (param_type)
1624 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1626 /* We have to be careful about biased types here. Make a subtype
1627 of integer_type_node with the proper biasing. */
1628 if (TREE_CODE (param_type) == INTEGER_TYPE
1629 && TYPE_BIASED_REPRESENTATION_P (param_type))
1632 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1633 TREE_TYPE (subtype) = integer_type_node;
1634 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1635 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1636 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1637 param_type = subtype;
/* Non-biased narrow integers are simply promoted to int.  */
1640 param_type = integer_type_node;
/* DECL_ARG_TYPE is the type as passed, possibly promoted above.  */
1643 DECL_ARG_TYPE (param_decl) = param_type;
1644 TREE_READONLY (param_decl) = readonly;
1648 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1651 process_attributes (tree decl, struct attrib *attr_list)
1653 for (; attr_list; attr_list = attr_list->next)
1654 switch (attr_list->type)
1656 case ATTR_MACHINE_ATTRIBUTE:
/* Hand machine attributes to the generic attribute machinery.  */
1657 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1659 ATTR_FLAG_TYPE_IN_PLACE);
1662 case ATTR_LINK_ALIAS:
1663 if (! DECL_EXTERNAL (decl))
1665 TREE_STATIC (decl) = 1;
1666 assemble_alias (decl, attr_list->name);
1670 case ATTR_WEAK_EXTERNAL:
/* Emit a weak declaration when the target supports it, otherwise warn
   at the attribute's source location.  */
1672 declare_weak (decl);
1674 post_error ("?weak declarations not supported on this target",
1675 attr_list->error_point);
1678 case ATTR_LINK_SECTION:
1679 if (targetm.have_named_sections)
1681 DECL_SECTION_NAME (decl)
1682 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1683 IDENTIFIER_POINTER (attr_list->name));
1684 DECL_COMMON (decl) = 0;
1687 post_error ("?section attributes are not supported for this target",
1688 attr_list->error_point);
1691 case ATTR_LINK_CONSTRUCTOR:
1692 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1693 TREE_USED (decl) = 1;
1696 case ATTR_LINK_DESTRUCTOR:
1697 DECL_STATIC_DESTRUCTOR (decl) = 1;
1698 TREE_USED (decl) = 1;
1701 case ATTR_THREAD_LOCAL_STORAGE:
1702 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1703 DECL_COMMON (decl) = 0;
1708 /* Record DECL as a global renaming pointer. */
1711 record_global_renaming_pointer (tree decl)
/* Only decls carrying a renamed object may be recorded.  */
1713 gcc_assert (DECL_RENAMED_OBJECT (decl));
1714 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1717 /* Invalidate the global renaming pointers. */
1720 invalidate_global_renaming_pointers (void)
/* Clear the renamed object of every recorded decl, then release the
   vector itself.  */
1725 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1726 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1728 VEC_free (tree, gc, global_renaming_pointers);
1731 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1735 value_factor_p (tree value, HOST_WIDE_INT factor)
/* A constant is a multiple iff the remainder is zero.  */
1737 if (host_integerp (value, 1))
1738 return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple if either operand is.  */
1740 if (TREE_CODE (value) == MULT_EXPR)
1741 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1742 || value_factor_p (TREE_OPERAND (value, 1), factor));
1747 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1748 unless we can prove these 2 fields are laid out in such a way that no gap
1749 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1750 is the distance in bits between the end of PREV_FIELD and the starting
1751 position of CURR_FIELD. It is ignored if null. */
1754 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1756 /* If this is the first field of the record, there cannot be any gap */
1760 /* If the previous field is a union type, then return False: The only
1761 time when such a field is not the last field of the record is when
1762 there are other components at fixed positions after it (meaning there
1763 was a rep clause for every field), in which case we don't want the
1764 alignment constraint to override them. */
1765 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1768 /* If the distance between the end of prev_field and the beginning of
1769 curr_field is constant, then there is a gap if the value of this
1770 constant is not null. */
1771 if (offset && host_integerp (offset, 1))
1772 return !integer_zerop (offset);
1774 /* If the size and position of the previous field are constant,
1775 then check the sum of this size and position. There will be a gap
1776 iff it is not multiple of the current field alignment. */
1777 if (host_integerp (DECL_SIZE (prev_field), 1)
1778 && host_integerp (bit_position (prev_field), 1))
1779 return ((tree_low_cst (bit_position (prev_field), 1)
1780 + tree_low_cst (DECL_SIZE (prev_field), 1))
1781 % DECL_ALIGN (curr_field) != 0);
1783 /* If both the position and size of the previous field are multiples
1784 of the current field alignment, there cannot be any gap. */
1785 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1786 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1789 /* Fallback, return that there may be a potential gap */
1793 /* Returns a LABEL_DECL node for LABEL_NAME. */
1796 create_label_decl (tree label_name)
1798 tree label_decl = build_decl (input_location,
1799 LABEL_DECL, label_name, void_type_node);
/* Labels belong to the current function and carry no machine mode.  */
1801 DECL_CONTEXT (label_decl) = current_function_decl;
1802 DECL_MODE (label_decl) = VOIDmode;
1803 DECL_SOURCE_LOCATION (label_decl) = input_location;
1808 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1809 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1810 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1811 PARM_DECL nodes chained through the TREE_CHAIN field).
1813 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1814 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1817 create_subprog_decl (tree subprog_name, tree asm_name,
1818 tree subprog_type, tree param_decl_list, bool inline_flag,
1819 bool public_flag, bool extern_flag,
1820 struct attrib *attr_list, Node_Id gnat_node)
1822 tree return_type = TREE_TYPE (subprog_type);
1823 tree subprog_decl = build_decl (input_location,
1824 FUNCTION_DECL, subprog_name, subprog_type);
1826 /* If this is a non-inline function nested inside an inlined external
1827 function, we cannot honor both requests without cloning the nested
1828 function in the current unit since it is private to the other unit.
1829 We could inline the nested function as well but it's probably better
1830 to err on the side of too little inlining. */
1832 && current_function_decl
1833 && DECL_DECLARED_INLINE_P (current_function_decl)
1834 && DECL_EXTERNAL (current_function_decl)
1835 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
/* Fill in the standard FUNCTION_DECL flags from the caller's arguments.  */
1837 DECL_EXTERNAL (subprog_decl) = extern_flag;
1838 TREE_PUBLIC (subprog_decl) = public_flag;
1839 TREE_STATIC (subprog_decl) = 1;
1840 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1841 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1842 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1843 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1844 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1845 DECL_RESULT (subprog_decl) = build_decl (input_location,
1846 RESULT_DECL, 0, return_type);
1847 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1848 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1850 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1851 target by-reference return mechanism. This is not supported all the
1852 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1853 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1854 the RESULT_DECL instead - see gnat_genericize for more details. */
1855 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1857 tree result_decl = DECL_RESULT (subprog_decl);
1859 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1860 DECL_BY_REFERENCE (result_decl) = 1;
1865 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1867 /* The expand_main_function circuitry expects "main_identifier_node" to
1868 designate the DECL_NAME of the 'main' entry point, in turn expected
1869 to be declared as the "main" function literally by default. Ada
1870 program entry points are typically declared with a different name
1871 within the binder generated file, exported as 'main' to satisfy the
1872 system expectations. Redirect main_identifier_node in this case. */
1873 if (asm_name == main_identifier_node)
1874 main_identifier_node = DECL_NAME (subprog_decl);
1877 process_attributes (subprog_decl, attr_list);
1879 /* Add this decl to the current binding level. */
1880 gnat_pushdecl (subprog_decl, gnat_node);
1882 /* Output the assembler code and/or RTL for the declaration. */
1883 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1885 return subprog_decl;
1888 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1889 body. This routine needs to be invoked before processing the declarations
1890 appearing in the subprogram. */
1893 begin_subprog_body (tree subprog_decl)
1897 current_function_decl = subprog_decl;
1898 announce_function (subprog_decl);
1900 /* Enter a new binding level and show that all the parameters belong to
1903 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1904 param_decl = TREE_CHAIN (param_decl))
1905 DECL_CONTEXT (param_decl) = subprog_decl;
1907 make_decl_rtl (subprog_decl);
1909 /* We handle pending sizes via the elaboration of types, so we don't need to
1910 save them. This causes them to be marked as part of the outer function
1911 and then discarded. */
1912 get_pending_sizes ();
1916 /* Helper for the genericization callback. Return a dereference of VAL
1917 if it is of a reference type. */
/* NOTE(review): the declaration line, braces and both return statements
   (return VAL unchanged for non-references; return REF otherwise) are
   elided in this excerpt — confirm against the full source. */
1920 convert_from_reference (tree val)
1922 tree value_type, ref;
/* Non-reference values presumably pass through unchanged (the early-return
   line is elided here). */
1924 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
/* Build an INDIRECT_REF of the pointed-to type. */
1927 value_type = TREE_TYPE (TREE_TYPE (val));
1928 ref = build1 (INDIRECT_REF, value_type, val);
1930 /* See if what we reference is CONST or VOLATILE, which requires
1931 looking into array types to get to the component type. */
1933 while (TREE_CODE (value_type) == ARRAY_TYPE)
1934 value_type = TREE_TYPE (value_type);
/* Propagate constness/volatility of the referenced component type onto the
   dereference (the TREE_READONLY (ref) target line appears elided above the
   assignment on the next line — verify). */
1937 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1938 TREE_THIS_VOLATILE (ref)
1939 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
/* A volatile access, or a side-effecting operand, makes the whole
   dereference side-effecting. */
1941 TREE_SIDE_EFFECTS (ref)
1942 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1947 /* Helper for the genericization callback. Returns true if T denotes
1948 a RESULT_DECL with DECL_BY_REFERENCE set. */
/* NOTE(review): return-type line elided in this excerpt. */
1951 is_byref_result (tree t)
1953 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1957 /* Tree walking callback for gnat_genericize. Currently ...
1959 o Adjust references to the function's DECL_RESULT if it is marked
1960 DECL_BY_REFERENCE and so has had its type turned into a reference
1961 type at the end of the function compilation. */
/* NOTE(review): braces and the per-branch returns/*walk_subtrees updates
   are elided in this excerpt — confirm against the full source. */
1964 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1966 /* This implementation is modeled after what the C++ front-end is
1967 doing, basis of the downstream passes behavior. */
1969 tree stmt = *stmt_p;
1970 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1972 /* If we have a direct mention of the result decl, dereference. */
1973 if (is_byref_result (stmt))
1975 *stmt_p = convert_from_reference (stmt);
1980 /* Otherwise, no need to walk the same tree twice. */
1981 if (pointer_set_contains (p_set, stmt))
1987 /* If we are taking the address of what now is a reference, just get the
1989 if (TREE_CODE (stmt) == ADDR_EXPR
1990 && is_byref_result (TREE_OPERAND (stmt, 0)))
1992 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1996 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
1997 else if (TREE_CODE (stmt) == RETURN_EXPR
1998 && TREE_OPERAND (stmt, 0)
1999 && is_byref_result (TREE_OPERAND (stmt, 0)))
2002 /* Don't look inside trees that cannot embed references of interest. */
2003 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember this node so the walk does not revisit it. */
2006 pointer_set_insert (p_set, *stmt_p);
2011 /* Perform lowering of Ada trees to GENERIC. In particular:
2013 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2014 and adjust all the references to this decl accordingly. */
/* NOTE(review): return-type line, braces and the early return after the
   DECL_BY_REFERENCE test are elided in this excerpt. */
2017 gnat_genericize (tree fndecl)
2019 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2020 was handled by simply setting TREE_ADDRESSABLE on the result type.
2021 Everything required to actually pass by invisible ref using the target
2022 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2024 This doesn't work with GCC 4 any more for several reasons. First, the
2025 gimplification process might need the creation of temporaries of this
2026 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2027 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2028 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2029 be explicitly accounted for by the front-end in the function body.
2031 We achieve the complete transformation in two steps:
2033 1/ create_subprog_decl performs early attribute tweaks: it clears
2034 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2035 the result decl. The former ensures that the bit isn't set in the GCC
2036 tree saved for the function, so prevents ICEs on temporary creation.
2037 The latter we use here to trigger the rest of the processing.
2039 2/ This function performs the type transformation on the result decl
2040 and adjusts all the references to this decl from the function body
2043 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2044 strategy, which escapes the gimplifier temporary creation issues by
2045 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2046 on simple specific support code in aggregate_value_p to look at the
2047 target function result decl explicitly. */
2049 struct pointer_set_t *p_set;
2050 tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless step 1/ above marked the result by-reference. */
2052 if (!DECL_BY_REFERENCE (decl_result))
2055 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2056 occurrences in the function body using the common tree-walking facility.
2057 We want to see every occurrence of the result decl to adjust the
2058 referencing tree, so need to use our own pointer set to control which
2059 trees should be visited again or not. */
2061 p_set = pointer_set_create ();
/* Rewrite the result type as a reference type and re-lay out the decl. */
2063 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2064 TREE_ADDRESSABLE (decl_result) = 0;
2065 relayout_decl (decl_result);
/* Fix up every use of the result decl in the saved function body. */
2067 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2069 pointer_set_destroy (p_set);
2072 /* Finish the definition of the current subprogram BODY and compile it all the
2073 way to assembler language output. ELAB_P tells if this is called for an
2074 elaboration routine, to be entirely discarded if empty. */
/* NOTE(review): return-type line, braces, the gnat_poplevel call and the
   early return for type_annotate_only are elided in this excerpt. */
2077 end_subprog_body (tree body, bool elab_p)
2079 tree fndecl = current_function_decl;
2081 /* Mark the BLOCK for this level as being for this function and pop the
2082 level. Since the vars in it are the parameters, clear them. */
2083 BLOCK_VARS (current_binding_level->block) = 0;
2084 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2085 DECL_INITIAL (fndecl) = current_binding_level->block;
2088 /* We handle pending sizes via the elaboration of types, so we don't
2089 need to save them. */
2090 get_pending_sizes ();
2092 /* Mark the RESULT_DECL as being in this subprogram. */
2093 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
/* Attach the finished body and restore the enclosing function (NULL_TREE
   at top level). */
2095 DECL_SAVED_TREE (fndecl) = body;
2097 current_function_decl = DECL_CONTEXT (fndecl);
2100 /* We cannot track the location of errors past this point. */
2101 error_gnat_node = Empty;
2103 /* If we're only annotating types, don't actually compile this function. */
2104 if (type_annotate_only)
2107 /* Perform the required pre-gimplification transformations on the tree. */
2108 gnat_genericize (fndecl);
2110 /* We do different things for nested and non-nested functions.
2111 ??? This should be in cgraph. */
2112 if (!DECL_CONTEXT (fndecl))
2114 gnat_gimplify_function (fndecl);
2116 /* If this is an empty elaboration proc, just discard the node.
2117 Otherwise, compile further. */
2118 if (elab_p && empty_body_p (gimple_body (fndecl)))
2119 cgraph_remove_node (cgraph_node (fndecl));
2121 cgraph_finalize_function (fndecl, false);
2124 /* Register this function with cgraph just far enough to get it
2125 added to our parent's nested function list. */
2126 (void) cgraph_node (fndecl);
2129 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
/* NOTE(review): return-type line and braces are elided in this excerpt. */
2132 gnat_gimplify_function (tree fndecl)
2134 struct cgraph_node *cgn;
/* Dump the GENERIC form, gimplify, then dump the result for -fdump-tree-*. */
2136 dump_function (TDI_original, fndecl);
2137 gimplify_function_tree (fndecl);
2138 dump_function (TDI_generic, fndecl);
2140 /* Convert all nested functions to GIMPLE now. We do things in this order
2141 so that items like VLA sizes are expanded properly in the context of the
2142 correct function. */
2143 cgn = cgraph_node (fndecl);
2144 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2145 gnat_gimplify_function (cgn->decl);
/* Register DECL, a builtin function declaration, at the global binding
   level. NOTE(review): the return-type line and the trailing return of
   DECL appear elided in this excerpt — confirm against the full source. */
2149 gnat_builtin_function (tree decl)
2151 gnat_pushdecl (decl, Empty);
2155 /* Return an integer type with the number of bits of precision given by
2156 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2157 it is a signed type. */
/* NOTE(review): the declarations of the local tree T and the name buffer,
   the if/else around the make_*_type calls, and the final return are
   elided in this excerpt. */
2160 gnat_type_for_size (unsigned precision, int unsignedp)
/* Serve small-precision requests from the signed_and_unsigned_types
   cache when already populated. */
2165 if (precision <= 2 * MAX_BITS_PER_WORD
2166 && signed_and_unsigned_types[precision][unsignedp])
2167 return signed_and_unsigned_types[precision][unsignedp];
/* Otherwise build a fresh type of the requested signedness. */
2170 t = make_unsigned_type (precision);
2172 t = make_signed_type (precision);
/* Cache cacheable precisions for subsequent calls. */
2174 if (precision <= 2 * MAX_BITS_PER_WORD)
2175 signed_and_unsigned_types[precision][unsignedp] = t;
/* Name the type "SIGNED_<n>" or "UNSIGNED_<n>" for debug output. */
2179 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2180 TYPE_NAME (t) = get_identifier (type_name);
2186 /* Likewise for floating-point types. */
/* Return (and cache per machine mode) a REAL_TYPE of the given PRECISION.
   NOTE(review): local declarations, the layout_type call implied by the
   TYPE_MODE assertion, and the final return are elided in this excerpt. */
2189 float_type_for_precision (int precision, enum machine_mode mode)
/* One cached type per machine mode. */
2194 if (float_types[(int) mode])
2195 return float_types[(int) mode];
2197 float_types[(int) mode] = t = make_node (REAL_TYPE);
2198 TYPE_PRECISION (t) = precision;
/* The precision must uniquely determine the mode we asked for. */
2201 gcc_assert (TYPE_MODE (t) == mode);
/* Name the type "FLOAT_<n>" for debug output. */
2204 sprintf (type_name, "FLOAT_%d", precision);
2205 TYPE_NAME (t) = get_identifier (type_name);
2211 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2212 an unsigned type; otherwise a signed type is returned. */
/* NOTE(review): the return-type line, the NULL_TREE returns for the
   BLKmode/complex/unsupported cases and the braces are elided. */
2215 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2217 if (mode == BLKmode)
2219 else if (mode == VOIDmode)
2220 return void_type_node;
2221 else if (COMPLEX_MODE_P (mode))
2223 else if (SCALAR_FLOAT_MODE_P (mode))
2224 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2225 else if (SCALAR_INT_MODE_P (mode))
2226 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2231 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
/* NOTE(review): return-type line, braces and the final return of TYPE are
   elided in this excerpt. */
2234 gnat_unsigned_type (tree type_node)
2236 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For a modular type, keep a link back to the original so the modulus is
   still reachable from the unsigned variant. */
2238 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2240 type = copy_node (type);
2241 TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE is itself derived from a modular type. */
2243 else if (TREE_TYPE (type_node)
2244 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2245 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2247 type = copy_node (type);
2248 TREE_TYPE (type) = TREE_TYPE (type_node);
2254 /* Return the signed version of a TYPE_NODE, a scalar type. */
/* Mirror image of gnat_unsigned_type, selecting the signed variant.
   NOTE(review): return-type line, braces and the final return of TYPE are
   elided in this excerpt. */
2257 gnat_signed_type (tree type_node)
2259 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* For a modular type, keep a link back to the original so the modulus is
   still reachable from the signed variant. */
2261 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2263 type = copy_node (type);
2264 TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE is itself derived from a modular type. */
2266 else if (TREE_TYPE (type_node)
2267 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2268 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2270 type = copy_node (type);
2271 TREE_TYPE (type) = TREE_TYPE (type_node);
2277 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2278 transparently converted to each other. */
/* NOTE(review): the return-type line, the "return 1"/"return 0" lines for
   each branch and the braces are elided in this excerpt. */
2281 gnat_types_compatible_p (tree t1, tree t2)
2283 enum tree_code code;
2285 /* This is the default criterion. */
2286 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2289 /* We only check structural equivalence here. */
2290 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2293 /* Array types are also compatible if they are constrained and have
2294 the same component type and the same domain. */
2295 if (code == ARRAY_TYPE
2296 && TREE_TYPE (t1) == TREE_TYPE (t2)
2297 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2298 || (TYPE_DOMAIN (t1)
2300 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2301 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2302 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2303 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2306 /* Padding record types are also compatible if they pad the same
2307 type and have the same constant size. */
2308 if (code == RECORD_TYPE
2309 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2310 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2311 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2317 /* EXP is an expression for the size of an object. If this size contains
2318 discriminant references, replace them with the maximum (if MAX_P) or
2319 minimum (if !MAX_P) possible value of the discriminant. */
/* NOTE(review): return-type line, braces, several case labels and return
   statements are elided throughout this excerpt — confirm each branch
   against the full source before editing. */
2322 max_size (tree exp, bool max_p)
2324 enum tree_code code = TREE_CODE (exp);
2325 tree type = TREE_TYPE (exp);
/* Dispatch on the class of tree node. */
2327 switch (TREE_CODE_CLASS (code))
2329 case tcc_declaration:
/* Calls: recurse on every argument and rebuild the call. */
2334 if (code == CALL_EXPR)
2337 int i, n = call_expr_nargs (exp);
2340 argarray = (tree *) alloca (n * sizeof (tree));
2341 for (i = 0; i < n; i++)
2342 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2343 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2348 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2349 modify. Otherwise, we treat it like a variable. */
2350 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A discriminant reference: substitute the extreme value of its type. */
2353 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2355 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* Comparisons contribute at most 1 to a size expression. */
2357 case tcc_comparison:
2358 return max_p ? size_one_node : size_zero_node;
2362 case tcc_expression:
/* Handle unary, binary and ternary operators by arity. */
2363 switch (TREE_CODE_LENGTH (code))
2366 if (code == NON_LVALUE_EXPR)
2367 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Negation flips which extreme we are after. */
2370 fold_build1 (code, type,
2371 max_size (TREE_OPERAND (exp, 0),
2372 code == NEGATE_EXPR ? !max_p : max_p));
2375 if (code == COMPOUND_EXPR)
2376 return max_size (TREE_OPERAND (exp, 1), max_p);
2378 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2379 may provide a tighter bound on max_size. */
2380 if (code == MINUS_EXPR
2381 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2383 tree lhs = fold_build2 (MINUS_EXPR, type,
2384 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2385 TREE_OPERAND (exp, 1));
2386 tree rhs = fold_build2 (MINUS_EXPR, type,
2387 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2388 TREE_OPERAND (exp, 1));
2389 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2390 max_size (lhs, max_p),
2391 max_size (rhs, max_p));
/* Generic binary case: subtraction flips the extreme for its RHS. */
2395 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2396 tree rhs = max_size (TREE_OPERAND (exp, 1),
2397 code == MINUS_EXPR ? !max_p : max_p);
2399 /* Special-case wanting the maximum value of a MIN_EXPR.
2400 In that case, if one side overflows, return the other.
2401 sizetype is signed, but we know sizes are non-negative.
2402 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2403 overflowing or the maximum possible value and the RHS
2407 && TREE_CODE (rhs) == INTEGER_CST
2408 && TREE_OVERFLOW (rhs))
2412 && TREE_CODE (lhs) == INTEGER_CST
2413 && TREE_OVERFLOW (lhs))
2415 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2416 && ((TREE_CODE (lhs) == INTEGER_CST
2417 && TREE_OVERFLOW (lhs))
2418 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2419 && !TREE_CONSTANT (rhs))
2422 return fold_build2 (code, type, lhs, rhs);
/* Ternary operators: look through SAVE_EXPR; for COND_EXPR take the
   extremum over both arms. */
2426 if (code == SAVE_EXPR)
2428 else if (code == COND_EXPR)
2429 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2430 max_size (TREE_OPERAND (exp, 1), max_p),
2431 max_size (TREE_OPERAND (exp, 2), max_p));
2434 /* Other tree classes cannot happen. */
2442 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2443 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2444 Return a constructor for the template. */
/* NOTE(review): return-type line, the FIELD local declaration, braces and
   the fall-through "else" for the bounds selection are elided in this
   excerpt. */
2447 build_template (tree template_type, tree array_type, tree expr)
2449 tree template_elts = NULL_TREE;
2450 tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to reach the real array. */
2453 while (TREE_CODE (array_type) == RECORD_TYPE
2454 && (TYPE_IS_PADDING_P (array_type)
2455 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2456 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* Use the recorded actual bounds when the type carries them. */
2458 if (TREE_CODE (array_type) == ARRAY_TYPE
2459 || (TREE_CODE (array_type) == INTEGER_TYPE
2460 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2461 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2463 /* First make the list for a CONSTRUCTOR for the template. Go down the
2464 field list of the template instead of the type chain because this
2465 array might be an Ada array of arrays and we can't tell where the
2466 nested arrays stop being the underlying object. */
2468 for (field = TYPE_FIELDS (template_type); field;
2470 ? (bound_list = TREE_CHAIN (bound_list))
2471 : (array_type = TREE_TYPE (array_type))),
2472 field = TREE_CHAIN (TREE_CHAIN (field)))
2474 tree bounds, min, max;
2476 /* If we have a bound list, get the bounds from there. Likewise
2477 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2478 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2479 This will give us a maximum range. */
2481 bounds = TREE_VALUE (bound_list);
2482 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2483 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2484 else if (expr && TREE_CODE (expr) == PARM_DECL
2485 && DECL_BY_COMPONENT_PTR_P (expr))
2486 bounds = TREE_TYPE (field);
/* Convert the bounds to the types of the min/max field pair. */
2490 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2491 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2493 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2494 substitute it from OBJECT. */
2495 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2496 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Prepend (max, min) so the final nreverse yields min-then-max order. */
2498 template_elts = tree_cons (TREE_CHAIN (field), max,
2499 tree_cons (field, min, template_elts));
2502 return gnat_build_constructor (template_type, nreverse (template_elts));
2505 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2506 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2507 in the type contains in its DECL_INITIAL the expression to use when
2508 a constructor is made for the type. GNAT_ENTITY is an entity used
2509 to print out an error message if the mechanism cannot be applied to
2510 an object of that type and also for the name. */
/* NOTE(review): this excerpt elides many lines of the original (the
   return-type line, local declarations such as ndim/inner_type/idx_arr/
   dtype/klass/fname/tem, braces, case labels of both switches, DTYPE
   assignments for the VAX float digit counts, klass assignments, break
   statements and the final return). Confirm every branch against the full
   source before editing. */
2513 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2515 tree record_type = make_node (RECORD_TYPE);
2516 tree pointer32_type;
2517 tree field_list = 0;
2526 /* If TYPE is an unconstrained array, use the underlying array type. */
2527 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2528 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2530 /* If this is an array, compute the number of dimensions in the array,
2531 get the index types, and point to the inner type. */
2532 if (TREE_CODE (type) != ARRAY_TYPE)
2535 for (ndim = 1, inner_type = type;
2536 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2537 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2538 ndim++, inner_type = TREE_TYPE (inner_type))
2541 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in the reverse order. */
2543 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2544 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2545 for (i = ndim - 1, inner_type = type;
2547 i--, inner_type = TREE_TYPE (inner_type))
2548 idx_arr[i] = TYPE_DOMAIN (inner_type);
2550 for (i = 0, inner_type = type;
2552 i++, inner_type = TREE_TYPE (inner_type))
2553 idx_arr[i] = TYPE_DOMAIN (inner_type);
2555 /* Now get the DTYPE value. */
2556 switch (TREE_CODE (type))
2561 if (TYPE_VAX_FLOATING_POINT_P (type))
2562 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Integer DTYPE codes keyed on mode width; unsigned codes differ from
   signed ones. */
2575 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2578 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2581 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2584 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2587 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2590 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
/* IEEE float: 52 for single precision, 53 for double. */
2596 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2600 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2601 && TYPE_VAX_FLOATING_POINT_P (type))
2602 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Complex float: 54 for single precision, 55 for double. */
2614 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2625 /* Get the CLASS value. */
2628 case By_Descriptor_A:
2629 case By_Short_Descriptor_A:
2632 case By_Descriptor_NCA:
2633 case By_Short_Descriptor_NCA:
2636 case By_Descriptor_SB:
2637 case By_Short_Descriptor_SB:
2641 case By_Short_Descriptor:
2642 case By_Descriptor_S:
2643 case By_Short_Descriptor_S:
2649 /* Make the type for a descriptor for VMS. The first four fields
2650 are the same for all types. */
2653 = chainon (field_list,
2654 make_descriptor_field
2655 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2656 size_in_bytes ((mech == By_Descriptor_A ||
2657 mech == By_Short_Descriptor_A)
2658 ? inner_type : type)));
2660 field_list = chainon (field_list,
2661 make_descriptor_field ("DTYPE",
2662 gnat_type_for_size (8, 1),
2663 record_type, size_int (dtype)));
2664 field_list = chainon (field_list,
2665 make_descriptor_field ("CLASS",
2666 gnat_type_for_size (8, 1),
2667 record_type, size_int (klass)));
2669 /* Of course this will crash at run-time if the address space is not
2670 within the low 32 bits, but there is nothing else we can do. */
2671 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2674 = chainon (field_list,
2675 make_descriptor_field
2676 ("POINTER", pointer32_type, record_type,
2677 build_unary_op (ADDR_EXPR,
2679 build0 (PLACEHOLDER_EXPR, type))));
/* Mechanism-specific trailing fields. */
2684 case By_Short_Descriptor:
2685 case By_Descriptor_S:
2686 case By_Short_Descriptor_S:
2689 case By_Descriptor_SB:
2690 case By_Short_Descriptor_SB:
2692 = chainon (field_list,
2693 make_descriptor_field
2694 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2695 TREE_CODE (type) == ARRAY_TYPE
2696 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2698 = chainon (field_list,
2699 make_descriptor_field
2700 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2701 TREE_CODE (type) == ARRAY_TYPE
2702 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2705 case By_Descriptor_A:
2706 case By_Short_Descriptor_A:
2707 case By_Descriptor_NCA:
2708 case By_Short_Descriptor_NCA:
2709 field_list = chainon (field_list,
2710 make_descriptor_field ("SCALE",
2711 gnat_type_for_size (8, 1),
2715 field_list = chainon (field_list,
2716 make_descriptor_field ("DIGITS",
2717 gnat_type_for_size (8, 1),
2722 = chainon (field_list,
2723 make_descriptor_field
2724 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2725 size_int ((mech == By_Descriptor_NCA ||
2726 mech == By_Short_Descriptor_NCA)
2728 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2729 : (TREE_CODE (type) == ARRAY_TYPE
2730 && TYPE_CONVENTION_FORTRAN_P (type)
2733 field_list = chainon (field_list,
2734 make_descriptor_field ("DIMCT",
2735 gnat_type_for_size (8, 1),
2739 field_list = chainon (field_list,
2740 make_descriptor_field ("ARSIZE",
2741 gnat_type_for_size (32, 1),
2743 size_in_bytes (type)));
2745 /* Now build a pointer to the 0,0,0... element. */
2746 tem = build0 (PLACEHOLDER_EXPR, type);
2747 for (i = 0, inner_type = type; i < ndim;
2748 i++, inner_type = TREE_TYPE (inner_type))
2749 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2750 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2751 NULL_TREE, NULL_TREE);
2754 = chainon (field_list,
2755 make_descriptor_field
2757 build_pointer_type_for_mode (inner_type, SImode, false),
2760 build_pointer_type_for_mode (inner_type, SImode,
2764 /* Next come the addressing coefficients. */
2765 tem = size_one_node;
2766 for (i = 0; i < ndim; i++)
/* Running product of extents: stride in elements for dimension i. */
2770 = size_binop (MULT_EXPR, tem,
2771 size_binop (PLUS_EXPR,
2772 size_binop (MINUS_EXPR,
2773 TYPE_MAX_VALUE (idx_arr[i]),
2774 TYPE_MIN_VALUE (idx_arr[i])),
/* NCA descriptors use "S<i>" field names; A descriptors use "M<i>". */
2777 fname[0] = ((mech == By_Descriptor_NCA ||
2778 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2779 fname[1] = '0' + i, fname[2] = 0;
2781 = chainon (field_list,
2782 make_descriptor_field (fname,
2783 gnat_type_for_size (32, 1),
2784 record_type, idx_length));
2786 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2790 /* Finally here are the bounds. */
2791 for (i = 0; i < ndim; i++)
2795 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2797 = chainon (field_list,
2798 make_descriptor_field
2799 (fname, gnat_type_for_size (32, 1), record_type,
2800 TYPE_MIN_VALUE (idx_arr[i])));
2804 = chainon (field_list,
2805 make_descriptor_field
2806 (fname, gnat_type_for_size (32, 1), record_type,
2807 TYPE_MAX_VALUE (idx_arr[i])));
/* Any other mechanism is not a descriptor mechanism: report it. */
2812 post_error ("unsupported descriptor type for &", gnat_entity);
2815 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2816 finish_record_type (record_type, field_list, 0, true);
2820 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2821 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2822 in the type contains in its DECL_INITIAL the expression to use when
2823 a constructor is made for the type. GNAT_ENTITY is an entity used
2824 to print out an error message if the mechanism cannot be applied to
2825 an object of that type and also for the name. */
/* 64-bit sibling of build_vms_descriptor32: same overall structure but with
   64-bit LENGTH/ARSIZE/bound fields, DImode pointers, and the extra
   MBO/MBMO/MBZ filler fields required by the 64-bit descriptor layout.
   NOTE(review): as in the 32-bit routine, this excerpt elides the
   return-type line, local declarations, braces, many case labels, the
   DTYPE/klass assignments for several branches and the breaks — confirm
   against the full source before editing. */
2828 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2830 tree record64_type = make_node (RECORD_TYPE);
2831 tree pointer64_type;
2832 tree field_list64 = 0;
2841 /* If TYPE is an unconstrained array, use the underlying array type. */
2842 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2843 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2845 /* If this is an array, compute the number of dimensions in the array,
2846 get the index types, and point to the inner type. */
2847 if (TREE_CODE (type) != ARRAY_TYPE)
2850 for (ndim = 1, inner_type = type;
2851 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2852 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2853 ndim++, inner_type = TREE_TYPE (inner_type))
2856 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in the reverse order. */
2858 if (mech != By_Descriptor_NCA
2859 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2860 for (i = ndim - 1, inner_type = type;
2862 i--, inner_type = TREE_TYPE (inner_type))
2863 idx_arr[i] = TYPE_DOMAIN (inner_type);
2865 for (i = 0, inner_type = type;
2867 i++, inner_type = TREE_TYPE (inner_type))
2868 idx_arr[i] = TYPE_DOMAIN (inner_type);
2870 /* Now get the DTYPE value. */
2871 switch (TREE_CODE (type))
2876 if (TYPE_VAX_FLOATING_POINT_P (type))
2877 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Integer DTYPE codes keyed on mode width; unsigned codes differ from
   signed ones. */
2890 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2893 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2896 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2899 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2902 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2905 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
/* IEEE float: 52 for single precision, 53 for double. */
2911 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2915 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2916 && TYPE_VAX_FLOATING_POINT_P (type))
2917 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Complex float: 54 for single precision, 55 for double. */
2929 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2940 /* Get the CLASS value. */
2943 case By_Descriptor_A:
2946 case By_Descriptor_NCA:
2949 case By_Descriptor_SB:
2953 case By_Descriptor_S:
2959 /* Make the type for a 64bit descriptor for VMS. The first six fields
2960 are the same for all types. */
/* MBO ("must be one") marker distinguishing the 64-bit layout. */
2962 field_list64 = chainon (field_list64,
2963 make_descriptor_field ("MBO",
2964 gnat_type_for_size (16, 1),
2965 record64_type, size_int (1)));
2967 field_list64 = chainon (field_list64,
2968 make_descriptor_field ("DTYPE",
2969 gnat_type_for_size (8, 1),
2970 record64_type, size_int (dtype)));
2971 field_list64 = chainon (field_list64,
2972 make_descriptor_field ("CLASS",
2973 gnat_type_for_size (8, 1),
2974 record64_type, size_int (klass)));
/* MBMO ("must be minus one") marker. */
2976 field_list64 = chainon (field_list64,
2977 make_descriptor_field ("MBMO",
2978 gnat_type_for_size (32, 1),
2979 record64_type, ssize_int (-1)));
2982 = chainon (field_list64,
2983 make_descriptor_field
2984 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2985 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2987 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2990 = chainon (field_list64,
2991 make_descriptor_field
2992 ("POINTER", pointer64_type, record64_type,
2993 build_unary_op (ADDR_EXPR,
2995 build0 (PLACEHOLDER_EXPR, type))));
/* Mechanism-specific trailing fields. */
3000 case By_Descriptor_S:
3003 case By_Descriptor_SB:
3005 = chainon (field_list64,
3006 make_descriptor_field
3007 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
3008 TREE_CODE (type) == ARRAY_TYPE
3009 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3011 = chainon (field_list64,
3012 make_descriptor_field
3013 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3014 TREE_CODE (type) == ARRAY_TYPE
3015 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3018 case By_Descriptor_A:
3019 case By_Descriptor_NCA:
3020 field_list64 = chainon (field_list64,
3021 make_descriptor_field ("SCALE",
3022 gnat_type_for_size (8, 1),
3026 field_list64 = chainon (field_list64,
3027 make_descriptor_field ("DIGITS",
3028 gnat_type_for_size (8, 1),
3033 = chainon (field_list64,
3034 make_descriptor_field
3035 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3036 size_int (mech == By_Descriptor_NCA
3038 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3039 : (TREE_CODE (type) == ARRAY_TYPE
3040 && TYPE_CONVENTION_FORTRAN_P (type)
3043 field_list64 = chainon (field_list64,
3044 make_descriptor_field ("DIMCT",
3045 gnat_type_for_size (8, 1),
/* MBZ ("must be zero") filler keeping the 64-bit alignment. */
3049 field_list64 = chainon (field_list64,
3050 make_descriptor_field ("MBZ",
3051 gnat_type_for_size (32, 1),
3054 field_list64 = chainon (field_list64,
3055 make_descriptor_field ("ARSIZE",
3056 gnat_type_for_size (64, 1),
3058 size_in_bytes (type)));
3060 /* Now build a pointer to the 0,0,0... element. */
3061 tem = build0 (PLACEHOLDER_EXPR, type);
3062 for (i = 0, inner_type = type; i < ndim;
3063 i++, inner_type = TREE_TYPE (inner_type))
3064 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3065 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3066 NULL_TREE, NULL_TREE);
3069 = chainon (field_list64,
3070 make_descriptor_field
3072 build_pointer_type_for_mode (inner_type, DImode, false),
3075 build_pointer_type_for_mode (inner_type, DImode,
3079 /* Next come the addressing coefficients. */
3080 tem = size_one_node;
3081 for (i = 0; i < ndim; i++)
/* Running product of extents: stride in elements for dimension i. */
3085 = size_binop (MULT_EXPR, tem,
3086 size_binop (PLUS_EXPR,
3087 size_binop (MINUS_EXPR,
3088 TYPE_MAX_VALUE (idx_arr[i]),
3089 TYPE_MIN_VALUE (idx_arr[i])),
/* NCA descriptors use "S<i>" field names; A descriptors use "M<i>". */
3092 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3093 fname[1] = '0' + i, fname[2] = 0;
3095 = chainon (field_list64,
3096 make_descriptor_field (fname,
3097 gnat_type_for_size (64, 1),
3098 record64_type, idx_length));
3100 if (mech == By_Descriptor_NCA)
3104 /* Finally here are the bounds. */
3105 for (i = 0; i < ndim; i++)
3109 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3111 = chainon (field_list64,
3112 make_descriptor_field
3113 (fname, gnat_type_for_size (64, 1), record64_type,
3114 TYPE_MIN_VALUE (idx_arr[i])));
3118 = chainon (field_list64,
3119 make_descriptor_field
3120 (fname, gnat_type_for_size (64, 1), record64_type,
3121 TYPE_MAX_VALUE (idx_arr[i])));
/* Any other mechanism is not a descriptor mechanism: report it. */
3126 post_error ("unsupported descriptor type for &", gnat_entity);
3129 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3130 finish_record_type (record64_type, field_list64, 0, true);
3131 return record64_type;
3134 /* Utility routine for above code to make a field. */
/* Create a FIELD_DECL named NAME of TYPE in REC_TYPE, stashing INITIAL in
   its DECL_INITIAL for later use when building descriptor constructors.
   NOTE(review): return-type line, the "tree field" declaration line and
   the final return of FIELD are elided in this excerpt. */
3137 make_descriptor_field (const char *name, tree type,
3138 tree rec_type, tree initial)
3141 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3143 DECL_INITIAL (field) = initial;
3147 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3148 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3149 which the VMS descriptor is passed. */
/* NOTE(review): this listing is elided (gaps in the embedded numbering);
   the return type line, braces, several assignments' left-hand sides and
   the `switch' head on IKLASS are missing from view.  */
3152 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
/* Dereference the descriptor pointer to get at its fields.  */
3154 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3155 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3156 /* The CLASS field is the 3rd field in the descriptor. */
3157 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3158 /* The POINTER field is the 6th field in the descriptor. */
3159 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
3161 /* Retrieve the value of the POINTER field. */
3163 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
/* For a plain access type, the data pointer is all we need.  */
3165 if (POINTER_TYPE_P (gnu_type))
3166 return convert (gnu_type, gnu_expr64);
/* For a fat pointer we must also synthesize a bounds template.  */
3168 else if (TYPE_FAT_POINTER_P (gnu_type))
3170 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3171 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3172 tree template_type = TREE_TYPE (p_bounds_type);
3173 tree min_field = TYPE_FIELDS (template_type);
3174 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3175 tree template_tree, template_addr, aflags, dimct, t, u;
3176 /* See the head comment of build_vms_descriptor. */
/* DECL_INITIAL on the CLASS field holds the expected class code,
   recorded by make_descriptor_field when the type was built.  */
3177 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3178 tree lfield, ufield;
3180 /* Convert POINTER to the type of the P_ARRAY field. */
3181 gnu_expr64 = convert (p_array_type, gnu_expr64);
/* Dispatch on the descriptor class (switch head elided from view).  */
3185 case 1: /* Class S */
3186 case 15: /* Class SB */
3187 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3188 t = TREE_CHAIN (TREE_CHAIN (klass))
3189 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3190 t = tree_cons (min_field,
3191 convert (TREE_TYPE (min_field), integer_one_node),
3192 tree_cons (max_field,
3193 convert (TREE_TYPE (max_field), t),
3195 template_tree = gnat_build_constructor (template_type, t);
3196 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3198 /* For class S, we are done. */
3202 /* Test that we really have a SB descriptor, like DEC Ada. */
3203 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3204 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3205 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3206 /* If so, there is already a template in the descriptor and
3207 it is located right after the POINTER field. The fields are
3208 64bits so they must be repacked. */
3209 t = TREE_CHAIN (pointer64);
3210 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3211 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3214 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3216 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3218 /* Build the template in the form of a constructor. */
3219 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3220 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3221 ufield, NULL_TREE));
3222 template_tree = gnat_build_constructor (template_type, t);
3224 /* Otherwise use the {1, LENGTH} template we build above. */
/* Runtime choice between the embedded SB template and the
   synthesized {1, LENGTH} one.  */
3225 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3226 build_unary_op (ADDR_EXPR, p_bounds_type,
3231 case 4: /* Class A */
3232 /* The AFLAGS field is the 3rd field after the pointer in the
3234 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3235 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3236 /* The DIMCT field is the next field in the descriptor after
3239 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3240 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3241 or FL_COEFF or FL_BOUNDS not set. */
/* 192 = 0xC0, the FL_COEFF | FL_BOUNDS flag mask.  */
3242 u = build_int_cst (TREE_TYPE (aflags), 192);
3243 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3244 build_binary_op (NE_EXPR, integer_type_node,
3246 convert (TREE_TYPE (dimct),
3248 build_binary_op (NE_EXPR, integer_type_node,
3249 build2 (BIT_AND_EXPR,
3253 /* There is already a template in the descriptor and it is located
3254 in block 3. The fields are 64bits so they must be repacked. */
3255 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3257 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3258 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3261 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3263 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3265 /* Build the template in the form of a constructor. */
3266 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3267 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3268 ufield, NULL_TREE));
3269 template_tree = gnat_build_constructor (template_type, t);
/* If the sanity check above fails, raise Constraint_Error at
   runtime instead of using the template.  */
3270 template_tree = build3 (COND_EXPR, p_bounds_type, u,
3271 build_call_raise (CE_Length_Check_Failed, Empty,
3272 N_Raise_Constraint_Error),
3275 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3278 case 10: /* Class NCA */
3280 post_error ("unsupported descriptor type for &", gnat_subprog);
3281 template_addr = integer_zero_node;
3285 /* Build the fat pointer in the form of a constructor. */
3286 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3287 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3288 template_addr, NULL_TREE));
3289 return gnat_build_constructor (gnu_type, t);
3296 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3297 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3298 which the VMS descriptor is passed. */
/* NOTE(review): elided listing — the return type, braces, some left-hand
   sides and the `switch' head on IKLASS are missing from this view.
   The structure parallels convert_vms_descriptor64 but with 32-bit field
   layout (no repacking of template fields needed).  */
3301 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3303 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3304 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3305 /* The CLASS field is the 3rd field in the descriptor. */
3306 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3307 /* The POINTER field is the 4th field in the descriptor. */
3308 tree pointer = TREE_CHAIN (klass);
3310 /* Retrieve the value of the POINTER field. */
3312 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* For a plain access type, the data pointer suffices.  */
3314 if (POINTER_TYPE_P (gnu_type))
3315 return convert (gnu_type, gnu_expr32);
/* For a fat pointer we must also provide a bounds template.  */
3317 else if (TYPE_FAT_POINTER_P (gnu_type))
3319 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3320 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3321 tree template_type = TREE_TYPE (p_bounds_type);
3322 tree min_field = TYPE_FIELDS (template_type);
3323 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3324 tree template_tree, template_addr, aflags, dimct, t, u;
3325 /* See the head comment of build_vms_descriptor. */
/* Expected class code, recorded in DECL_INITIAL by
   make_descriptor_field when the descriptor type was built.  */
3326 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3328 /* Convert POINTER to the type of the P_ARRAY field. */
3329 gnu_expr32 = convert (p_array_type, gnu_expr32);
/* Dispatch on the descriptor class (switch head elided from view).  */
3333 case 1: /* Class S */
3334 case 15: /* Class SB */
3335 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3336 t = TYPE_FIELDS (desc_type);
3337 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3338 t = tree_cons (min_field,
3339 convert (TREE_TYPE (min_field), integer_one_node),
3340 tree_cons (max_field,
3341 convert (TREE_TYPE (max_field), t),
3343 template_tree = gnat_build_constructor (template_type, t);
3344 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3346 /* For class S, we are done. */
3350 /* Test that we really have a SB descriptor, like DEC Ada. */
3351 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3352 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3353 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3354 /* If so, there is already a template in the descriptor and
3355 it is located right after the POINTER field. */
3356 t = TREE_CHAIN (pointer);
3358 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3359 /* Otherwise use the {1, LENGTH} template we build above. */
3360 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3361 build_unary_op (ADDR_EXPR, p_bounds_type,
3366 case 4: /* Class A */
3367 /* The AFLAGS field is the 7th field in the descriptor. */
3368 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3369 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3370 /* The DIMCT field is the 8th field in the descriptor. */
3372 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3373 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3374 or FL_COEFF or FL_BOUNDS not set. */
/* 192 = 0xC0, the FL_COEFF | FL_BOUNDS flag mask.  */
3375 u = build_int_cst (TREE_TYPE (aflags), 192);
3376 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3377 build_binary_op (NE_EXPR, integer_type_node,
3379 convert (TREE_TYPE (dimct),
3381 build_binary_op (NE_EXPR, integer_type_node,
3382 build2 (BIT_AND_EXPR,
3386 /* There is already a template in the descriptor and it is
3387 located at the start of block 3 (12th field). */
3388 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3390 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* If the sanity check above fails, raise Constraint_Error at
   runtime instead of using the template.  */
3391 template_tree = build3 (COND_EXPR, p_bounds_type, u,
3392 build_call_raise (CE_Length_Check_Failed, Empty,
3393 N_Raise_Constraint_Error),
3396 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3399 case 10: /* Class NCA */
3401 post_error ("unsupported descriptor type for &", gnat_subprog);
3402 template_addr = integer_zero_node;
3406 /* Build the fat pointer in the form of a constructor. */
3407 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3408 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3409 template_addr, NULL_TREE));
3411 return gnat_build_constructor (gnu_type, t);
3418 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3419 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3420 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3421 VMS descriptor is passed. */
/* Dispatcher: decides at runtime whether the incoming descriptor is the
   64-bit or the 32-bit layout and routes to the matching converter.
   NOTE(review): elided listing — the return type, braces and the
   declaration line for IS64BIT's assignment are missing from view.  */
3424 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3425 Entity_Id gnat_subprog)
3427 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3428 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
/* A 64-bit descriptor type starts with an MBO ("must be one") field;
   its name is the compile-time discriminator between the two layouts.  */
3429 tree mbo = TYPE_FIELDS (desc_type);
3430 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3431 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3432 tree is64bit, gnu_expr32, gnu_expr64;
3434 /* If the field name is not MBO, it must be 32-bit and no alternate.
3435 Otherwise primary must be 64-bit and alternate 32-bit. */
3436 if (strcmp (mbostr, "MBO") != 0)
3437 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3439 /* Build the test for 64-bit descriptor. */
/* MBO must read 1 and MBMO ("must be minus one") must read -1 for the
   blob to actually be a 64-bit descriptor.  */
3440 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3441 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3443 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3444 build_binary_op (EQ_EXPR, integer_type_node,
3445 convert (integer_type_node, mbo),
3447 build_binary_op (EQ_EXPR, integer_type_node,
3448 convert (integer_type_node, mbmo),
3449 integer_minus_one_node));
3451 /* Build the 2 possible end results. */
3452 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
/* Reinterpret the same pointer with the alternate 32-bit layout
   before handing it to the 32-bit converter.  */
3453 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3454 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
/* Select between the two conversions at runtime.  */
3456 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3459 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3460 and the GNAT node GNAT_SUBPROG. */
/* The stub receives by-descriptor parameters from foreign (VMS) callers,
   converts each descriptor to the corresponding native pointer or fat
   pointer, then tail-calls the internal subprogram with the converted
   arguments, forwarding the return value if any.
   NOTE(review): elided listing — the return type, braces, the GNU_BODY
   declaration, the loop's controlling condition and some call-argument
   lines are missing from view.  */
3463 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3465 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3466 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3467 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3470 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3471 gnu_param_list = NULL_TREE;
3473 begin_subprog_body (gnu_stub_decl);
3476 start_stmt_group ();
3478 /* Loop over the parameters of the stub and translate any of them
3479 passed by descriptor into a by reference one. */
/* Walk the stub's PARM_DECLs and the internal subprogram's argument
   types in lockstep.  */
3480 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3481 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3483 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3484 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3486 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3488 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3490 DECL_PARM_ALT_TYPE (gnu_stub_param),
/* Parameters not passed by descriptor are forwarded unchanged.  */
3493 gnu_param = gnu_stub_param;
/* Accumulate in reverse; nreverse'd below at the call site.  */
3495 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3498 gnu_body = end_stmt_group ();
3500 /* Invoke the internal subprogram. */
3501 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3503 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3505 nreverse (gnu_param_list));
3507 /* Propagate the return value, if any. */
3508 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3509 append_to_statement_list (gnu_subprog_call, &gnu_body);
3511 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3517 allocate_struct_function (gnu_stub_decl, false);
3518 end_subprog_body (gnu_body, false);
3521 /* Build a type to be used to represent an aliased object whose nominal
3522 type is an unconstrained array. This consists of a RECORD_TYPE containing
3523 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3524 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3525 is used to represent an arbitrary unconstrained object. Use NAME
3526 as the name of the record. */
/* NOTE(review): elided listing — the return type line, braces, the tail
   of the ARRAY field's create_field_decl arguments, the end of the
   finish_record_type call and the return statement are not visible.  */
3529 build_unc_object_type (tree template_type, tree object_type, tree name)
3531 tree type = make_node (RECORD_TYPE);
/* BOUNDS comes first so the template sits in front of the array data.  */
3532 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3533 template_type, type, 0, 0, 0, 1);
3534 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3537 TYPE_NAME (type) = name;
/* Mark the record so the rest of gigi knows field 1 is a template.  */
3538 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3539 finish_record_type (type,
3540 chainon (chainon (NULL_TREE, template_field),
3547 /* Same, taking a thin or fat pointer type instead of a template type. */
/* Extract the template type from THIN_FAT_PTR_TYPE, then delegate to
   build_unc_object_type.  NOTE(review): elided listing — the return type,
   the NAME parameter line, braces and the TEMPLATE_TYPE declaration are
   missing from view.  */
3550 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3555 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Fat pointer: template type hangs off the second field (P_BOUNDS).
   Thin pointer: it is the first field of the designated record.  */
3558 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3559 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3560 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3561 return build_unc_object_type (template_type, object_type, name);
3564 /* Shift the component offsets within an unconstrained object TYPE to make it
3565 suitable for use as a designated type for thin pointers. */
/* NOTE(review): elided listing — the return type, braces and (presumably)
   the matching DECL_FIELD_BIT_OFFSET adjustment for BOUNDS_FIELD are not
   visible here; confirm against the full source.  */
3568 shift_unc_components_for_thin_pointers (tree type)
3570 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3571 allocated past the BOUNDS template. The designated type is adjusted to
3572 have ARRAY at position zero and the template at a negative offset, so
3573 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3575 tree bounds_field = TYPE_FIELDS (type);
3576 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* BOUNDS moves to minus the array's old byte position...  */
3578 DECL_FIELD_OFFSET (bounds_field)
3579 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* ...and ARRAY lands at offset zero.  */
3581 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3582 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3585 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3586 In the normal case this is just two adjustments, but we have more to
3587 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): elided listing — the return type, braces, several local
   declarations (TYPE, PTR1, REF1, VAR, NEW_REF), the early-return for the
   no-pointers case, the NEW_TYPE reassignment's left-hand side, and the
   branch handling a fat-pointer PTR are missing from this view.  */
3590 update_pointer_to (tree old_type, tree new_type)
3592 tree ptr = TYPE_POINTER_TO (old_type);
3593 tree ref = TYPE_REFERENCE_TO (old_type);
3597 /* If this is the main variant, process all the other variants first. */
3598 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3599 for (type = TYPE_NEXT_VARIANT (old_type); type;
3600 type = TYPE_NEXT_VARIANT (type))
3601 update_pointer_to (type, new_type);
3603 /* If no pointers and no references, we are done. */
3607 /* Merge the old type qualifiers in the new type.
3609 Each old variant has qualifiers for specific reasons, and the new
3610 designated type as well. Each set of qualifiers represents useful
3611 information grabbed at some point, and merging the two simply unifies
3612 these inputs into the final type description.
3614 Consider for instance a volatile type frozen after an access to constant
3615 type designating it; after the designated type's freeze, we get here with
3616 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3617 when the access type was processed. We will make a volatile and readonly
3618 designated type, because that's what it really is.
3620 We might also get here for a non-dummy OLD_TYPE variant with different
3621 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3622 to private record type elaboration (see the comments around the call to
3623 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3624 the qualifiers in those cases too, to avoid accidentally discarding the
3625 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3627 = build_qualified_type (new_type,
3628 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3630 /* If old type and new type are identical, there is nothing to do. */
3631 if (old_type == new_type)
3634 /* Otherwise, first handle the simple case. */
3635 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
/* Transfer the pointer/reference chains wholesale, then retarget
   every pointer and reference variant at NEW_TYPE.  */
3637 TYPE_POINTER_TO (new_type) = ptr;
3638 TYPE_REFERENCE_TO (new_type) = ref;
3640 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3641 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3642 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3643 TREE_TYPE (ptr1) = new_type;
3645 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3646 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3647 ref1 = TYPE_NEXT_VARIANT (ref1))
3648 TREE_TYPE (ref1) = new_type;
3651 /* Now deal with the unconstrained array case. In this case the "pointer"
3652 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3653 Turn them into pointers to the correct types using update_pointer_to. */
3654 else if (!TYPE_FAT_POINTER_P (ptr))
3659 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3660 tree array_field = TYPE_FIELDS (ptr);
3661 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3662 tree new_ptr = TYPE_POINTER_TO (new_type);
3666 /* Make pointers to the dummy template point to the real template. */
3668 (TREE_TYPE (TREE_TYPE (bounds_field)),
3669 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3671 /* The references to the template bounds present in the array type
3672 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3673 are updating PTR to make it a full replacement for NEW_PTR as
3674 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3675 to make it of type PTR. */
3676 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3677 build0 (PLACEHOLDER_EXPR, ptr),
3678 bounds_field, NULL_TREE);
3680 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3681 to the dummy array point to it. */
3683 (TREE_TYPE (TREE_TYPE (array_field)),
3684 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3685 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3687 /* Make PTR the pointer to NEW_TYPE. */
3688 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3689 = TREE_TYPE (new_type) = ptr;
3691 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3692 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3694 /* Now handle updating the allocation record, what the thin pointer
3695 points to. Update all pointers from the old record into the new
3696 one, update the type of the array field, and recompute the size. */
3697 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3699 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3700 = TREE_TYPE (TREE_TYPE (array_field));
3702 /* The size recomputation needs to account for alignment constraints, so
3703 we let layout_type work it out. This will reset the field offsets to
3704 what they would be in a regular record, so we shift them back to what
3705 we want them to be for a thin pointer designated type afterwards. */
/* Clearing the sizes forces layout_type to recompute from scratch.  */
3706 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3707 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3708 TYPE_SIZE (new_obj_rec) = 0;
3709 layout_type (new_obj_rec);
3711 shift_unc_components_for_thin_pointers (new_obj_rec);
3713 /* We are done, at last. */
3714 rest_of_record_type_compilation (ptr);
3718 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3719 unconstrained one. This involves making or finding a template. */
/* NOTE(review): elided listing — the return type, braces, the declaration
   of TEMPLATE_TREE, the `return' keywords in front of the
   gnat_build_constructor calls and the closing argument lines of the
   constructors are not visible in this view.  */
3722 convert_to_fat_pointer (tree type, tree expr)
/* TYPE is the fat pointer: field 1 is P_ARRAY, field 2 points to the
   bounds template.  */
3724 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3725 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3726 tree etype = TREE_TYPE (expr);
3729 /* If EXPR is null, make a fat pointer that contains null pointers to the
3730 template and array. */
3731 if (integer_zerop (expr))
3733 gnat_build_constructor
3735 tree_cons (TYPE_FIELDS (type),
3736 convert (p_array_type, expr),
3737 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3738 convert (build_pointer_type (template_type),
3742 /* If EXPR is a thin pointer, make template and data from the record.  */
3743 else if (TYPE_THIN_POINTER_P (etype))
3745 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* EXPR may be evaluated more than once below; protect it.  */
3747 expr = save_expr (expr);
/* Fold &obj back to obj rather than dereferencing the address.  */
3748 if (TREE_CODE (expr) == ADDR_EXPR)
3749 expr = TREE_OPERAND (expr, 0);
3751 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the designated record is the template, second is
   the array data.  */
3753 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3754 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3755 build_component_ref (expr, NULL_TREE,
3756 TREE_CHAIN (fields), false));
3759 /* Otherwise, build the constructor for the template. */
3761 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3763 /* The final result is a constructor for the fat pointer.
3765 If EXPR is an argument of a foreign convention subprogram, the type it
3766 points to is directly the component type. In this case, the expression
3767 type may not match the corresponding FIELD_DECL type at this point, so we
3768 call "convert" here to fix that up if necessary. This type consistency is
3769 required, for instance because it ensures that possible later folding of
3770 COMPONENT_REFs against this constructor always yields something of the
3771 same type as the initial reference.
3773 Note that the call to "build_template" above is still fine because it
3774 will only refer to the provided TEMPLATE_TYPE in this case. */
3776 gnat_build_constructor
3778 tree_cons (TYPE_FIELDS (type),
3779 convert (p_array_type, expr),
3780 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3781 build_unary_op (ADDR_EXPR, NULL_TREE,
3786 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3787 is something that is a fat pointer, so convert to a fat pointer first if
3788 EXPR is not already one. */
/* NOTE(review): elided listing — the return type, braces, the left-hand
   side of the fat-pointer conversion, part of the NOP_EXPR comment, the
   trailing argument of build_component_ref and the return statement are
   not visible here.  */
3791 convert_to_thin_pointer (tree type, tree expr)
3793 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
/* Go through the fat pointer form of the unconstrained array that
   TYPE's designated record represents.  */
3795 = convert_to_fat_pointer
3796 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3798 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* P_ARRAY is the first field of the fat pointer.  */
3800 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3802 expr = build1 (NOP_EXPR, type, expr);
3807 /* Create an expression whose value is that of EXPR,
3808 converted to type TYPE. The TREE_TYPE of the value
3809 is always TYPE. This function implements all reasonable
3810 conversions; callers should filter out those that are
3811 not permitted by the language being compiled. */
3814 convert (tree type, tree expr)
3816 enum tree_code code = TREE_CODE (type);
3817 tree etype = TREE_TYPE (expr);
3818 enum tree_code ecode = TREE_CODE (etype);
3820 /* If EXPR is already the right type, we are done. */
3824 /* If both input and output have padding and are of variable size, do this
3825 as an unchecked conversion. Likewise if one is a mere variant of the
3826 other, so we avoid a pointless unpad/repad sequence. */
3827 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3828 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3829 && (!TREE_CONSTANT (TYPE_SIZE (type))
3830 || !TREE_CONSTANT (TYPE_SIZE (etype))
3831 || gnat_types_compatible_p (type, etype)
3832 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3833 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3836 /* If the output type has padding, convert to the inner type and
3837 make a constructor to build the record. */
3838 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3840 /* If we previously converted from another type and our type is
3841 of variable size, remove the conversion to avoid the need for
3842 variable-size temporaries. Likewise for a conversion between
3843 original and packable version. */
3844 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3845 && (!TREE_CONSTANT (TYPE_SIZE (type))
3846 || (ecode == RECORD_TYPE
3847 && TYPE_NAME (etype)
3848 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3849 expr = TREE_OPERAND (expr, 0);
3851 /* If we are just removing the padding from expr, convert the original
3852 object if we have variable size in order to avoid the need for some
3853 variable-size temporaries. Likewise if the padding is a mere variant
3854 of the other, so we avoid a pointless unpad/repad sequence. */
3855 if (TREE_CODE (expr) == COMPONENT_REF
3856 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3857 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3858 && (!TREE_CONSTANT (TYPE_SIZE (type))
3859 || gnat_types_compatible_p (type,
3860 TREE_TYPE (TREE_OPERAND (expr, 0)))
3861 || (ecode == RECORD_TYPE
3862 && TYPE_NAME (etype)
3863 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3864 return convert (type, TREE_OPERAND (expr, 0));
3866 /* If the result type is a padded type with a self-referentially-sized
3867 field and the expression type is a record, do this as an
3868 unchecked conversion. */
3869 else if (TREE_CODE (etype) == RECORD_TYPE
3870 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3871 return unchecked_convert (type, expr, false);
3875 gnat_build_constructor (type,
3876 tree_cons (TYPE_FIELDS (type),
3878 (TYPE_FIELDS (type)),
3883 /* If the input type has padding, remove it and convert to the output type.
3884 The conditions ordering is arranged to ensure that the output type is not
3885 a padding type here, as it is not clear whether the conversion would
3886 always be correct if this was to happen. */
3887 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3891 /* If we have just converted to this padded type, just get the
3892 inner expression. */
3893 if (TREE_CODE (expr) == CONSTRUCTOR
3894 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3895 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3896 == TYPE_FIELDS (etype))
3898 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3900 /* Otherwise, build an explicit component reference. */
3903 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3905 return convert (type, unpadded);
3908 /* If the input is a biased type, adjust first. */
3909 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3910 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3911 fold_convert (TREE_TYPE (etype),
3913 TYPE_MIN_VALUE (etype)));
3915 /* If the input is a justified modular type, we need to extract the actual
3916 object before converting it to any other type with the exceptions of an
3917 unconstrained array or of a mere type variant. It is useful to avoid the
3918 extraction and conversion in the type variant case because it could end
3919 up replacing a VAR_DECL expr by a constructor and we might be about the
3920 take the address of the result. */
3921 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3922 && code != UNCONSTRAINED_ARRAY_TYPE
3923 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3924 return convert (type, build_component_ref (expr, NULL_TREE,
3925 TYPE_FIELDS (etype), false));
3927 /* If converting to a type that contains a template, convert to the data
3928 type and then build the template. */
3929 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3931 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3933 /* If the source already has a template, get a reference to the
3934 associated array only, as we are going to rebuild a template
3935 for the target type anyway. */
3936 expr = maybe_unconstrained_array (expr);
3939 gnat_build_constructor
3941 tree_cons (TYPE_FIELDS (type),
3942 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3943 obj_type, NULL_TREE),
3944 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3945 convert (obj_type, expr), NULL_TREE)));
3948 /* There are some special cases of expressions that we process
3950 switch (TREE_CODE (expr))
3956 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3957 conversion in gnat_expand_expr. NULL_EXPR does not represent
3958 and actual value, so no conversion is needed. */
3959 expr = copy_node (expr);
3960 TREE_TYPE (expr) = type;
3964 /* If we are converting a STRING_CST to another constrained array type,
3965 just make a new one in the proper type. */
3966 if (code == ecode && AGGREGATE_TYPE_P (etype)
3967 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3968 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3970 expr = copy_node (expr);
3971 TREE_TYPE (expr) = type;
3977 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3978 a new one in the proper type. */
3979 if (code == ecode && gnat_types_compatible_p (type, etype))
3981 expr = copy_node (expr);
3982 TREE_TYPE (expr) = type;
3986 /* Likewise for a conversion between original and packable version, but
3987 we have to work harder in order to preserve type consistency. */
3989 && code == RECORD_TYPE
3990 && TYPE_NAME (type) == TYPE_NAME (etype))
3992 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3993 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3994 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3995 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3996 unsigned HOST_WIDE_INT idx;
3999 /* Whether we need to clear TREE_CONSTANT et al. on the output
4000 constructor when we convert in place. */
4001 bool clear_constant = false;
4003 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4005 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4006 /* We expect only simple constructors. Otherwise, punt. */
4007 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4010 elt->value = convert (TREE_TYPE (field), value);
4012 /* If packing has made this field a bitfield and the input
4013 value couldn't be emitted statically any more, we need to
4014 clear TREE_CONSTANT on our output. */
4015 if (!clear_constant && TREE_CONSTANT (expr)
4016 && !CONSTRUCTOR_BITFIELD_P (efield)
4017 && CONSTRUCTOR_BITFIELD_P (field)
4018 && !initializer_constant_valid_for_bitfield_p (value))
4019 clear_constant = true;
4021 efield = TREE_CHAIN (efield);
4022 field = TREE_CHAIN (field);
4025 /* If we have been able to match and convert all the input fields
4026 to their output type, convert in place now. We'll fallback to a
4027 view conversion downstream otherwise. */
4030 expr = copy_node (expr);
4031 TREE_TYPE (expr) = type;
4032 CONSTRUCTOR_ELTS (expr) = v;
4034 TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
4040 case UNCONSTRAINED_ARRAY_REF:
4041 /* Convert this to the type of the inner array by getting the address of
4042 the array from the template. */
4043 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4044 build_component_ref (TREE_OPERAND (expr, 0),
4045 get_identifier ("P_ARRAY"),
4047 etype = TREE_TYPE (expr);
4048 ecode = TREE_CODE (etype);
4051 case VIEW_CONVERT_EXPR:
4053 /* GCC 4.x is very sensitive to type consistency overall, and view
4054 conversions thus are very frequent. Even though just "convert"ing
4055 the inner operand to the output type is fine in most cases, it
4056 might expose unexpected input/output type mismatches in special
4057 circumstances so we avoid such recursive calls when we can. */
4058 tree op0 = TREE_OPERAND (expr, 0);
4060 /* If we are converting back to the original type, we can just
4061 lift the input conversion. This is a common occurrence with
4062 switches back-and-forth amongst type variants. */
4063 if (type == TREE_TYPE (op0))
4066 /* Otherwise, if we're converting between two aggregate types, we
4067 might be allowed to substitute the VIEW_CONVERT_EXPR target type
4068 in place or to just convert the inner expression. */
4069 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4071 /* If we are converting between mere variants, we can just
4072 substitute the VIEW_CONVERT_EXPR in place. */
4073 if (gnat_types_compatible_p (type, etype))
4074 return build1 (VIEW_CONVERT_EXPR, type, op0);
4076 /* Otherwise, we may just bypass the input view conversion unless
4077 one of the types is a fat pointer, which is handled by
4078 specialized code below which relies on exact type matching. */
4079 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4080 return convert (type, op0);
4086 /* If both types are record types, just convert the pointer and
4087 make a new INDIRECT_REF.
4089 ??? Disable this for now since it causes problems with the
4090 code in build_binary_op for MODIFY_EXPR which wants to
4091 strip off conversions. But that code really is a mess and
4092 we need to do this a much better way some time. */
4094 && (TREE_CODE (type) == RECORD_TYPE
4095 || TREE_CODE (type) == UNION_TYPE)
4096 && (TREE_CODE (etype) == RECORD_TYPE
4097 || TREE_CODE (etype) == UNION_TYPE)
4098 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4099 return build_unary_op (INDIRECT_REF, NULL_TREE,
4100 convert (build_pointer_type (type),
4101 TREE_OPERAND (expr, 0)));
4108 /* Check for converting to a pointer to an unconstrained array. */
4109 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4110 return convert_to_fat_pointer (type, expr);
4112 /* If we are converting between two aggregate types that are mere
4113 variants, just make a VIEW_CONVERT_EXPR. */
4114 else if (code == ecode
4115 && AGGREGATE_TYPE_P (type)
4116 && gnat_types_compatible_p (type, etype))
4117 return build1 (VIEW_CONVERT_EXPR, type, expr);
4119 /* In all other cases of related types, make a NOP_EXPR. */
4120 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4121 || (code == INTEGER_CST && ecode == INTEGER_CST
4122 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4123 return fold_convert (type, expr);
4128 return fold_build1 (CONVERT_EXPR, type, expr);
4131 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4132 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4133 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4134 return unchecked_convert (type, expr, false);
4135 else if (TYPE_BIASED_REPRESENTATION_P (type))
4136 return fold_convert (type,
4137 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4138 convert (TREE_TYPE (type), expr),
4139 TYPE_MIN_VALUE (type)));
4141 /* ... fall through ... */
4145 /* If we are converting an additive expression to an integer type
4146 with lower precision, be wary of the optimization that can be
4147 applied by convert_to_integer. There are 2 problematic cases:
4148 - if the first operand was originally of a biased type,
4149 because we could be recursively called to convert it
4150 to an intermediate type and thus rematerialize the
4151 additive operator endlessly,
4152 - if the expression contains a placeholder, because an
4153 intermediate conversion that changes the sign could
4154 be inserted and thus introduce an artificial overflow
4155 at compile time when the placeholder is substituted. */
4156 if (code == INTEGER_TYPE
4157 && ecode == INTEGER_TYPE
4158 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4159 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4161 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4163 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4164 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4165 || CONTAINS_PLACEHOLDER_P (expr))
4166 return build1 (NOP_EXPR, type, expr);
4169 return fold (convert_to_integer (type, expr));
4172 case REFERENCE_TYPE:
4173 /* If converting between two pointers to records denoting
4174 both a template and type, adjust if needed to account
4175 for any differing offsets, since one might be negative. */
4176 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4179 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4180 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4181 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4182 sbitsize_int (BITS_PER_UNIT));
4184 expr = build1 (NOP_EXPR, type, expr);
4185 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4186 if (integer_zerop (byte_diff))
4189 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4190 fold (convert (sizetype, byte_diff)));
4193 /* If converting to a thin pointer, handle specially. */
4194 if (TYPE_THIN_POINTER_P (type)
4195 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4196 return convert_to_thin_pointer (type, expr);
4198 /* If converting fat pointer to normal pointer, get the pointer to the
4199 array and then convert it. */
4200 else if (TYPE_FAT_POINTER_P (etype))
4201 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4204 return fold (convert_to_pointer (type, expr));
4207 return fold (convert_to_real (type, expr));
4210 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4212 gnat_build_constructor
4213 (type, tree_cons (TYPE_FIELDS (type),
4214 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4217 /* ... fall through ... */
4220 /* In these cases, assume the front-end has validated the conversion.
4221 If the conversion is valid, it will be a bit-wise conversion, so
4222 it can be viewed as an unchecked conversion. */
4223 return unchecked_convert (type, expr, false);
4226 /* This is a either a conversion between a tagged type and some
4227 subtype, which we have to mark as a UNION_TYPE because of
4228 overlapping fields or a conversion of an Unchecked_Union. */
4229 return unchecked_convert (type, expr, false);
4231 case UNCONSTRAINED_ARRAY_TYPE:
4232 /* If EXPR is a constrained array, take its address, convert it to a
4233 fat pointer, and then dereference it. Likewise if EXPR is a
4234 record containing both a template and a constrained array.
4235 Note that a record representing a justified modular type
4236 always represents a packed constrained array. */
4237 if (ecode == ARRAY_TYPE
4238 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4239 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4240 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4243 (INDIRECT_REF, NULL_TREE,
4244 convert_to_fat_pointer (TREE_TYPE (type),
4245 build_unary_op (ADDR_EXPR,
4248 /* Do something very similar for converting one unconstrained
4249 array to another. */
4250 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4252 build_unary_op (INDIRECT_REF, NULL_TREE,
4253 convert (TREE_TYPE (type),
4254 build_unary_op (ADDR_EXPR,
4260 return fold (convert_to_complex (type, expr));
4267 /* Remove all conversions that are done in EXP. This includes converting
4268 from a padded type or to a justified modular type. If TRUE_ADDRESS
4269 is true, always return the address of the containing object even if
4270 the address is not bit-aligned. */
4273 remove_conversions (tree exp, bool true_address)
4275 switch (TREE_CODE (exp))
/* A CONSTRUCTOR for a justified-modular record wraps the real value in
   its single field: recurse into the first element's value.  */
4279 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4280 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4282 remove_conversions (VEC_index (constructor_elt,
4283 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A COMPONENT_REF whose record operand has a padding type is really a
   conversion out of the padded type: strip it by recursing on the
   record operand.  */
4288 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4289 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4290 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
/* View conversions and NON_LVALUE wrappers carry no value change:
   drop them and keep peeling.  */
4293 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4295 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4304 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4305 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4306 likewise return an expression pointing to the underlying array. */
4309 maybe_unconstrained_array (tree exp)
4311 enum tree_code code = TREE_CODE (exp);
4314 switch (TREE_CODE (TREE_TYPE (exp)))
4316 case UNCONSTRAINED_ARRAY_TYPE:
/* For a reference through a fat pointer, fetch the P_ARRAY pointer
   out of the fat-pointer record and dereference it.  */
4317 if (code == UNCONSTRAINED_ARRAY_REF)
4320 = build_unary_op (INDIRECT_REF, NULL_TREE,
4321 build_component_ref (TREE_OPERAND (exp, 0),
4322 get_identifier ("P_ARRAY"),
/* NOTE(review): TREE_STATIC is assigned from TREE_READONLY (exp)
   here, not from TREE_STATIC (exp) -- verify this is intentional.  */
4324 TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4325 = TREE_READONLY (exp);
/* Rebuild a NULL_EXPR with the array type taken from the target of
   the fat pointer's first field.  */
4329 else if (code == NULL_EXPR)
4330 return build1 (NULL_EXPR,
4331 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4332 (TREE_TYPE (TREE_TYPE (exp))))),
4333 TREE_OPERAND (exp, 0));
4336 /* If this is a padded type, convert to the unpadded type and see if
4337 it contains a template. */
4338 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4340 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4341 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4342 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
/* The array component follows the template, i.e. it is the field
   after TYPE_FIELDS.  */
4344 build_component_ref (new_exp, NULL_TREE,
4346 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4349 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4351 build_component_ref (exp, NULL_TREE,
4352 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4362 /* Return true if EXPR is an expression that can be folded as an operand
4363 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4366 can_fold_for_view_convert_p (tree expr)
4370 /* The folder will fold NOP_EXPRs between integral types with the same
4371 precision (in the middle-end's sense). We cannot allow it if the
4372 types don't have the same precision in the Ada sense as well. */
4373 if (TREE_CODE (expr) != NOP_EXPR)
4376 t1 = TREE_TYPE (expr);
4377 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4379 /* Defer to the folder for non-integral conversions. */
4380 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4383 /* Only fold conversions that preserve both precisions. */
/* rm_size gives the Ada RM-defined size of the type; both the
   middle-end precision and the RM size must agree for folding to be
   safe here.  */
4384 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4385 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4391 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4392 If NOTRUNC_P is true, truncation operations should be suppressed.
4394 Special care is required with (source or target) integral types whose
4395 precision is not equal to their size, to make sure we fetch or assign
4396 the value bits whose location might depend on the endianness, e.g.
4398 Rmsize : constant := 8;
4399 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4401 type Bit_Array is array (1 .. Rmsize) of Boolean;
4402 pragma Pack (Bit_Array);
4404 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4406 Value : Int := 2#1000_0001#;
4407 Vbits : Bit_Array := To_Bit_Array (Value);
4409 we expect the 8 bits at Vbits'Address to always contain Value, while
4410 their original location depends on the endianness, at Value'Address
4411 on a little-endian architecture but not on a big-endian one. */
4414 unchecked_convert (tree type, tree expr, bool notrunc_p)
4416 tree etype = TREE_TYPE (expr);
4418 /* If the expression is already the right type, we are done. */
4422 /* If both types are integral just do a normal conversion.
4423 Likewise for a conversion to an unconstrained array. */
4424 if ((((INTEGRAL_TYPE_P (type)
4425 && !(TREE_CODE (type) == INTEGER_TYPE
4426 && TYPE_VAX_FLOATING_POINT_P (type)))
4427 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4428 || (TREE_CODE (type) == RECORD_TYPE
4429 && TYPE_JUSTIFIED_MODULAR_P (type)))
4430 && ((INTEGRAL_TYPE_P (etype)
4431 && !(TREE_CODE (etype) == INTEGER_TYPE
4432 && TYPE_VAX_FLOATING_POINT_P (etype)))
4433 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4434 || (TREE_CODE (etype) == RECORD_TYPE
4435 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4436 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
/* A biased source type is first unbiased by converting through an
   identical copy of the type with the bias flag cleared.  */
4438 if (TREE_CODE (etype) == INTEGER_TYPE
4439 && TYPE_BIASED_REPRESENTATION_P (etype))
4441 tree ntype = copy_type (etype);
4442 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4443 TYPE_MAIN_VARIANT (ntype) = ntype;
4444 expr = build1 (NOP_EXPR, ntype, expr);
/* Symmetrically, a biased target type is reached via an unbiased copy
   followed by a plain NOP to the biased type.  */
4447 if (TREE_CODE (type) == INTEGER_TYPE
4448 && TYPE_BIASED_REPRESENTATION_P (type))
4450 tree rtype = copy_type (type);
4451 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4452 TYPE_MAIN_VARIANT (rtype) = rtype;
4453 expr = convert (rtype, expr);
4454 expr = build1 (NOP_EXPR, type, expr);
4457 expr = convert (type, expr);
4460 /* If we are converting to an integral type whose precision is not equal
4461 to its size, first unchecked convert to a record that contains an
4462 object of the output type. Then extract the field. */
4463 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4464 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4465 GET_MODE_BITSIZE (TYPE_MODE (type))))
4467 tree rec_type = make_node (RECORD_TYPE);
4468 tree field = create_field_decl (get_identifier ("OBJ"), type,
4469 rec_type, 1, 0, 0, 0);
4471 TYPE_FIELDS (rec_type) = field;
4472 layout_type (rec_type);
4474 expr = unchecked_convert (rec_type, expr, notrunc_p);
4475 expr = build_component_ref (expr, NULL_TREE, field, 0);
4478 /* Similarly if we are converting from an integral type whose precision
4479 is not equal to its size. */
4480 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4481 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4482 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4484 tree rec_type = make_node (RECORD_TYPE);
4486 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4489 TYPE_FIELDS (rec_type) = field;
4490 layout_type (rec_type);
4492 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4493 expr = unchecked_convert (type, expr, notrunc_p);
4496 /* We have a special case when we are converting between two
4497 unconstrained array types. In that case, take the address,
4498 convert the fat pointer types, and dereference. */
4499 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4500 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4501 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4502 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4503 build_unary_op (ADDR_EXPR, NULL_TREE,
/* Default case: peel any unconstrained-array wrapper, then view-convert,
   folding only when can_fold_for_view_convert_p deems it safe.  */
4507 expr = maybe_unconstrained_array (expr);
4508 etype = TREE_TYPE (expr);
4509 if (can_fold_for_view_convert_p (expr))
4510 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4512 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4515 /* If the result is an integral type whose precision is not equal to its
4516 size, sign- or zero-extend the result. We need not do this if the input
4517 is an integral type of the same precision and signedness or if the output
4518 is a biased type or if both the input and output are unsigned. */
4520 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4521 && !(TREE_CODE (type) == INTEGER_TYPE
4522 && TYPE_BIASED_REPRESENTATION_P (type))
4523 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4524 GET_MODE_BITSIZE (TYPE_MODE (type)))
4525 && !(INTEGRAL_TYPE_P (etype)
4526 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4527 && operand_equal_p (TYPE_RM_SIZE (type),
4528 (TYPE_RM_SIZE (etype) != 0
4529 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4531 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
/* Extend via a left shift up to the mode width followed by an
   arithmetic/logical right shift in a full-mode base type.  */
4533 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4534 TYPE_UNSIGNED (type));
4536 = convert (base_type,
4537 size_binop (MINUS_EXPR,
4539 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4540 TYPE_RM_SIZE (type)));
4543 build_binary_op (RSHIFT_EXPR, base_type,
4544 build_binary_op (LSHIFT_EXPR, base_type,
4545 convert (base_type, expr),
4550 /* An unchecked conversion should never raise Constraint_Error. The code
4551 below assumes that GCC's conversion routines overflow the same way that
4552 the underlying hardware does. This is probably true. In the rare case
4553 when it is false, we can rely on the fact that such conversions are
4554 erroneous anyway. */
4555 if (TREE_CODE (expr) == INTEGER_CST)
4556 TREE_OVERFLOW (expr) = 0;
4558 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4559 show no longer constant. */
4560 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4561 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4563 TREE_CONSTANT (expr) = 0;
4568 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4569 the latter being a record type as predicated by Is_Record_Type. */
4572 tree_code_for_record_type (Entity_Id gnat_type)
4574 Node_Id component_list
4575 = Component_List (Type_Definition
4577 (Implementation_Base_Type (gnat_type))));
4580 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4581 we have a non-discriminant field outside a variant. In either case,
4582 it's a RECORD_TYPE. */
4584 if (!Is_Unchecked_Union (gnat_type))
/* Scan the non-pragma component items; finding a plain E_Component
   outside the variant part forces RECORD_TYPE.  */
4587 for (component = First_Non_Pragma (Component_Items (component_list));
4588 Present (component);
4589 component = Next_Non_Pragma (component))
4590 if (Ekind (Defining_Entity (component)) == E_Component)
4596 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4597 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4598 according to the presence of an alignment clause on the type or, if it
4599 is an array, on the component type. */
4602 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4604 gnat_type = Underlying_Type (gnat_type);
4606 *align_clause = Present (Alignment_Clause (gnat_type));
/* For arrays, look through to the component type; an alignment clause
   on the component also counts.  */
4608 if (Is_Array_Type (gnat_type))
4610 gnat_type = Underlying_Type (Component_Type (gnat_type));
4611 if (Present (Alignment_Clause (gnat_type)))
4612 *align_clause = true;
4615 if (!Is_Floating_Point_Type (gnat_type))
/* Exactly 64 bits qualifies as "double" here.  */
4618 if (UI_To_Int (Esize (gnat_type)) != 64)
4624 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4625 size is greater or equal to 64 bits, or an array of such a type. Set
4626 ALIGN_CLAUSE according to the presence of an alignment clause on the
4627 type or, if it is an array, on the component type. */
4630 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4632 gnat_type = Underlying_Type (gnat_type);
4634 *align_clause = Present (Alignment_Clause (gnat_type));
/* Same array handling as is_double_float_or_array: check the component
   type and honor its alignment clause too.  */
4636 if (Is_Array_Type (gnat_type))
4638 gnat_type = Underlying_Type (Component_Type (gnat_type));
4639 if (Present (Alignment_Clause (gnat_type)))
4640 *align_clause = true;
4643 if (!Is_Scalar_Type (gnat_type))
/* Here >= 64 bits qualifies, unlike the strict == 64 of the float
   variant above.  */
4646 if (UI_To_Int (Esize (gnat_type)) < 64)
4652 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4653 component of an aggregate type.  Returning false means the component
4654 must be treated as potentially aliased. */
4656 type_for_nonaliased_component_p (tree gnu_type)
4658 /* If the type is passed by reference, we may have pointers to the
4659 component so it cannot be made non-aliased. */
4660 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4663 /* We used to say that any component of aggregate type is aliased
4664 because the front-end may take 'Reference of it. The front-end
4665 has been enhanced in the meantime so as to use a renaming instead
4666 in most cases, but the back-end can probably take the address of
4667 such a component too so we go for the conservative stance.
4669 For instance, we might need the address of any array type, even
4670 if normally passed by copy, to construct a fat pointer if the
4671 component is used as an actual for an unconstrained formal.
4673 Likewise for record types: even if a specific record subtype is
4674 passed by copy, the parent type might be passed by ref (e.g. if
4675 it's of variable size) and we might take the address of a child
4676 component to pass to a parent formal. We have no way to check
4677 for such conditions here. */
4678 if (AGGREGATE_TYPE_P (gnu_type))
4684 /* Perform final processing on global variables.  This is the Ada
   front-end's LANG_HOOKS_WRITE_GLOBALS-style entry point. */
4687 gnat_write_global_declarations (void)
4689 /* Proceed to optimize and emit assembly.
4690 FIXME: shouldn't be the front end's responsibility to call this. */
4693 /* Emit debug info for all global declarations. */
4694 emit_debug_global_declarations (VEC_address (tree, global_decls),
4695 VEC_length (tree, global_decls))
4698 /* ************************************************************************
4699 * * GCC builtins support *
4700 * ************************************************************************ */
4702 /* The general scheme is fairly simple:
4704 For each builtin function/type to be declared, gnat_install_builtins calls
4705 internal facilities which eventually get to gnat_push_decl, which in turn
4706 tracks the so declared builtin function decls in the 'builtin_decls' global
4707 datastructure. When an Intrinsic subprogram declaration is processed, we
4708 search this global datastructure to retrieve the associated BUILT_IN DECL
4711 /* Search the chain of currently available builtin declarations for a node
4712 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4713 found, if any, or NULL_TREE otherwise.  This is a simple linear scan
4714 over the builtin_decls vector (see the TODO about a more efficient
4714 name/decl mapping scheme below). */
4715 builtin_decl_for (tree name)
4720 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4721 if (DECL_NAME (decl) == name)
4727 /* The code below eventually exposes gnat_install_builtins, which declares
4728 the builtin types and functions we might need, either internally or as
4729 user accessible facilities.
4731 ??? This is a first implementation shot, still in rough shape. It is
4732 heavily inspired from the "C" family implementation, with chunks copied
4733 verbatim from there.
4735 Two obvious TODO candidates are
4736 o Use a more efficient name/decl mapping scheme
4737 o Devise a middle-end infrastructure to avoid having to copy
4738 pieces between front-ends. */
4740 /* ----------------------------------------------------------------------- *
4741 * BUILTIN ELEMENTARY TYPES *
4742 * ----------------------------------------------------------------------- */
4744 /* Standard data types to be used in builtin argument declarations. */
4748 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4750 CTI_CONST_STRING_TYPE,
/* Backing storage for the C-family type nodes used when declaring
   builtins; the macros below give them their conventional names.  */
4755 static tree c_global_trees[CTI_MAX];
4757 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4758 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4759 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4761 /* ??? In addition to some attribute handlers, we currently don't support a
4762 (small) number of builtin-types, which in turn inhibits support for a
4763 number of builtin functions. */
4764 #define wint_type_node void_type_node
4765 #define intmax_type_node void_type_node
4766 #define uintmax_type_node void_type_node
4768 /* Build the void_list_node (void_type_node having been created),
   i.e. a one-element TREE_LIST holding void_type_node, used to
   terminate function-type argument lists. */
4771 build_void_list_node (void)
4773 tree t = build_tree_list (NULL_TREE, void_type_node);
4777 /* Used to help initialize the builtin-types.def table. When a type of
4778 the correct size doesn't exist, use error_mark_node instead of NULL.
4779 The later results in segfaults even when a decl using the type doesn't
4780 get used. */
4783 builtin_type_for_size (int size, bool unsignedp)
4785 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4786 return type ? type : error_mark_node;
4789 /* Build/push the elementary type decls that builtin functions/types
4790 require, initializing the c_global_trees entries declared above. */
4793 install_builtin_elementary_types (void)
4795 signed_size_type_node = size_type_node;
4796 pid_type_node = integer_type_node;
4797 void_list_node = build_void_list_node ();
/* "string" is char*, "const string" is const char*.  */
4799 string_type_node = build_pointer_type (char_type_node);
4800 const_string_type_node
4801 = build_pointer_type (build_qualified_type
4802 (char_type_node, TYPE_QUAL_CONST));
4805 /* ----------------------------------------------------------------------- *
4806 * BUILTIN FUNCTION TYPES *
4807 * ----------------------------------------------------------------------- */
4809 /* Now, builtin function types per se.  The DEF_* macros each expand to
   just the enumerator NAME, so including builtin-types.def here builds
   the c_builtin_type enumeration. */
4813 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4814 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4815 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4816 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4817 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4818 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4819 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4820 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4821 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4822 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4823 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4824 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4825 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4826 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
/* NOTE(review): last parameter is spelled ARG6 rather than ARG5 --
   harmless since the expansion uses only NAME, but worth fixing.  */
4827 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4829 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4830 #include "builtin-types.def"
4831 #undef DEF_PRIMITIVE_TYPE
4832 #undef DEF_FUNCTION_TYPE_0
4833 #undef DEF_FUNCTION_TYPE_1
4834 #undef DEF_FUNCTION_TYPE_2
4835 #undef DEF_FUNCTION_TYPE_3
4836 #undef DEF_FUNCTION_TYPE_4
4837 #undef DEF_FUNCTION_TYPE_5
4838 #undef DEF_FUNCTION_TYPE_6
4839 #undef DEF_FUNCTION_TYPE_7
4840 #undef DEF_FUNCTION_TYPE_VAR_0
4841 #undef DEF_FUNCTION_TYPE_VAR_1
4842 #undef DEF_FUNCTION_TYPE_VAR_2
4843 #undef DEF_FUNCTION_TYPE_VAR_3
4844 #undef DEF_FUNCTION_TYPE_VAR_4
4845 #undef DEF_FUNCTION_TYPE_VAR_5
4846 #undef DEF_POINTER_TYPE
4850 typedef enum c_builtin_type builtin_type;
4852 /* A temporary array used in communication with def_fn_type. */
4853 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4855 /* A helper function for install_builtin_types. Build function type
4856 for DEF with return type RET and N arguments. If VAR is true, then the
4857 function should be variadic after those N arguments.
4859 Takes special care not to ICE if any of the types involved are
4860 error_mark_node, which indicates that said type is not in fact available
4861 (see builtin_type_for_size). In which case the function type as a whole
4862 should be error_mark_node.
   NOTE(review): "install_builtin_types" above presumably refers to
   install_builtin_function_types below -- confirm and fix the name. */
4865 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4867 tree args = NULL, t;
/* Collect the N argument types from the va_list; builtin_type is
   passed as int through varargs per default promotion.  */
4872 for (i = 0; i < n; ++i)
4874 builtin_type a = (builtin_type) va_arg (list, int);
4875 t = builtin_types[a];
4876 if (t == error_mark_node)
4878 args = tree_cons (NULL_TREE, t, args);
/* Args were consed in reverse; restore order, and terminate with
   void_list_node for non-variadic types.  */
4882 args = nreverse (args);
4884 args = chainon (args, void_list_node);
4886 t = builtin_types[ret];
4887 if (t == error_mark_node)
4889 t = build_function_type (t, args);
4892 builtin_types[def] = t;
4895 /* Build the builtin function types and install them in the builtin_types
4896 array for later use in builtin function decls. */
4899 install_builtin_function_types (void)
4901 tree va_list_ref_type_node;
4902 tree va_list_arg_type_node;
/* When va_list is an array type it decays to a pointer when passed, so
   both the argument and reference forms are that pointer type.  */
4904 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4906 va_list_arg_type_node = va_list_ref_type_node =
4907 build_pointer_type (TREE_TYPE (va_list_type_node));
4911 va_list_arg_type_node = va_list_type_node;
4912 va_list_ref_type_node = build_reference_type (va_list_type_node);
4915 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4916 builtin_types[ENUM] = VALUE;
4917 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4918 def_fn_type (ENUM, RETURN, 0, 0);
4919 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4920 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4921 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4922 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4923 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4924 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4925 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4926 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4927 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4928 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4929 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4931 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4932 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4934 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4935 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4936 def_fn_type (ENUM, RETURN, 1, 0);
4937 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4938 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4939 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4940 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4941 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4942 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4943 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4944 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4945 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4946 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4947 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4948 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4950 #include "builtin-types.def"
/* NOTE(review): #undef DEF_FUNCTION_TYPE_0 and #undef DEF_FUNCTION_TYPE_7
   are missing from the list below, so those two macros stay defined past
   this point.  Confirm and add the #undefs.  */
4952 #undef DEF_PRIMITIVE_TYPE
4953 #undef DEF_FUNCTION_TYPE_1
4954 #undef DEF_FUNCTION_TYPE_2
4955 #undef DEF_FUNCTION_TYPE_3
4956 #undef DEF_FUNCTION_TYPE_4
4957 #undef DEF_FUNCTION_TYPE_5
4958 #undef DEF_FUNCTION_TYPE_6
4959 #undef DEF_FUNCTION_TYPE_VAR_0
4960 #undef DEF_FUNCTION_TYPE_VAR_1
4961 #undef DEF_FUNCTION_TYPE_VAR_2
4962 #undef DEF_FUNCTION_TYPE_VAR_3
4963 #undef DEF_FUNCTION_TYPE_VAR_4
4964 #undef DEF_FUNCTION_TYPE_VAR_5
4965 #undef DEF_POINTER_TYPE
4966 builtin_types[(int) BT_LAST] = NULL_TREE;
4969 /* ----------------------------------------------------------------------- *
4970 * BUILTIN ATTRIBUTES *
4971 * ----------------------------------------------------------------------- */
4973 enum built_in_attribute
/* Each DEF_ATTR_* macro expands to just the enumerator, so including
   builtin-attrs.def builds the enumeration of attribute indices.  */
4975 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4976 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4977 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4978 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4979 #include "builtin-attrs.def"
4980 #undef DEF_ATTR_NULL_TREE
4982 #undef DEF_ATTR_IDENT
4983 #undef DEF_ATTR_TREE_LIST
/* Trees for the attribute table, indexed by the enum above; filled in
   by install_builtin_attributes.  */
4987 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4990 install_builtin_attributes (void)
4992 /* Fill in the built_in_attributes array by re-expanding
4992 builtin-attrs.def, this time as initializing statements. */
4993 #define DEF_ATTR_NULL_TREE(ENUM) \
4994 built_in_attributes[(int) ENUM] = NULL_TREE;
4995 #define DEF_ATTR_INT(ENUM, VALUE) \
4996 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4997 #define DEF_ATTR_IDENT(ENUM, STRING) \
4998 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4999 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5000 built_in_attributes[(int) ENUM] \
5001 = tree_cons (built_in_attributes[(int) PURPOSE], \
5002 built_in_attributes[(int) VALUE], \
5003 built_in_attributes[(int) CHAIN]);
5004 #include "builtin-attrs.def"
5005 #undef DEF_ATTR_NULL_TREE
5007 #undef DEF_ATTR_IDENT
5008 #undef DEF_ATTR_TREE_LIST
5011 /* Handle a "const" attribute; arguments as in
5012 struct attribute_spec.handler.  On a FUNCTION_DECL this sets
5012 TREE_READONLY (the middle-end's "const function" flag); otherwise
5012 the attribute is dropped via *no_add_attrs. */
5015 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5016 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5019 if (TREE_CODE (*node) == FUNCTION_DECL)
5020 TREE_READONLY (*node) = 1;
5022 *no_add_attrs = true;
5027 /* Handle a "nothrow" attribute; arguments as in
5028 struct attribute_spec.handler.  Sets TREE_NOTHROW on a FUNCTION_DECL;
5028 otherwise the attribute is dropped via *no_add_attrs. */
5031 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5032 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5035 if (TREE_CODE (*node) == FUNCTION_DECL)
5036 TREE_NOTHROW (*node) = 1;
5038 *no_add_attrs = true;
5043 /* Handle a "pure" attribute; arguments as in
5044 struct attribute_spec.handler.  Sets DECL_PURE_P on a FUNCTION_DECL;
5044 otherwise warns that the attribute is ignored and drops it. */
5047 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5048 int ARG_UNUSED (flags), bool *no_add_attrs)
5050 if (TREE_CODE (*node) == FUNCTION_DECL)
5051 DECL_PURE_P (*node) = 1;
5052 /* ??? TODO: Support types. */
5055 warning (OPT_Wattributes, "%qE attribute ignored", name);
5056 *no_add_attrs = true;
5062 /* Handle a "no vops" attribute; arguments as in
5063 struct attribute_spec.handler. */
5066 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5067 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5068 bool *ARG_UNUSED (no_add_attrs))
5070 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5071 DECL_IS_NOVOPS (*node) = 1;
5075 /* Helper for nonnull attribute handling; fetch the operand number
5076 from the attribute argument list. */
5079 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5081 /* Verify the arg number is a constant. */
5082 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5083 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5086 *valp = TREE_INT_CST_LOW (arg_num_expr);
5090 /* Handle the "nonnull" attribute. */
5092 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5093 tree args, int ARG_UNUSED (flags),
5097 unsigned HOST_WIDE_INT attr_arg_num;
5099 /* If no arguments are specified, all pointer arguments should be
5100 non-null. Verify a full prototype is given so that the arguments
5101 will have the correct types when we actually check them later. */
5104 if (!TYPE_ARG_TYPES (type))
5106 error ("nonnull attribute without arguments on a non-prototype");
5107 *no_add_attrs = true;
5112 /* Argument list specified. Verify that each argument number references
5113 a pointer argument. */
5114 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5117 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5119 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5121 error ("nonnull argument has invalid operand number (argument %lu)",
5122 (unsigned long) attr_arg_num);
5123 *no_add_attrs = true;
5127 argument = TYPE_ARG_TYPES (type);
5130 for (ck_num = 1; ; ck_num++)
5132 if (!argument || ck_num == arg_num)
5134 argument = TREE_CHAIN (argument);
5138 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5140 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5141 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5142 *no_add_attrs = true;
5146 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5148 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5149 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5150 *no_add_attrs = true;
5159 /* Handle a "sentinel" attribute. */
5162 handle_sentinel_attribute (tree *node, tree name, tree args,
5163 int ARG_UNUSED (flags), bool *no_add_attrs)
5165 tree params = TYPE_ARG_TYPES (*node);
5169 warning (OPT_Wattributes,
5170 "%qE attribute requires prototypes with named arguments", name);
5171 *no_add_attrs = true;
5175 while (TREE_CHAIN (params))
5176 params = TREE_CHAIN (params);
5178 if (VOID_TYPE_P (TREE_VALUE (params)))
5180 warning (OPT_Wattributes,
5181 "%qE attribute only applies to variadic functions", name);
5182 *no_add_attrs = true;
5188 tree position = TREE_VALUE (args);
5190 if (TREE_CODE (position) != INTEGER_CST)
5192 warning (0, "requested position is not an integer constant");
5193 *no_add_attrs = true;
5197 if (tree_int_cst_lt (position, integer_zero_node))
5199 warning (0, "requested position is less than zero");
5200 *no_add_attrs = true;
5208 /* Handle a "noreturn" attribute; arguments as in
5209 struct attribute_spec.handler. */
5212 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5213 int ARG_UNUSED (flags), bool *no_add_attrs)
5215 tree type = TREE_TYPE (*node);
5217 /* See FIXME comment in c_common_attribute_table. */
5218 if (TREE_CODE (*node) == FUNCTION_DECL)
5219 TREE_THIS_VOLATILE (*node) = 1;
5220 else if (TREE_CODE (type) == POINTER_TYPE
5221 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5223 = build_pointer_type
5224 (build_type_variant (TREE_TYPE (type),
5225 TYPE_READONLY (TREE_TYPE (type)), 1));
5228 warning (OPT_Wattributes, "%qE attribute ignored", name);
5229 *no_add_attrs = true;
5235 /* Handle a "malloc" attribute; arguments as in
5236 struct attribute_spec.handler. */
5239 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5240 int ARG_UNUSED (flags), bool *no_add_attrs)
5242 if (TREE_CODE (*node) == FUNCTION_DECL
5243 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5244 DECL_IS_MALLOC (*node) = 1;
5247 warning (OPT_Wattributes, "%qE attribute ignored", name);
5248 *no_add_attrs = true;
5254 /* Fake handler for attributes we don't properly support. */
5257 fake_attribute_handler (tree * ARG_UNUSED (node),
5258 tree ARG_UNUSED (name),
5259 tree ARG_UNUSED (args),
5260 int ARG_UNUSED (flags),
5261 bool * ARG_UNUSED (no_add_attrs))
5266 /* Handle a "type_generic" attribute. */
5269 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5270 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5271 bool * ARG_UNUSED (no_add_attrs))
5275 /* Ensure we have a function type. */
5276 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5278 params = TYPE_ARG_TYPES (*node);
5279 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5280 params = TREE_CHAIN (params);
5282 /* Ensure we have a variadic function. */
5283 gcc_assert (!params);
5288 /* Handle a "vector_size" attribute; arguments as in
5289 struct attribute_spec.handler. */
5292 handle_vector_size_attribute (tree *node, tree name, tree args,
5293 int ARG_UNUSED (flags),
5296 unsigned HOST_WIDE_INT vecsize, nunits;
5297 enum machine_mode orig_mode;
5298 tree type = *node, new_type, size;
5300 *no_add_attrs = true;
5302 size = TREE_VALUE (args);
5304 if (!host_integerp (size, 1))
5306 warning (OPT_Wattributes, "%qE attribute ignored", name);
5310 /* Get the vector size (in bytes). */
5311 vecsize = tree_low_cst (size, 1);
5313 /* We need to provide for vector pointers, vector arrays, and
5314 functions returning vectors. For example:
5316 __attribute__((vector_size(16))) short *foo;
5318 In this case, the mode is SI, but the type being modified is
5319 HI, so we need to look further. */
5321 while (POINTER_TYPE_P (type)
5322 || TREE_CODE (type) == FUNCTION_TYPE
5323 || TREE_CODE (type) == METHOD_TYPE
5324 || TREE_CODE (type) == ARRAY_TYPE
5325 || TREE_CODE (type) == OFFSET_TYPE)
5326 type = TREE_TYPE (type);
5328 /* Get the mode of the type being modified. */
5329 orig_mode = TYPE_MODE (type);
5331 if ((!INTEGRAL_TYPE_P (type)
5332 && !SCALAR_FLOAT_TYPE_P (type)
5333 && !FIXED_POINT_TYPE_P (type))
5334 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5335 && GET_MODE_CLASS (orig_mode) != MODE_INT
5336 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5337 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5338 || TREE_CODE (type) == BOOLEAN_TYPE)
5340 error ("invalid vector type for attribute %qE", name);
5344 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5346 error ("vector size not an integral multiple of component size");
5352 error ("zero vector size");
5356 /* Calculate how many units fit in the vector. */
5357 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5358 if (nunits & (nunits - 1))
5360 error ("number of components of the vector not a power of two");
5364 new_type = build_vector_type (type, nunits);
5366 /* Build back pointers if needed. */
5367 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5372 /* ----------------------------------------------------------------------- *
5373 * BUILTIN FUNCTIONS *
5374 * ----------------------------------------------------------------------- */
5376 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5377 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5378 if nonansi_p and flag_no_nonansi_builtin. */
5381 def_builtin_1 (enum built_in_function fncode,
5383 enum built_in_class fnclass,
5384 tree fntype, tree libtype,
5385 bool both_p, bool fallback_p,
5386 bool nonansi_p ATTRIBUTE_UNUSED,
5387 tree fnattrs, bool implicit_p)
5390 const char *libname;
5392 /* Preserve an already installed decl. It most likely was setup in advance
5393 (e.g. as part of the internal builtins) for specific reasons. */
5394 if (built_in_decls[(int) fncode] != NULL_TREE)
5397 gcc_assert ((!both_p && !fallback_p)
5398 || !strncmp (name, "__builtin_",
5399 strlen ("__builtin_")));
5401 libname = name + strlen ("__builtin_");
5402 decl = add_builtin_function (name, fntype, fncode, fnclass,
5403 (fallback_p ? libname : NULL),
5406 /* ??? This is normally further controlled by command-line options
5407 like -fno-builtin, but we don't have them for Ada. */
5408 add_builtin_function (libname, libtype, fncode, fnclass,
5411 built_in_decls[(int) fncode] = decl;
5413 implicit_built_in_decls[(int) fncode] = decl;
5416 static int flag_isoc94 = 0;
5417 static int flag_isoc99 = 0;
5419 /* Install what the common builtins.def offers. */
5422 install_builtin_functions (void)
5424 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5425 NONANSI_P, ATTRS, IMPLICIT, COND) \
5427 def_builtin_1 (ENUM, NAME, CLASS, \
5428 builtin_types[(int) TYPE], \
5429 builtin_types[(int) LIBTYPE], \
5430 BOTH_P, FALLBACK_P, NONANSI_P, \
5431 built_in_attributes[(int) ATTRS], IMPLICIT);
5432 #include "builtins.def"
5436 /* ----------------------------------------------------------------------- *
5437 * BUILTIN FUNCTIONS *
5438 * ----------------------------------------------------------------------- */
5440 /* Install the builtin functions we might need. */
5443 gnat_install_builtins (void)
5445 install_builtin_elementary_types ();
5446 install_builtin_function_types ();
5447 install_builtin_attributes ();
5449 /* Install builtins used by generic middle-end pieces first. Some of these
5450 know about internal specificities and control attributes accordingly, for
5451 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5452 the generic definition from builtins.def. */
5453 build_common_builtin_nodes ();
5455 /* Now, install the target specific builtins, such as the AltiVec family on
5456 ppc, and the common set as exposed by builtins.def. */
5457 targetm.init_builtins ();
5458 install_builtin_functions ();
5461 #include "gt-ada-utils.h"
5462 #include "gtype-ada.h"