1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
47 #include "tree-dump.h"
48 #include "tree-inline.h"
49 #include "tree-iterator.h"
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
74 /* If nonzero, pretend we are allocating at global level. */
77 /* The default alignment of "double" floating-point types, i.e. floating
78 point types whose size is equal to 64 bits, or 0 if this alignment is
79 not specifically capped. */
80 int double_float_alignment;
82 /* The default alignment of "double" or larger scalar types, i.e. scalar
83 types whose size is greater or equal to 64 bits, or 0 if this alignment
84 is not specifically capped. */
85 int double_scalar_alignment;
87 /* Tree nodes for the various types and decls we create. */
88 tree gnat_std_decls[(int) ADT_LAST];
90 /* Functions to call for each of the possible raise reasons. */
91 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
93 /* Forward declarations for handlers of attributes. */
94 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
101 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
102 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
103 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
105 /* Fake handler for attributes we don't properly support, typically because
106 they'd require dragging a lot of the common-c front-end circuitry. */
107 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
109 /* Table of machine-independent internal attributes for Ada. We support
110 this minimal set of attributes to accommodate the needs of builtins. */
111 const struct attribute_spec gnat_internal_attribute_table[] =
113 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
114 { "const", 0, 0, true, false, false, handle_const_attribute },
115 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute },
117 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
118 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
119 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
121 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
122 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
124 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
125 { "may_alias", 0, 0, false, true, false, NULL },
127 /* ??? format and format_arg are heavy and not supported, which actually
128 prevents support for stdio builtins, which we however declare as part
129 of the common builtins.def contents. */
130 { "format", 3, 3, false, true, true, fake_attribute_handler },
131 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
133 { NULL, 0, 0, false, false, false, NULL }
136 /* Associates a GNAT tree node to a GCC tree node. It is used in
137 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
138 of `save_gnu_tree' for more info. */
139 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
141 #define GET_GNU_TREE(GNAT_ENTITY) \
142 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
144 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
145 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
147 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
148 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
150 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
151 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
153 #define GET_DUMMY_NODE(GNAT_ENTITY) \
154 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
156 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
157 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
159 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
160 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
162 /* This variable keeps a table for types for each precision so that we only
163 allocate each of them once. Signed and unsigned types are kept separate.
165 Note that these types are only used when fold-const requests something
166 special. Perhaps we should NOT share these types; we'll see how it
168 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
170 /* Likewise for float types, but record these by mode. */
171 static GTY(()) tree float_types[NUM_MACHINE_MODES];
173 /* For each binding contour we allocate a binding_level structure to indicate
174 the binding depth. */
176 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
177 /* The binding level containing this one (the enclosing binding level). */
178 struct gnat_binding_level *chain;
179 /* The BLOCK node for this level. */
181 /* If nonzero, the setjmp buffer that needs to be updated for any
182 variable-sized definition within this context. */
186 /* The binding level currently in effect. */
187 static GTY(()) struct gnat_binding_level *current_binding_level;
189 /* A chain of gnat_binding_level structures awaiting reuse. */
190 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
192 /* An array of global declarations. */
193 static GTY(()) VEC(tree,gc) *global_decls;
195 /* An array of builtin function declarations. */
196 static GTY(()) VEC(tree,gc) *builtin_decls;
198 /* An array of global renaming pointers. */
199 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
201 /* A chain of unused BLOCK nodes. */
202 static GTY((deletable)) tree free_block_chain;
204 static tree merge_sizes (tree, tree, tree, bool, bool);
205 static tree compute_related_constant (tree, tree);
206 static tree split_plus (tree, tree *);
207 static tree float_type_for_precision (int, enum machine_mode);
208 static tree convert_to_fat_pointer (tree, tree);
209 static tree convert_to_thin_pointer (tree, tree);
210 static tree make_descriptor_field (const char *,tree, tree, tree);
211 static bool potential_alignment_gap (tree, tree, tree);
213 /* Initialize the association of GNAT nodes to GCC trees. */
216 init_gnat_to_gnu (void)
218 associate_gnat_to_gnu
219 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
222 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
223 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
224 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
226 If GNU_DECL is zero, a previous association is to be reset. */
229 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
231 /* Check that GNAT_ENTITY is not already defined and that it is being set
232 to something which is a decl. Raise gigi 401 if not. Usually, this
233 means GNAT_ENTITY is defined twice, but occasionally is due to some
235 gcc_assert (!(gnu_decl
236 && (PRESENT_GNU_TREE (gnat_entity)
237 || (!no_check && !DECL_P (gnu_decl)))));
239 SET_GNU_TREE (gnat_entity, gnu_decl);
242 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
243 Return the ..._DECL node that was associated with it. If there is no tree
244 node associated with GNAT_ENTITY, abort.
246 In some cases, such as delayed elaboration or expressions that need to
247 be elaborated only once, GNAT_ENTITY is really not an entity. */
250 get_gnu_tree (Entity_Id gnat_entity)
252 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
253 return GET_GNU_TREE (gnat_entity);
256 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
259 present_gnu_tree (Entity_Id gnat_entity)
261 return PRESENT_GNU_TREE (gnat_entity);
264 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
267 init_dummy_type (void)
270 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
273 /* Make a dummy type corresponding to GNAT_TYPE. */
276 make_dummy_type (Entity_Id gnat_type)
278 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
281 /* If there is an equivalent type, get its underlying type. */
282 if (Present (gnat_underlying))
283 gnat_underlying = Underlying_Type (gnat_underlying);
285 /* If there was no equivalent type (can only happen when just annotating
286 types) or underlying type, go back to the original type. */
287 if (No (gnat_underlying))
288 gnat_underlying = gnat_type;
290 /* If it there already a dummy type, use that one. Else make one. */
291 if (PRESENT_DUMMY_NODE (gnat_underlying))
292 return GET_DUMMY_NODE (gnat_underlying);
294 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
296 gnu_type = make_node (Is_Record_Type (gnat_underlying)
297 ? tree_code_for_record_type (gnat_underlying)
299 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
300 TYPE_DUMMY_P (gnu_type) = 1;
301 TYPE_STUB_DECL (gnu_type)
302 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
303 if (AGGREGATE_TYPE_P (gnu_type))
304 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
306 SET_DUMMY_NODE (gnat_underlying, gnu_type);
311 /* Return nonzero if we are currently in the global binding level. */
314 global_bindings_p (void)
316 return ((force_global || !current_function_decl) ? -1 : 0);
319 /* Enter a new binding level. */
322 gnat_pushlevel (void)
324 struct gnat_binding_level *newlevel = NULL;
326 /* Reuse a struct for this binding level, if there is one. */
327 if (free_binding_level)
329 newlevel = free_binding_level;
330 free_binding_level = free_binding_level->chain;
334 = (struct gnat_binding_level *)
335 ggc_alloc (sizeof (struct gnat_binding_level));
337 /* Use a free BLOCK, if any; otherwise, allocate one. */
338 if (free_block_chain)
340 newlevel->block = free_block_chain;
341 free_block_chain = BLOCK_CHAIN (free_block_chain);
342 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
345 newlevel->block = make_node (BLOCK);
347 /* Point the BLOCK we just made to its parent. */
348 if (current_binding_level)
349 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
351 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
352 TREE_USED (newlevel->block) = 1;
354 /* Add this level to the front of the chain (stack) of levels that are
356 newlevel->chain = current_binding_level;
357 newlevel->jmpbuf_decl = NULL_TREE;
358 current_binding_level = newlevel;
361 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
362 and point FNDECL to this BLOCK. */
365 set_current_block_context (tree fndecl)
367 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
368 DECL_INITIAL (fndecl) = current_binding_level->block;
371 /* Set the jmpbuf_decl for the current binding level to DECL. */
374 set_block_jmpbuf_decl (tree decl)
376 current_binding_level->jmpbuf_decl = decl;
379 /* Get the jmpbuf_decl, if any, for the current binding level. */
382 get_block_jmpbuf_decl (void)
384 return current_binding_level->jmpbuf_decl;
387 /* Exit a binding level. Set any BLOCK into the current code group. */
392 struct gnat_binding_level *level = current_binding_level;
393 tree block = level->block;
395 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
396 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
398 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
399 are no variables free the block and merge its subblocks into those of its
400 parent block. Otherwise, add it to the list of its parent. */
401 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
403 else if (BLOCK_VARS (block) == NULL_TREE)
405 BLOCK_SUBBLOCKS (level->chain->block)
406 = chainon (BLOCK_SUBBLOCKS (block),
407 BLOCK_SUBBLOCKS (level->chain->block));
408 BLOCK_CHAIN (block) = free_block_chain;
409 free_block_chain = block;
413 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
414 BLOCK_SUBBLOCKS (level->chain->block) = block;
415 TREE_USED (block) = 1;
416 set_block_for_group (block);
419 /* Free this binding structure. */
420 current_binding_level = level->chain;
421 level->chain = free_binding_level;
422 free_binding_level = level;
426 /* Records a ..._DECL node DECL as belonging to the current lexical scope
427 and uses GNAT_NODE for location information and propagating flags. */
430 gnat_pushdecl (tree decl, Node_Id gnat_node)
432 /* If this decl is public external or at toplevel, there is no context.
433 But PARM_DECLs always go in the level of its function. */
434 if (TREE_CODE (decl) != PARM_DECL
435 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
436 || global_bindings_p ()))
437 DECL_CONTEXT (decl) = 0;
440 DECL_CONTEXT (decl) = current_function_decl;
442 /* Functions imported in another function are not really nested. */
443 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
444 DECL_NO_STATIC_CHAIN (decl) = 1;
447 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
449 /* Set the location of DECL and emit a declaration for it. */
450 if (Present (gnat_node))
451 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
452 add_decl_expr (decl, gnat_node);
454 /* Put the declaration on the list. The list of declarations is in reverse
455 order. The list will be reversed later. Put global variables in the
456 globals list and builtin functions in a dedicated list to speed up
457 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
458 the list, as they will cause trouble with the debugger and aren't needed
460 if (TREE_CODE (decl) != TYPE_DECL
461 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
463 if (global_bindings_p ())
465 VEC_safe_push (tree, gc, global_decls, decl);
467 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
468 VEC_safe_push (tree, gc, builtin_decls, decl);
472 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
473 BLOCK_VARS (current_binding_level->block) = decl;
477 /* For the declaration of a type, set its name if it either is not already
478 set or if the previous type name was not derived from a source name.
479 We'd rather have the type named with a real name and all the pointer
480 types to the same object have the same POINTER_TYPE node. Code in the
481 equivalent function of c-decl.c makes a copy of the type node here, but
482 that may cause us trouble with incomplete types. We make an exception
483 for fat pointer types because the compiler automatically builds them
484 for unconstrained array types and the debugger uses them to represent
485 both these and pointers to these. */
486 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
488 tree t = TREE_TYPE (decl);
490 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
492 else if (TYPE_FAT_POINTER_P (t))
494 tree tt = build_variant_type_copy (t);
495 TYPE_NAME (tt) = decl;
496 TREE_USED (tt) = TREE_USED (t);
497 TREE_TYPE (decl) = tt;
498 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
499 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
501 DECL_ORIGINAL_TYPE (decl) = t;
503 DECL_ARTIFICIAL (decl) = 0;
505 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
510 /* Propagate the name to all the variants. This is needed for
511 the type qualifiers machinery to work properly. */
513 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
514 TYPE_NAME (t) = decl;
518 /* Do little here. Set up the standard declarations later after the
519 front end has been run. */
522 gnat_init_decl_processing (void)
524 /* Make the binding_level structure for global names. */
525 current_function_decl = 0;
526 current_binding_level = 0;
527 free_binding_level = 0;
530 build_common_tree_nodes (true, true);
532 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
533 corresponding to the width of Pmode. In most cases when ptr_mode
534 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
535 But we get far better code using the width of Pmode. */
536 size_type_node = gnat_type_for_mode (Pmode, 0);
537 set_sizetype (size_type_node);
539 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
540 boolean_type_node = make_unsigned_type (8);
541 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
542 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
543 build_int_cst (boolean_type_node, 1));
544 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
546 build_common_tree_nodes_2 (0);
547 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
549 ptr_void_type_node = build_pointer_type (void_type_node);
552 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
555 record_builtin_type (const char *name, tree type)
557 tree type_decl = build_decl (input_location,
558 TYPE_DECL, get_identifier (name), type);
560 gnat_pushdecl (type_decl, Empty);
562 if (debug_hooks->type_decl)
563 debug_hooks->type_decl (type_decl, false);
566 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
567 finish constructing the record or union type. If REP_LEVEL is zero, this
568 record has no representation clause and so will be entirely laid out here.
569 If REP_LEVEL is one, this record has a representation clause and has been
570 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
571 this record is derived from a parent record and thus inherits its layout;
572 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
573 true, the record type is expected to be modified afterwards so it will
574 not be sent to the back-end for finalization. */
577 finish_record_type (tree record_type, tree fieldlist, int rep_level,
578 bool do_not_finalize)
580 enum tree_code code = TREE_CODE (record_type);
581 tree name = TYPE_NAME (record_type);
582 tree ada_size = bitsize_zero_node;
583 tree size = bitsize_zero_node;
584 bool had_size = TYPE_SIZE (record_type) != 0;
585 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
586 bool had_align = TYPE_ALIGN (record_type) != 0;
589 TYPE_FIELDS (record_type) = fieldlist;
591 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
592 generate debug info and have a parallel type. */
593 if (name && TREE_CODE (name) == TYPE_DECL)
594 name = DECL_NAME (name);
595 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
597 /* Globally initialize the record first. If this is a rep'ed record,
598 that just means some initializations; otherwise, layout the record. */
601 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
602 SET_TYPE_MODE (record_type, BLKmode);
605 TYPE_SIZE_UNIT (record_type) = size_zero_node;
607 TYPE_SIZE (record_type) = bitsize_zero_node;
609 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
610 out just like a UNION_TYPE, since the size will be fixed. */
611 else if (code == QUAL_UNION_TYPE)
616 /* Ensure there isn't a size already set. There can be in an error
617 case where there is a rep clause but all fields have errors and
618 no longer have a position. */
619 TYPE_SIZE (record_type) = 0;
620 layout_type (record_type);
623 /* At this point, the position and size of each field is known. It was
624 either set before entry by a rep clause, or by laying out the type above.
626 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
627 to compute the Ada size; the GCC size and alignment (for rep'ed records
628 that are not padding types); and the mode (for rep'ed records). We also
629 clear the DECL_BIT_FIELD indication for the cases we know have not been
630 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
632 if (code == QUAL_UNION_TYPE)
633 fieldlist = nreverse (fieldlist);
635 for (field = fieldlist; field; field = TREE_CHAIN (field))
637 tree type = TREE_TYPE (field);
638 tree pos = bit_position (field);
639 tree this_size = DECL_SIZE (field);
642 if ((TREE_CODE (type) == RECORD_TYPE
643 || TREE_CODE (type) == UNION_TYPE
644 || TREE_CODE (type) == QUAL_UNION_TYPE)
645 && !TYPE_IS_FAT_POINTER_P (type)
646 && !TYPE_CONTAINS_TEMPLATE_P (type)
647 && TYPE_ADA_SIZE (type))
648 this_ada_size = TYPE_ADA_SIZE (type);
650 this_ada_size = this_size;
652 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
653 if (DECL_BIT_FIELD (field)
654 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
656 unsigned int align = TYPE_ALIGN (type);
658 /* In the general case, type alignment is required. */
659 if (value_factor_p (pos, align))
661 /* The enclosing record type must be sufficiently aligned.
662 Otherwise, if no alignment was specified for it and it
663 has been laid out already, bump its alignment to the
664 desired one if this is compatible with its size. */
665 if (TYPE_ALIGN (record_type) >= align)
667 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
668 DECL_BIT_FIELD (field) = 0;
672 && value_factor_p (TYPE_SIZE (record_type), align))
674 TYPE_ALIGN (record_type) = align;
675 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
676 DECL_BIT_FIELD (field) = 0;
680 /* In the non-strict alignment case, only byte alignment is. */
681 if (!STRICT_ALIGNMENT
682 && DECL_BIT_FIELD (field)
683 && value_factor_p (pos, BITS_PER_UNIT))
684 DECL_BIT_FIELD (field) = 0;
687 /* If we still have DECL_BIT_FIELD set at this point, we know that the
688 field is technically not addressable. Except that it can actually
689 be addressed if it is BLKmode and happens to be properly aligned. */
690 if (DECL_BIT_FIELD (field)
691 && !(DECL_MODE (field) == BLKmode
692 && value_factor_p (pos, BITS_PER_UNIT)))
693 DECL_NONADDRESSABLE_P (field) = 1;
695 /* A type must be as aligned as its most aligned field that is not
696 a bit-field. But this is already enforced by layout_type. */
697 if (rep_level > 0 && !DECL_BIT_FIELD (field))
698 TYPE_ALIGN (record_type)
699 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
704 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
705 size = size_binop (MAX_EXPR, size, this_size);
708 case QUAL_UNION_TYPE:
710 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
711 this_ada_size, ada_size);
712 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
717 /* Since we know here that all fields are sorted in order of
718 increasing bit position, the size of the record is one
719 higher than the ending bit of the last field processed
720 unless we have a rep clause, since in that case we might
721 have a field outside a QUAL_UNION_TYPE that has a higher ending
722 position. So use a MAX in that case. Also, if this field is a
723 QUAL_UNION_TYPE, we need to take into account the previous size in
724 the case of empty variants. */
726 = merge_sizes (ada_size, pos, this_ada_size,
727 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
729 = merge_sizes (size, pos, this_size,
730 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
738 if (code == QUAL_UNION_TYPE)
739 nreverse (fieldlist);
741 /* If the type is discriminated, it can be used to access all its
742 constrained subtypes, so force structural equality checks. */
743 if (CONTAINS_PLACEHOLDER_P (size))
744 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
748 /* If this is a padding record, we never want to make the size smaller
749 than what was specified in it, if any. */
750 if (TREE_CODE (record_type) == RECORD_TYPE
751 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
752 size = TYPE_SIZE (record_type);
754 /* Now set any of the values we've just computed that apply. */
755 if (!TYPE_IS_FAT_POINTER_P (record_type)
756 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
757 SET_TYPE_ADA_SIZE (record_type, ada_size);
761 tree size_unit = had_size_unit
762 ? TYPE_SIZE_UNIT (record_type)
764 size_binop (CEIL_DIV_EXPR, size,
766 unsigned int align = TYPE_ALIGN (record_type);
768 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
769 TYPE_SIZE_UNIT (record_type)
770 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
772 compute_record_mode (record_type);
776 if (!do_not_finalize)
777 rest_of_record_type_compilation (record_type);
780 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
781 the debug information associated with it. It need not be invoked
782 directly in most cases since finish_record_type takes care of doing
783 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
786 rest_of_record_type_compilation (tree record_type)
788 tree fieldlist = TYPE_FIELDS (record_type);
790 enum tree_code code = TREE_CODE (record_type);
791 bool var_size = false;
793 for (field = fieldlist; field; field = TREE_CHAIN (field))
795 /* We need to make an XVE/XVU record if any field has variable size,
796 whether or not the record does. For example, if we have a union,
797 it may be that all fields, rounded up to the alignment, have the
798 same size, in which case we'll use that size. But the debug
799 output routines (except Dwarf2) won't be able to output the fields,
800 so we need to make the special record. */
801 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
802 /* If a field has a non-constant qualifier, the record will have
803 variable size too. */
804 || (code == QUAL_UNION_TYPE
805 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
812 /* If this record is of variable size, rename it so that the
813 debugger knows it is and make a new, parallel, record
814 that tells the debugger how the record is laid out. See
815 exp_dbug.ads. But don't do this for records that are padding
816 since they confuse GDB. */
818 && !(TREE_CODE (record_type) == RECORD_TYPE
819 && TYPE_IS_PADDING_P (record_type)))
822 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
823 ? UNION_TYPE : TREE_CODE (record_type));
824 tree orig_name = TYPE_NAME (record_type), new_name;
825 tree last_pos = bitsize_zero_node;
826 tree old_field, prev_old_field = NULL_TREE;
828 if (TREE_CODE (orig_name) == TYPE_DECL)
829 orig_name = DECL_NAME (orig_name);
832 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
834 TYPE_NAME (new_record_type) = new_name;
835 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
836 TYPE_STUB_DECL (new_record_type)
837 = create_type_stub_decl (new_name, new_record_type);
838 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
839 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
840 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
841 TYPE_SIZE_UNIT (new_record_type)
842 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
844 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
846 /* Now scan all the fields, replacing each field with a new
847 field corresponding to the new encoding. */
848 for (old_field = TYPE_FIELDS (record_type); old_field;
849 old_field = TREE_CHAIN (old_field))
851 tree field_type = TREE_TYPE (old_field);
852 tree field_name = DECL_NAME (old_field);
854 tree curpos = bit_position (old_field);
856 unsigned int align = 0;
859 /* See how the position was modified from the last position.
861 There are two basic cases we support: a value was added
862 to the last position or the last position was rounded to
863 a boundary and they something was added. Check for the
864 first case first. If not, see if there is any evidence
865 of rounding. If so, round the last position and try
868 If this is a union, the position can be taken as zero. */
870 /* Some computations depend on the shape of the position expression,
871 so strip conversions to make sure it's exposed. */
872 curpos = remove_conversions (curpos, true);
874 if (TREE_CODE (new_record_type) == UNION_TYPE)
875 pos = bitsize_zero_node, align = 0;
877 pos = compute_related_constant (curpos, last_pos);
879 if (!pos && TREE_CODE (curpos) == MULT_EXPR
880 && host_integerp (TREE_OPERAND (curpos, 1), 1))
882 tree offset = TREE_OPERAND (curpos, 0);
883 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
885 /* An offset which is a bitwise AND with a negative power of 2
886 means an alignment corresponding to this power of 2. */
887 offset = remove_conversions (offset, true);
888 if (TREE_CODE (offset) == BIT_AND_EXPR
889 && host_integerp (TREE_OPERAND (offset, 1), 0)
890 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
893 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
894 if (exact_log2 (pow) > 0)
898 pos = compute_related_constant (curpos,
899 round_up (last_pos, align));
901 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
902 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
903 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
904 && host_integerp (TREE_OPERAND
905 (TREE_OPERAND (curpos, 0), 1),
910 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
911 pos = compute_related_constant (curpos,
912 round_up (last_pos, align));
914 else if (potential_alignment_gap (prev_old_field, old_field,
917 align = TYPE_ALIGN (field_type);
918 pos = compute_related_constant (curpos,
919 round_up (last_pos, align));
922 /* If we can't compute a position, set it to zero.
924 ??? We really should abort here, but it's too much work
925 to get this correct for all cases. */
928 pos = bitsize_zero_node;
930 /* See if this type is variable-sized and make a pointer type
931 and indicate the indirection if so. Beware that the debug
932 back-end may adjust the position computed above according
933 to the alignment of the field type, i.e. the pointer type
934 in this case, if we don't preventively counter that. */
935 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
937 field_type = build_pointer_type (field_type);
938 if (align != 0 && TYPE_ALIGN (field_type) > align)
940 field_type = copy_node (field_type);
941 TYPE_ALIGN (field_type) = align;
946 /* Make a new field name, if necessary. */
947 if (var || align != 0)
952 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
953 align / BITS_PER_UNIT);
955 strcpy (suffix, "XVL");
957 field_name = concat_name (field_name, suffix);
960 new_field = create_field_decl (field_name, field_type,
962 DECL_SIZE (old_field), pos, 0);
963 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
964 TYPE_FIELDS (new_record_type) = new_field;
966 /* If old_field is a QUAL_UNION_TYPE, take its size as being
967 zero. The only time it's not the last field of the record
968 is when there are other components at fixed positions after
969 it (meaning there was a rep clause for every field) and we
970 want to be able to encode them. */
971 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
972 (TREE_CODE (TREE_TYPE (old_field))
975 : DECL_SIZE (old_field));
976 prev_old_field = old_field;
979 TYPE_FIELDS (new_record_type)
980 = nreverse (TYPE_FIELDS (new_record_type));
982 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
985 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
988 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
991 add_parallel_type (tree decl, tree parallel_type)
995 while (DECL_PARALLEL_TYPE (d))
996 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
998 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1001 /* Return the parallel type associated to a type, if any. */
1004 get_parallel_type (tree type)
1006 if (TYPE_STUB_DECL (type))
1007 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1012 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1013 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1014 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1015 replace a value of zero with the old size. If HAS_REP is true, we take the
1016 MAX of the end position of this field with LAST_SIZE. In all other cases,
1017 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1020 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1023 tree type = TREE_TYPE (last_size);
1026 if (!special || TREE_CODE (size) != COND_EXPR)
1028 new_size = size_binop (PLUS_EXPR, first_bit, size);
1030 new_size = size_binop (MAX_EXPR, last_size, new_size);
1034 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1035 integer_zerop (TREE_OPERAND (size, 1))
1036 ? last_size : merge_sizes (last_size, first_bit,
1037 TREE_OPERAND (size, 1),
1039 integer_zerop (TREE_OPERAND (size, 2))
1040 ? last_size : merge_sizes (last_size, first_bit,
1041 TREE_OPERAND (size, 2),
1044 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1045 when fed through substitute_in_expr) into thinking that a constant
1046 size is not constant. */
1047 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1048 new_size = TREE_OPERAND (new_size, 0);
1053 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1054 related by the addition of a constant. Return that constant if so. */
1057 compute_related_constant (tree op0, tree op1)
1059 tree op0_var, op1_var;
1060 tree op0_con = split_plus (op0, &op0_var);
1061 tree op1_con = split_plus (op1, &op1_var);
1062 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1064 if (operand_equal_p (op0_var, op1_var, 0))
1066 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1072 /* Utility function of above to split a tree OP which may be a sum, into a
1073 constant part, which is returned, and a variable part, which is stored
1074 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1078 split_plus (tree in, tree *pvar)
1080 /* Strip NOPS in order to ease the tree traversal and maximize the
1081 potential for constant or plus/minus discovery. We need to be careful
1082 to always return and set *pvar to bitsizetype trees, but it's worth
1086 *pvar = convert (bitsizetype, in);
1088 if (TREE_CODE (in) == INTEGER_CST)
1090 *pvar = bitsize_zero_node;
1091 return convert (bitsizetype, in);
1093 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1095 tree lhs_var, rhs_var;
1096 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1097 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1099 if (lhs_var == TREE_OPERAND (in, 0)
1100 && rhs_var == TREE_OPERAND (in, 1))
1101 return bitsize_zero_node;
1103 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1104 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1107 return bitsize_zero_node;
1110 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1111 subprogram. If it is void_type_node, then we are dealing with a procedure,
1112 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1113 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1114 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1115 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1116 object. RETURNS_BY_REF is true if the function returns by reference.
1117 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1118 first parameter) the address of the place to copy its result. */
1121 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1122 bool returns_unconstrained, bool returns_by_ref,
1123 bool returns_by_target_ptr)
1125 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1126 the subprogram formal parameters. This list is generated by traversing the
1127 input list of PARM_DECL nodes. */
1128 tree param_type_list = NULL;
1132 for (param_decl = param_decl_list; param_decl;
1133 param_decl = TREE_CHAIN (param_decl))
1134 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1137 /* The list of the function parameter types has to be terminated by the void
1138 type to signal to the back-end that we are not dealing with a variable
1139 parameter subprogram, but that the subprogram has a fixed number of
1141 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1143 /* The list of argument types has been created in reverse
1145 param_type_list = nreverse (param_type_list);
1147 type = build_function_type (return_type, param_type_list);
1149 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1150 or the new type should, make a copy of TYPE. Likewise for
1151 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1152 if (TYPE_CI_CO_LIST (type) || cico_list
1153 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1154 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1155 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1156 type = copy_type (type);
1158 TYPE_CI_CO_LIST (type) = cico_list;
1159 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1160 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1161 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1165 /* Return a copy of TYPE but safe to modify in any way. */
1168 copy_type (tree type)
1170 tree new_type = copy_node (type);
1172 /* copy_node clears this field instead of copying it, because it is
1173 aliased with TREE_CHAIN. */
1174 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1176 TYPE_POINTER_TO (new_type) = 0;
1177 TYPE_REFERENCE_TO (new_type) = 0;
1178 TYPE_MAIN_VARIANT (new_type) = new_type;
1179 TYPE_NEXT_VARIANT (new_type) = 0;
1184 /* Return a subtype of sizetype with range MIN to MAX and whose
1185 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1186 of the associated TYPE_DECL. */
1189 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1191 /* First build a type for the desired range. */
1192 tree type = build_index_2_type (min, max);
1194 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1195 if (TYPE_INDEX_TYPE (type) == index)
1198 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1199 no way of sharing these types, but that's only a small hole. */
1200 if (TYPE_INDEX_TYPE (type))
1201 type = copy_type (type);
1203 SET_TYPE_INDEX_TYPE (type, index);
1204 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1209 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1210 sizetype is used. */
1213 create_range_type (tree type, tree min, tree max)
1217 if (type == NULL_TREE)
1220 /* First build a type with the base range. */
1222 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1224 min = convert (type, min);
1225 max = convert (type, max);
1227 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1228 if (TYPE_RM_MIN_VALUE (range_type)
1229 && TYPE_RM_MAX_VALUE (range_type)
1230 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1231 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1234 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1235 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1236 range_type = copy_type (range_type);
1238 /* Then set the actual range. */
1239 SET_TYPE_RM_MIN_VALUE (range_type, min);
1240 SET_TYPE_RM_MAX_VALUE (range_type, max);
1245 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1246 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1250 create_type_stub_decl (tree type_name, tree type)
1252 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1253 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1254 emitted in DWARF. */
1255 tree type_decl = build_decl (input_location,
1256 TYPE_DECL, type_name, type);
1257 DECL_ARTIFICIAL (type_decl) = 1;
1261 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1262 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1263 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1264 true if we need to write debug information about this type. GNAT_NODE
1265 is used for the position of the decl. */
1268 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1269 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1271 enum tree_code code = TREE_CODE (type);
1272 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1275 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1276 gcc_assert (!TYPE_IS_DUMMY_P (type));
1278 /* If the type hasn't been named yet, we're naming it; preserve an existing
1279 TYPE_STUB_DECL that has been attached to it for some purpose. */
1280 if (!named && TYPE_STUB_DECL (type))
1282 type_decl = TYPE_STUB_DECL (type);
1283 DECL_NAME (type_decl) = type_name;
1286 type_decl = build_decl (input_location,
1287 TYPE_DECL, type_name, type);
1289 DECL_ARTIFICIAL (type_decl) = artificial_p;
1290 gnat_pushdecl (type_decl, gnat_node);
1291 process_attributes (type_decl, attr_list);
1293 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1294 This causes the name to be also viewed as a "tag" by the debug
1295 back-end, with the advantage that no DW_TAG_typedef is emitted
1296 for artificial "tagged" types in DWARF. */
1298 TYPE_STUB_DECL (type) = type_decl;
1300 /* Pass the type declaration to the debug back-end unless this is an
1301 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1302 type for which debugging information was not requested, or else an
1303 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1304 handled separately. And do not pass dummy types either. */
1305 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1306 DECL_IGNORED_P (type_decl) = 1;
1307 else if (code != ENUMERAL_TYPE
1308 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1309 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1310 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1311 && !(code == RECORD_TYPE
1313 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1314 rest_of_type_decl_compilation (type_decl);
1319 /* Return a VAR_DECL or CONST_DECL node.
1321 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1322 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1323 the GCC tree for an optional initial expression; NULL_TREE if none.
1325 CONST_FLAG is true if this variable is constant, in which case we might
1326 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1328 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1329 definition to be made visible outside of the current compilation unit, for
1330 instance variable definitions in a package specification.
1332 EXTERN_FLAG is true when processing an external variable declaration (as
1333 opposed to a definition: no storage is to be allocated for the variable).
1335 STATIC_FLAG is only relevant when not at top level. In that case
1336 it indicates whether to always allocate storage to the variable.
1338 GNAT_NODE is used for the position of the decl. */
1341 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1342 bool const_flag, bool public_flag, bool extern_flag,
1343 bool static_flag, bool const_decl_allowed_p,
1344 struct attrib *attr_list, Node_Id gnat_node)
1348 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1349 && (global_bindings_p () || static_flag
1350 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1351 : TREE_CONSTANT (var_init)));
1353 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1354 case the initializer may be used in-lieu of the DECL node (as done in
1355 Identifier_to_gnu). This is useful to prevent the need of elaboration
1356 code when an identifier for which such a decl is made is in turn used as
1357 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1358 but extra constraints apply to this choice (see below) and are not
1359 relevant to the distinction we wish to make. */
1360 bool constant_p = const_flag && init_const;
1362 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1363 and may be used for scalars in general but not for aggregates. */
1365 = build_decl (input_location,
1366 (constant_p && const_decl_allowed_p
1367 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1370 /* If this is external, throw away any initializations (they will be done
1371 elsewhere) unless this is a constant for which we would like to remain
1372 able to get the initializer. If we are defining a global here, leave a
1373 constant initialization and save any variable elaborations for the
1374 elaboration routine. If we are just annotating types, throw away the
1375 initialization if it isn't a constant. */
1376 if ((extern_flag && !constant_p)
1377 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1378 var_init = NULL_TREE;
1380 /* At the global level, an initializer requiring code to be generated
1381 produces elaboration statements. Check that such statements are allowed,
1382 that is, not violating a No_Elaboration_Code restriction. */
1383 if (global_bindings_p () && var_init != 0 && ! init_const)
1384 Check_Elaboration_Code_Allowed (gnat_node);
1386 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1387 try to fiddle with DECL_COMMON. However, on platforms that don't
1388 support global BSS sections, uninitialized global variables would
1389 go in DATA instead, thus increasing the size of the executable. */
1391 && TREE_CODE (var_decl) == VAR_DECL
1392 && !have_global_bss_p ())
1393 DECL_COMMON (var_decl) = 1;
1394 DECL_INITIAL (var_decl) = var_init;
1395 TREE_READONLY (var_decl) = const_flag;
1396 DECL_EXTERNAL (var_decl) = extern_flag;
1397 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1398 TREE_CONSTANT (var_decl) = constant_p;
1399 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1400 = TYPE_VOLATILE (type);
1402 /* If it's public and not external, always allocate storage for it.
1403 At the global binding level we need to allocate static storage for the
1404 variable if and only if it's not external. If we are not at the top level
1405 we allocate automatic storage unless requested not to. */
1406 TREE_STATIC (var_decl)
1407 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1409 /* For an external constant whose initializer is not absolute, do not emit
1410 debug info. In DWARF this would mean a global relocation in a read-only
1411 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1414 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1415 != null_pointer_node)
1416 DECL_IGNORED_P (var_decl) = 1;
1418 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1419 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1421 process_attributes (var_decl, attr_list);
1423 /* Add this decl to the current binding level. */
1424 gnat_pushdecl (var_decl, gnat_node);
1426 if (TREE_SIDE_EFFECTS (var_decl))
1427 TREE_ADDRESSABLE (var_decl) = 1;
1429 if (TREE_CODE (var_decl) != CONST_DECL)
1431 if (global_bindings_p ())
1432 rest_of_decl_compilation (var_decl, true, 0);
1435 expand_decl (var_decl);
1440 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1443 aggregate_type_contains_array_p (tree type)
1445 switch (TREE_CODE (type))
1449 case QUAL_UNION_TYPE:
1452 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1453 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1454 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1467 /* Return a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1468 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1469 this field is in a record type with a "pragma pack". If SIZE is nonzero
1470 it is the specified size for this field. If POS is nonzero, it is the bit
1471 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1472 the address of this field for aliasing purposes. If it is negative, we
1473 should not make a bitfield, which is used by make_aligning_type. */
1476 create_field_decl (tree field_name, tree field_type, tree record_type,
1477 int packed, tree size, tree pos, int addressable)
1479 tree field_decl = build_decl (input_location,
1480 FIELD_DECL, field_name, field_type);
1482 DECL_CONTEXT (field_decl) = record_type;
1483 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1485 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1486 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1487 Likewise for an aggregate without specified position that contains an
1488 array, because in this case slices of variable length of this array
1489 must be handled by GCC and variable-sized objects need to be aligned
1490 to at least a byte boundary. */
1491 if (packed && (TYPE_MODE (field_type) == BLKmode
1493 && AGGREGATE_TYPE_P (field_type)
1494 && aggregate_type_contains_array_p (field_type))))
1495 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1497 /* If a size is specified, use it. Otherwise, if the record type is packed
1498 compute a size to use, which may differ from the object's natural size.
1499 We always set a size in this case to trigger the checks for bitfield
1500 creation below, which is typically required when no position has been
1503 size = convert (bitsizetype, size);
1504 else if (packed == 1)
1506 size = rm_size (field_type);
1508 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1510 if (TREE_CODE (size) == INTEGER_CST
1511 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1512 size = round_up (size, BITS_PER_UNIT);
1515 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1516 specified for two reasons: first if the size differs from the natural
1517 size. Second, if the alignment is insufficient. There are a number of
1518 ways the latter can be true.
1520 We never make a bitfield if the type of the field has a nonconstant size,
1521 because no such entity requiring bitfield operations should reach here.
1523 We do *preventively* make a bitfield when there might be the need for it
1524 but we don't have all the necessary information to decide, as is the case
1525 of a field with no specified position in a packed record.
1527 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1528 in layout_decl or finish_record_type to clear the bit_field indication if
1529 it is in fact not needed. */
1530 if (addressable >= 0
1532 && TREE_CODE (size) == INTEGER_CST
1533 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1534 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1535 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1537 || (TYPE_ALIGN (record_type) != 0
1538 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1540 DECL_BIT_FIELD (field_decl) = 1;
1541 DECL_SIZE (field_decl) = size;
1542 if (!packed && !pos)
1544 if (TYPE_ALIGN (record_type) != 0
1545 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1546 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1548 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1552 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1554 /* Bump the alignment if need be, either for bitfield/packing purposes or
1555 to satisfy the type requirements if no such consideration applies. When
1556 we get the alignment from the type, indicate if this is from an explicit
1557 user request, which prevents stor-layout from lowering it later on. */
1559 unsigned int bit_align
1560 = (DECL_BIT_FIELD (field_decl) ? 1
1561 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1563 if (bit_align > DECL_ALIGN (field_decl))
1564 DECL_ALIGN (field_decl) = bit_align;
1565 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1567 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1568 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1574 /* We need to pass in the alignment the DECL is known to have.
1575 This is the lowest-order bit set in POS, but no more than
1576 the alignment of the record, if one is specified. Note
1577 that an alignment of 0 is taken as infinite. */
1578 unsigned int known_align;
1580 if (host_integerp (pos, 1))
1581 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1583 known_align = BITS_PER_UNIT;
1585 if (TYPE_ALIGN (record_type)
1586 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1587 known_align = TYPE_ALIGN (record_type);
1589 layout_decl (field_decl, known_align);
1590 SET_DECL_OFFSET_ALIGN (field_decl,
1591 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1593 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1594 &DECL_FIELD_BIT_OFFSET (field_decl),
1595 DECL_OFFSET_ALIGN (field_decl), pos);
1598 /* In addition to what our caller says, claim the field is addressable if we
1599 know that its type is not suitable.
1601 The field may also be "technically" nonaddressable, meaning that even if
1602 we attempt to take the field's address we will actually get the address
1603 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1604 value we have at this point is not accurate enough, so we don't account
1605 for this here and let finish_record_type decide. */
1606 if (!addressable && !type_for_nonaliased_component_p (field_type))
1609 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1614 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1615 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1616 (either an In parameter or an address of a pass-by-ref parameter). */
1619 create_param_decl (tree param_name, tree param_type, bool readonly)
1621 tree param_decl = build_decl (input_location,
1622 PARM_DECL, param_name, param_type);
1624 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1625 can lead to various ABI violations. */
1626 if (targetm.calls.promote_prototypes (NULL_TREE)
1627 && INTEGRAL_TYPE_P (param_type)
1628 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1630 /* We have to be careful about biased types here. Make a subtype
1631 of integer_type_node with the proper biasing. */
1632 if (TREE_CODE (param_type) == INTEGER_TYPE
1633 && TYPE_BIASED_REPRESENTATION_P (param_type))
1636 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1637 TREE_TYPE (subtype) = integer_type_node;
1638 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1639 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1640 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1641 param_type = subtype;
1644 param_type = integer_type_node;
1647 DECL_ARG_TYPE (param_decl) = param_type;
1648 TREE_READONLY (param_decl) = readonly;
1652 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1655 process_attributes (tree decl, struct attrib *attr_list)
1657 for (; attr_list; attr_list = attr_list->next)
1658 switch (attr_list->type)
1660 case ATTR_MACHINE_ATTRIBUTE:
1661 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1663 ATTR_FLAG_TYPE_IN_PLACE);
1666 case ATTR_LINK_ALIAS:
1667 if (! DECL_EXTERNAL (decl))
1669 TREE_STATIC (decl) = 1;
1670 assemble_alias (decl, attr_list->name);
1674 case ATTR_WEAK_EXTERNAL:
1676 declare_weak (decl);
1678 post_error ("?weak declarations not supported on this target",
1679 attr_list->error_point);
1682 case ATTR_LINK_SECTION:
1683 if (targetm.have_named_sections)
1685 DECL_SECTION_NAME (decl)
1686 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1687 IDENTIFIER_POINTER (attr_list->name));
1688 DECL_COMMON (decl) = 0;
1691 post_error ("?section attributes are not supported for this target",
1692 attr_list->error_point);
1695 case ATTR_LINK_CONSTRUCTOR:
1696 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1697 TREE_USED (decl) = 1;
1700 case ATTR_LINK_DESTRUCTOR:
1701 DECL_STATIC_DESTRUCTOR (decl) = 1;
1702 TREE_USED (decl) = 1;
1705 case ATTR_THREAD_LOCAL_STORAGE:
1706 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1707 DECL_COMMON (decl) = 0;
1712 /* Record DECL as a global renaming pointer. */
1715 record_global_renaming_pointer (tree decl)
1717 gcc_assert (DECL_RENAMED_OBJECT (decl));
1718 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1721 /* Invalidate the global renaming pointers. */
1724 invalidate_global_renaming_pointers (void)
1729 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1730 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1732 VEC_free (tree, gc, global_renaming_pointers);
1735 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1739 value_factor_p (tree value, HOST_WIDE_INT factor)
1741 if (host_integerp (value, 1))
1742 return tree_low_cst (value, 1) % factor == 0;
1744 if (TREE_CODE (value) == MULT_EXPR)
1745 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1746 || value_factor_p (TREE_OPERAND (value, 1), factor));
1751 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1752 unless we can prove these 2 fields are laid out in such a way that no gap
1753 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1754 is the distance in bits between the end of PREV_FIELD and the starting
1755 position of CURR_FIELD. It is ignored if null. */
1758 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1760 /* If this is the first field of the record, there cannot be any gap */
1764 /* If the previous field is a union type, then return False: The only
1765 time when such a field is not the last field of the record is when
1766 there are other components at fixed positions after it (meaning there
1767 was a rep clause for every field), in which case we don't want the
1768 alignment constraint to override them. */
1769 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1772 /* If the distance between the end of prev_field and the beginning of
1773 curr_field is constant, then there is a gap if the value of this
1774 constant is not null. */
1775 if (offset && host_integerp (offset, 1))
1776 return !integer_zerop (offset);
1778 /* If the size and position of the previous field are constant,
1779 then check the sum of this size and position. There will be a gap
1780 iff it is not multiple of the current field alignment. */
1781 if (host_integerp (DECL_SIZE (prev_field), 1)
1782 && host_integerp (bit_position (prev_field), 1))
1783 return ((tree_low_cst (bit_position (prev_field), 1)
1784 + tree_low_cst (DECL_SIZE (prev_field), 1))
1785 % DECL_ALIGN (curr_field) != 0);
1787 /* If both the position and size of the previous field are multiples
1788 of the current field alignment, there cannot be any gap. */
1789 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1790 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1793 /* Fallback, return that there may be a potential gap */
1797 /* Returns a LABEL_DECL node for LABEL_NAME. */
1800 create_label_decl (tree label_name)
1802 tree label_decl = build_decl (input_location,
1803 LABEL_DECL, label_name, void_type_node);
1805 DECL_CONTEXT (label_decl) = current_function_decl;
1806 DECL_MODE (label_decl) = VOIDmode;
1807 DECL_SOURCE_LOCATION (label_decl) = input_location;
1812 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1813 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1814 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1815 PARM_DECL nodes chained through the TREE_CHAIN field).
1817 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1818 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1821 create_subprog_decl (tree subprog_name, tree asm_name,
1822 tree subprog_type, tree param_decl_list, bool inline_flag,
1823 bool public_flag, bool extern_flag,
1824 struct attrib *attr_list, Node_Id gnat_node)
1826 tree return_type = TREE_TYPE (subprog_type);
1827 tree subprog_decl = build_decl (input_location,
1828 FUNCTION_DECL, subprog_name, subprog_type);
1830 /* If this is a non-inline function nested inside an inlined external
1831 function, we cannot honor both requests without cloning the nested
1832 function in the current unit since it is private to the other unit.
1833 We could inline the nested function as well but it's probably better
1834 to err on the side of too little inlining. */
1836 && current_function_decl
1837 && DECL_DECLARED_INLINE_P (current_function_decl)
1838 && DECL_EXTERNAL (current_function_decl))
1839 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1841 DECL_EXTERNAL (subprog_decl) = extern_flag;
1842 TREE_PUBLIC (subprog_decl) = public_flag;
1843 TREE_STATIC (subprog_decl) = 1;
1844 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1845 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1846 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1847 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1848 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1849 DECL_RESULT (subprog_decl) = build_decl (input_location,
1850 RESULT_DECL, 0, return_type);
1851 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1852 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1854 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1855 target by-reference return mechanism. This is not supported all the
1856 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1857 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1858 the RESULT_DECL instead - see gnat_genericize for more details. */
1859 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1861 tree result_decl = DECL_RESULT (subprog_decl);
1863 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1864 DECL_BY_REFERENCE (result_decl) = 1;
1869 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1871 /* The expand_main_function circuitry expects "main_identifier_node" to
1872 designate the DECL_NAME of the 'main' entry point, in turn expected
1873 to be declared as the "main" function literally by default. Ada
1874 program entry points are typically declared with a different name
1875 within the binder generated file, exported as 'main' to satisfy the
1876 system expectations. Redirect main_identifier_node in this case. */
1877 if (asm_name == main_identifier_node)
1878 main_identifier_node = DECL_NAME (subprog_decl);
1881 process_attributes (subprog_decl, attr_list);
1883 /* Add this decl to the current binding level. */
1884 gnat_pushdecl (subprog_decl, gnat_node);
1886 /* Output the assembler code and/or RTL for the declaration. */
1887 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1889 return subprog_decl;
1892 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1893 body. This routine needs to be invoked before processing the declarations
1894 appearing in the subprogram. */
1897 begin_subprog_body (tree subprog_decl)
1901 current_function_decl = subprog_decl;
1902 announce_function (subprog_decl);
1904 /* Enter a new binding level and show that all the parameters belong to
1907 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1908 param_decl = TREE_CHAIN (param_decl))
1909 DECL_CONTEXT (param_decl) = subprog_decl;
1911 make_decl_rtl (subprog_decl);
1913 /* We handle pending sizes via the elaboration of types, so we don't need to
1914 save them. This causes them to be marked as part of the outer function
1915 and then discarded. */
1916 get_pending_sizes ();
1920 /* Helper for the genericization callback. Return a dereference of VAL
1921 if it is of a reference type. */
1924 convert_from_reference (tree val)
1926 tree value_type, ref;
1928 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1931 value_type = TREE_TYPE (TREE_TYPE (val));
1932 ref = build1 (INDIRECT_REF, value_type, val);
1934 /* See if what we reference is CONST or VOLATILE, which requires
1935 looking into array types to get to the component type. */
1937 while (TREE_CODE (value_type) == ARRAY_TYPE)
1938 value_type = TREE_TYPE (value_type);
1941 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1942 TREE_THIS_VOLATILE (ref)
1943 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1945 TREE_SIDE_EFFECTS (ref)
1946 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1951 /* Helper for the genericization callback. Returns true if T denotes
1952 a RESULT_DECL with DECL_BY_REFERENCE set. */
1955 is_byref_result (tree t)
1957 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1961 /* Tree walking callback for gnat_genericize. Currently ...
1963 o Adjust references to the function's DECL_RESULT if it is marked
1964 DECL_BY_REFERENCE and so has had its type turned into a reference
1965 type at the end of the function compilation. */
1968 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1970 /* This implementation is modeled after what the C++ front-end is
1971 doing, basis of the downstream passes behavior. */
1973 tree stmt = *stmt_p;
1974 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1976 /* If we have a direct mention of the result decl, dereference. */
1977 if (is_byref_result (stmt))
1979 *stmt_p = convert_from_reference (stmt);
1984 /* Otherwise, no need to walk the same tree twice. */
1985 if (pointer_set_contains (p_set, stmt))
1991 /* If we are taking the address of what now is a reference, just get the
1993 if (TREE_CODE (stmt) == ADDR_EXPR
1994 && is_byref_result (TREE_OPERAND (stmt, 0)))
1996 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2000 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
2001 else if (TREE_CODE (stmt) == RETURN_EXPR
2002 && TREE_OPERAND (stmt, 0)
2003 && is_byref_result (TREE_OPERAND (stmt, 0)))
2006 /* Don't look inside trees that cannot embed references of interest. */
2007 else if (IS_TYPE_OR_DECL_P (stmt))
2010 pointer_set_insert (p_set, *stmt_p);
2015 /* Perform lowering of Ada trees to GENERIC. In particular:
2017 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2018 and adjust all the references to this decl accordingly. */
2021 gnat_genericize (tree fndecl)
2023 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2024 was handled by simply setting TREE_ADDRESSABLE on the result type.
2025 Everything required to actually pass by invisible ref using the target
2026 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2028 This doesn't work with GCC 4 any more for several reasons. First, the
2029 gimplification process might need the creation of temporaries of this
2030 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2031 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2032 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2033 be explicitly accounted for by the front-end in the function body.
2035 We achieve the complete transformation in two steps:
2037 1/ create_subprog_decl performs early attribute tweaks: it clears
2038 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2039 the result decl. The former ensures that the bit isn't set in the GCC
2040 tree saved for the function, so prevents ICEs on temporary creation.
2041 The latter we use here to trigger the rest of the processing.
2043 2/ This function performs the type transformation on the result decl
2044 and adjusts all the references to this decl from the function body
2047 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2048 strategy, which escapes the gimplifier temporary creation issues by
2049 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2050 on simple specific support code in aggregate_value_p to look at the
2051 target function result decl explicitly. */
2053 struct pointer_set_t *p_set;
2054 tree decl_result = DECL_RESULT (fndecl);
2056 if (!DECL_BY_REFERENCE (decl_result))
2059 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2060 occurrences in the function body using the common tree-walking facility.
2061 We want to see every occurrence of the result decl to adjust the
2062 referencing tree, so need to use our own pointer set to control which
2063 trees should be visited again or not. */
2065 p_set = pointer_set_create ();
2067 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2068 TREE_ADDRESSABLE (decl_result) = 0;
2069 relayout_decl (decl_result);
2071 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2073 pointer_set_destroy (p_set);
2076 /* Finish the definition of the current subprogram BODY and finalize it. */
2079 end_subprog_body (tree body)
2081 tree fndecl = current_function_decl;
2083 /* Mark the BLOCK for this level as being for this function and pop the
2084 level. Since the vars in it are the parameters, clear them. */
2085 BLOCK_VARS (current_binding_level->block) = 0;
2086 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2087 DECL_INITIAL (fndecl) = current_binding_level->block;
2090 /* We handle pending sizes via the elaboration of types, so we don't
2091 need to save them. */
2092 get_pending_sizes ();
2094 /* Mark the RESULT_DECL as being in this subprogram. */
2095 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2097 DECL_SAVED_TREE (fndecl) = body;
2099 current_function_decl = DECL_CONTEXT (fndecl);
2102 /* We cannot track the location of errors past this point. */
2103 error_gnat_node = Empty;
2105 /* If we're only annotating types, don't actually compile this function. */
2106 if (type_annotate_only)
2109 /* Perform the required pre-gimplification transformations on the tree. */
2110 gnat_genericize (fndecl);
2112 /* Dump functions before gimplification. */
2113 dump_function (TDI_original, fndecl);
2115 /* ??? This special handling of nested functions is probably obsolete. */
2116 if (!DECL_CONTEXT (fndecl))
2117 cgraph_finalize_function (fndecl, false);
2119 /* Register this function with cgraph just far enough to get it
2120 added to our parent's nested function list. */
2121 (void) cgraph_node (fndecl);
2125 gnat_builtin_function (tree decl)
2127 gnat_pushdecl (decl, Empty);
2131 /* Return an integer type with the number of bits of precision given by
2132 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2133 it is a signed type. */
2136 gnat_type_for_size (unsigned precision, int unsignedp)
2141 if (precision <= 2 * MAX_BITS_PER_WORD
2142 && signed_and_unsigned_types[precision][unsignedp])
2143 return signed_and_unsigned_types[precision][unsignedp];
2146 t = make_unsigned_type (precision);
2148 t = make_signed_type (precision);
2150 if (precision <= 2 * MAX_BITS_PER_WORD)
2151 signed_and_unsigned_types[precision][unsignedp] = t;
2155 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2156 TYPE_NAME (t) = get_identifier (type_name);
2162 /* Likewise for floating-point types. */
2165 float_type_for_precision (int precision, enum machine_mode mode)
2170 if (float_types[(int) mode])
2171 return float_types[(int) mode];
2173 float_types[(int) mode] = t = make_node (REAL_TYPE);
2174 TYPE_PRECISION (t) = precision;
2177 gcc_assert (TYPE_MODE (t) == mode);
2180 sprintf (type_name, "FLOAT_%d", precision);
2181 TYPE_NAME (t) = get_identifier (type_name);
2187 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2188 an unsigned type; otherwise a signed type is returned. */
2191 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2193 if (mode == BLKmode)
2195 else if (mode == VOIDmode)
2196 return void_type_node;
2197 else if (COMPLEX_MODE_P (mode))
2199 else if (SCALAR_FLOAT_MODE_P (mode))
2200 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2201 else if (SCALAR_INT_MODE_P (mode))
2202 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2207 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2210 gnat_unsigned_type (tree type_node)
2212 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2214 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2216 type = copy_node (type);
2217 TREE_TYPE (type) = type_node;
2219 else if (TREE_TYPE (type_node)
2220 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2221 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2223 type = copy_node (type);
2224 TREE_TYPE (type) = TREE_TYPE (type_node);
2230 /* Return the signed version of a TYPE_NODE, a scalar type. */
2233 gnat_signed_type (tree type_node)
2235 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2237 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2239 type = copy_node (type);
2240 TREE_TYPE (type) = type_node;
2242 else if (TREE_TYPE (type_node)
2243 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2244 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2246 type = copy_node (type);
2247 TREE_TYPE (type) = TREE_TYPE (type_node);
2253 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2254 transparently converted to each other. */
2257 gnat_types_compatible_p (tree t1, tree t2)
2259 enum tree_code code;
2261 /* This is the default criterion. */
2262 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2265 /* We only check structural equivalence here. */
2266 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2269 /* Array types are also compatible if they are constrained and have
2270 the same component type and the same domain. */
2271 if (code == ARRAY_TYPE
2272 && TREE_TYPE (t1) == TREE_TYPE (t2)
2273 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2274 || (TYPE_DOMAIN (t1)
2276 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2277 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2278 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2279 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2282 /* Padding record types are also compatible if they pad the same
2283 type and have the same constant size. */
2284 if (code == RECORD_TYPE
2285 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2286 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2287 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2293 /* EXP is an expression for the size of an object. If this size contains
2294 discriminant references, replace them with the maximum (if MAX_P) or
2295 minimum (if !MAX_P) possible value of the discriminant. */
2298 max_size (tree exp, bool max_p)
2300 enum tree_code code = TREE_CODE (exp);
2301 tree type = TREE_TYPE (exp);
2303 switch (TREE_CODE_CLASS (code))
2305 case tcc_declaration:
2310 if (code == CALL_EXPR)
2315 t = maybe_inline_call_in_expr (exp);
2317 return max_size (t, max_p);
2319 n = call_expr_nargs (exp);
2321 argarray = (tree *) alloca (n * sizeof (tree));
2322 for (i = 0; i < n; i++)
2323 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2324 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2329 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2330 modify. Otherwise, we treat it like a variable. */
2331 if (!CONTAINS_PLACEHOLDER_P (exp))
2334 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2336 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2338 case tcc_comparison:
2339 return max_p ? size_one_node : size_zero_node;
2343 case tcc_expression:
2344 switch (TREE_CODE_LENGTH (code))
2347 if (code == NON_LVALUE_EXPR)
2348 return max_size (TREE_OPERAND (exp, 0), max_p);
2351 fold_build1 (code, type,
2352 max_size (TREE_OPERAND (exp, 0),
2353 code == NEGATE_EXPR ? !max_p : max_p));
2356 if (code == COMPOUND_EXPR)
2357 return max_size (TREE_OPERAND (exp, 1), max_p);
2359 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2360 may provide a tighter bound on max_size. */
2361 if (code == MINUS_EXPR
2362 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2364 tree lhs = fold_build2 (MINUS_EXPR, type,
2365 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2366 TREE_OPERAND (exp, 1));
2367 tree rhs = fold_build2 (MINUS_EXPR, type,
2368 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2369 TREE_OPERAND (exp, 1));
2370 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2371 max_size (lhs, max_p),
2372 max_size (rhs, max_p));
2376 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2377 tree rhs = max_size (TREE_OPERAND (exp, 1),
2378 code == MINUS_EXPR ? !max_p : max_p);
2380 /* Special-case wanting the maximum value of a MIN_EXPR.
2381 In that case, if one side overflows, return the other.
2382 sizetype is signed, but we know sizes are non-negative.
2383 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2384 overflowing or the maximum possible value and the RHS
2388 && TREE_CODE (rhs) == INTEGER_CST
2389 && TREE_OVERFLOW (rhs))
2393 && TREE_CODE (lhs) == INTEGER_CST
2394 && TREE_OVERFLOW (lhs))
2396 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2397 && ((TREE_CODE (lhs) == INTEGER_CST
2398 && TREE_OVERFLOW (lhs))
2399 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2400 && !TREE_CONSTANT (rhs))
2403 return fold_build2 (code, type, lhs, rhs);
2407 if (code == SAVE_EXPR)
2409 else if (code == COND_EXPR)
2410 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2411 max_size (TREE_OPERAND (exp, 1), max_p),
2412 max_size (TREE_OPERAND (exp, 2), max_p));
2415 /* Other tree classes cannot happen. */
2423 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2424 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2425 Return a constructor for the template. */
2428 build_template (tree template_type, tree array_type, tree expr)
2430 tree template_elts = NULL_TREE;
2431 tree bound_list = NULL_TREE;
2434 while (TREE_CODE (array_type) == RECORD_TYPE
2435 && (TYPE_IS_PADDING_P (array_type)
2436 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2437 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2439 if (TREE_CODE (array_type) == ARRAY_TYPE
2440 || (TREE_CODE (array_type) == INTEGER_TYPE
2441 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2442 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2444 /* First make the list for a CONSTRUCTOR for the template. Go down the
2445 field list of the template instead of the type chain because this
2446 array might be an Ada array of arrays and we can't tell where the
2447 nested arrays stop being the underlying object. */
2449 for (field = TYPE_FIELDS (template_type); field;
2451 ? (bound_list = TREE_CHAIN (bound_list))
2452 : (array_type = TREE_TYPE (array_type))),
2453 field = TREE_CHAIN (TREE_CHAIN (field)))
2455 tree bounds, min, max;
2457 /* If we have a bound list, get the bounds from there. Likewise
2458 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2459 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2460 This will give us a maximum range. */
2462 bounds = TREE_VALUE (bound_list);
2463 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2464 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2465 else if (expr && TREE_CODE (expr) == PARM_DECL
2466 && DECL_BY_COMPONENT_PTR_P (expr))
2467 bounds = TREE_TYPE (field);
2471 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2472 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2474 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2475 substitute it from OBJECT. */
2476 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2477 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2479 template_elts = tree_cons (TREE_CHAIN (field), max,
2480 tree_cons (field, min, template_elts));
2483 return gnat_build_constructor (template_type, nreverse (template_elts));
2486 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2487 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2488 in the type contains in its DECL_INITIAL the expression to use when
2489 a constructor is made for the type. GNAT_ENTITY is an entity used
2490 to print out an error message if the mechanism cannot be applied to
2491 an object of that type and also for the name. */
2494 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2496 tree record_type = make_node (RECORD_TYPE);
2497 tree pointer32_type;
2498 tree field_list = 0;
2507 /* If TYPE is an unconstrained array, use the underlying array type. */
2508 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2509 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2511 /* If this is an array, compute the number of dimensions in the array,
2512 get the index types, and point to the inner type. */
2513 if (TREE_CODE (type) != ARRAY_TYPE)
2516 for (ndim = 1, inner_type = type;
2517 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2518 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2519 ndim++, inner_type = TREE_TYPE (inner_type))
2522 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2524 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2525 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2526 for (i = ndim - 1, inner_type = type;
2528 i--, inner_type = TREE_TYPE (inner_type))
2529 idx_arr[i] = TYPE_DOMAIN (inner_type);
2531 for (i = 0, inner_type = type;
2533 i++, inner_type = TREE_TYPE (inner_type))
2534 idx_arr[i] = TYPE_DOMAIN (inner_type);
2536 /* Now get the DTYPE value. */
2537 switch (TREE_CODE (type))
2542 if (TYPE_VAX_FLOATING_POINT_P (type))
2543 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2556 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2559 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2562 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2565 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2568 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2571 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2577 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2581 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2582 && TYPE_VAX_FLOATING_POINT_P (type))
2583 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2595 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2606 /* Get the CLASS value. */
2609 case By_Descriptor_A:
2610 case By_Short_Descriptor_A:
2613 case By_Descriptor_NCA:
2614 case By_Short_Descriptor_NCA:
2617 case By_Descriptor_SB:
2618 case By_Short_Descriptor_SB:
2622 case By_Short_Descriptor:
2623 case By_Descriptor_S:
2624 case By_Short_Descriptor_S:
2630 /* Make the type for a descriptor for VMS. The first four fields
2631 are the same for all types. */
2634 = chainon (field_list,
2635 make_descriptor_field
2636 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2637 size_in_bytes ((mech == By_Descriptor_A ||
2638 mech == By_Short_Descriptor_A)
2639 ? inner_type : type)));
2641 field_list = chainon (field_list,
2642 make_descriptor_field ("DTYPE",
2643 gnat_type_for_size (8, 1),
2644 record_type, size_int (dtype)));
2645 field_list = chainon (field_list,
2646 make_descriptor_field ("CLASS",
2647 gnat_type_for_size (8, 1),
2648 record_type, size_int (klass)));
2650 /* Of course this will crash at run-time if the address space is not
2651 within the low 32 bits, but there is nothing else we can do. */
2652 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2655 = chainon (field_list,
2656 make_descriptor_field
2657 ("POINTER", pointer32_type, record_type,
2658 build_unary_op (ADDR_EXPR,
2660 build0 (PLACEHOLDER_EXPR, type))));
2665 case By_Short_Descriptor:
2666 case By_Descriptor_S:
2667 case By_Short_Descriptor_S:
2670 case By_Descriptor_SB:
2671 case By_Short_Descriptor_SB:
2673 = chainon (field_list,
2674 make_descriptor_field
2675 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2676 TREE_CODE (type) == ARRAY_TYPE
2677 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2679 = chainon (field_list,
2680 make_descriptor_field
2681 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2682 TREE_CODE (type) == ARRAY_TYPE
2683 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2686 case By_Descriptor_A:
2687 case By_Short_Descriptor_A:
2688 case By_Descriptor_NCA:
2689 case By_Short_Descriptor_NCA:
2690 field_list = chainon (field_list,
2691 make_descriptor_field ("SCALE",
2692 gnat_type_for_size (8, 1),
2696 field_list = chainon (field_list,
2697 make_descriptor_field ("DIGITS",
2698 gnat_type_for_size (8, 1),
2703 = chainon (field_list,
2704 make_descriptor_field
2705 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2706 size_int ((mech == By_Descriptor_NCA ||
2707 mech == By_Short_Descriptor_NCA)
2709 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2710 : (TREE_CODE (type) == ARRAY_TYPE
2711 && TYPE_CONVENTION_FORTRAN_P (type)
2714 field_list = chainon (field_list,
2715 make_descriptor_field ("DIMCT",
2716 gnat_type_for_size (8, 1),
2720 field_list = chainon (field_list,
2721 make_descriptor_field ("ARSIZE",
2722 gnat_type_for_size (32, 1),
2724 size_in_bytes (type)));
2726 /* Now build a pointer to the 0,0,0... element. */
2727 tem = build0 (PLACEHOLDER_EXPR, type);
2728 for (i = 0, inner_type = type; i < ndim;
2729 i++, inner_type = TREE_TYPE (inner_type))
2730 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2731 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2732 NULL_TREE, NULL_TREE);
2735 = chainon (field_list,
2736 make_descriptor_field
2738 build_pointer_type_for_mode (inner_type, SImode, false),
2741 build_pointer_type_for_mode (inner_type, SImode,
2745 /* Next come the addressing coefficients. */
2746 tem = size_one_node;
2747 for (i = 0; i < ndim; i++)
2751 = size_binop (MULT_EXPR, tem,
2752 size_binop (PLUS_EXPR,
2753 size_binop (MINUS_EXPR,
2754 TYPE_MAX_VALUE (idx_arr[i]),
2755 TYPE_MIN_VALUE (idx_arr[i])),
2758 fname[0] = ((mech == By_Descriptor_NCA ||
2759 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2760 fname[1] = '0' + i, fname[2] = 0;
2762 = chainon (field_list,
2763 make_descriptor_field (fname,
2764 gnat_type_for_size (32, 1),
2765 record_type, idx_length));
2767 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2771 /* Finally here are the bounds. */
2772 for (i = 0; i < ndim; i++)
2776 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2778 = chainon (field_list,
2779 make_descriptor_field
2780 (fname, gnat_type_for_size (32, 1), record_type,
2781 TYPE_MIN_VALUE (idx_arr[i])));
2785 = chainon (field_list,
2786 make_descriptor_field
2787 (fname, gnat_type_for_size (32, 1), record_type,
2788 TYPE_MAX_VALUE (idx_arr[i])));
2793 post_error ("unsupported descriptor type for &", gnat_entity);
2796 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2797 finish_record_type (record_type, field_list, 0, true);
2801 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2802 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2803 in the type contains in its DECL_INITIAL the expression to use when
2804 a constructor is made for the type. GNAT_ENTITY is an entity used
2805 to print out an error message if the mechanism cannot be applied to
2806 an object of that type and also for the name. */
2809 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2811 tree record64_type = make_node (RECORD_TYPE);
2812 tree pointer64_type;
2813 tree field_list64 = 0;
2822 /* If TYPE is an unconstrained array, use the underlying array type. */
2823 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2824 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2826 /* If this is an array, compute the number of dimensions in the array,
2827 get the index types, and point to the inner type. */
2828 if (TREE_CODE (type) != ARRAY_TYPE)
2831 for (ndim = 1, inner_type = type;
2832 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2833 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2834 ndim++, inner_type = TREE_TYPE (inner_type))
2837 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2839 if (mech != By_Descriptor_NCA
2840 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2841 for (i = ndim - 1, inner_type = type;
2843 i--, inner_type = TREE_TYPE (inner_type))
2844 idx_arr[i] = TYPE_DOMAIN (inner_type);
2846 for (i = 0, inner_type = type;
2848 i++, inner_type = TREE_TYPE (inner_type))
2849 idx_arr[i] = TYPE_DOMAIN (inner_type);
2851 /* Now get the DTYPE value. */
2852 switch (TREE_CODE (type))
2857 if (TYPE_VAX_FLOATING_POINT_P (type))
2858 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2871 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2874 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2877 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2880 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2883 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2886 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2892 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2896 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2897 && TYPE_VAX_FLOATING_POINT_P (type))
2898 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2910 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2921 /* Get the CLASS value. */
2924 case By_Descriptor_A:
2927 case By_Descriptor_NCA:
2930 case By_Descriptor_SB:
2934 case By_Descriptor_S:
2940 /* Make the type for a 64bit descriptor for VMS. The first six fields
2941 are the same for all types. */
2943 field_list64 = chainon (field_list64,
2944 make_descriptor_field ("MBO",
2945 gnat_type_for_size (16, 1),
2946 record64_type, size_int (1)));
2948 field_list64 = chainon (field_list64,
2949 make_descriptor_field ("DTYPE",
2950 gnat_type_for_size (8, 1),
2951 record64_type, size_int (dtype)));
2952 field_list64 = chainon (field_list64,
2953 make_descriptor_field ("CLASS",
2954 gnat_type_for_size (8, 1),
2955 record64_type, size_int (klass)));
2957 field_list64 = chainon (field_list64,
2958 make_descriptor_field ("MBMO",
2959 gnat_type_for_size (32, 1),
2960 record64_type, ssize_int (-1)));
2963 = chainon (field_list64,
2964 make_descriptor_field
2965 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2966 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2968 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2971 = chainon (field_list64,
2972 make_descriptor_field
2973 ("POINTER", pointer64_type, record64_type,
2974 build_unary_op (ADDR_EXPR,
2976 build0 (PLACEHOLDER_EXPR, type))));
2981 case By_Descriptor_S:
2984 case By_Descriptor_SB:
2986 = chainon (field_list64,
2987 make_descriptor_field
2988 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2989 TREE_CODE (type) == ARRAY_TYPE
2990 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2992 = chainon (field_list64,
2993 make_descriptor_field
2994 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2995 TREE_CODE (type) == ARRAY_TYPE
2996 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2999 case By_Descriptor_A:
3000 case By_Descriptor_NCA:
3001 field_list64 = chainon (field_list64,
3002 make_descriptor_field ("SCALE",
3003 gnat_type_for_size (8, 1),
3007 field_list64 = chainon (field_list64,
3008 make_descriptor_field ("DIGITS",
3009 gnat_type_for_size (8, 1),
3014 = chainon (field_list64,
3015 make_descriptor_field
3016 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3017 size_int (mech == By_Descriptor_NCA
3019 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3020 : (TREE_CODE (type) == ARRAY_TYPE
3021 && TYPE_CONVENTION_FORTRAN_P (type)
3024 field_list64 = chainon (field_list64,
3025 make_descriptor_field ("DIMCT",
3026 gnat_type_for_size (8, 1),
3030 field_list64 = chainon (field_list64,
3031 make_descriptor_field ("MBZ",
3032 gnat_type_for_size (32, 1),
3035 field_list64 = chainon (field_list64,
3036 make_descriptor_field ("ARSIZE",
3037 gnat_type_for_size (64, 1),
3039 size_in_bytes (type)));
3041 /* Now build a pointer to the 0,0,0... element. */
3042 tem = build0 (PLACEHOLDER_EXPR, type);
3043 for (i = 0, inner_type = type; i < ndim;
3044 i++, inner_type = TREE_TYPE (inner_type))
3045 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3046 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3047 NULL_TREE, NULL_TREE);
3050 = chainon (field_list64,
3051 make_descriptor_field
3053 build_pointer_type_for_mode (inner_type, DImode, false),
3056 build_pointer_type_for_mode (inner_type, DImode,
3060 /* Next come the addressing coefficients. */
3061 tem = size_one_node;
3062 for (i = 0; i < ndim; i++)
3066 = size_binop (MULT_EXPR, tem,
3067 size_binop (PLUS_EXPR,
3068 size_binop (MINUS_EXPR,
3069 TYPE_MAX_VALUE (idx_arr[i]),
3070 TYPE_MIN_VALUE (idx_arr[i])),
3073 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3074 fname[1] = '0' + i, fname[2] = 0;
3076 = chainon (field_list64,
3077 make_descriptor_field (fname,
3078 gnat_type_for_size (64, 1),
3079 record64_type, idx_length));
3081 if (mech == By_Descriptor_NCA)
3085 /* Finally here are the bounds. */
3086 for (i = 0; i < ndim; i++)
3090 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3092 = chainon (field_list64,
3093 make_descriptor_field
3094 (fname, gnat_type_for_size (64, 1), record64_type,
3095 TYPE_MIN_VALUE (idx_arr[i])));
3099 = chainon (field_list64,
3100 make_descriptor_field
3101 (fname, gnat_type_for_size (64, 1), record64_type,
3102 TYPE_MAX_VALUE (idx_arr[i])));
3107 post_error ("unsupported descriptor type for &", gnat_entity);
3110 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3111 finish_record_type (record64_type, field_list64, 0, true);
3112 return record64_type;
3115 /* Utility routine for above code to make a field. */
3118 make_descriptor_field (const char *name, tree type,
3119 tree rec_type, tree initial)
3122 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3124 DECL_INITIAL (field) = initial;
3128 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3129 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3130 which the VMS descriptor is passed. */
3133 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3135 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3136 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3137 /* The CLASS field is the 3rd field in the descriptor. */
3138 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3139 /* The POINTER field is the 6th field in the descriptor. */
3140 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
3142 /* Retrieve the value of the POINTER field. */
3144 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3146 if (POINTER_TYPE_P (gnu_type))
3147 return convert (gnu_type, gnu_expr64);
3149 else if (TYPE_FAT_POINTER_P (gnu_type))
3151 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3152 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3153 tree template_type = TREE_TYPE (p_bounds_type);
3154 tree min_field = TYPE_FIELDS (template_type);
3155 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3156 tree template_tree, template_addr, aflags, dimct, t, u;
3157 /* See the head comment of build_vms_descriptor. */
3158 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3159 tree lfield, ufield;
3161 /* Convert POINTER to the type of the P_ARRAY field. */
3162 gnu_expr64 = convert (p_array_type, gnu_expr64);
3166 case 1: /* Class S */
3167 case 15: /* Class SB */
3168 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3169 t = TREE_CHAIN (TREE_CHAIN (klass));
3170 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3171 t = tree_cons (min_field,
3172 convert (TREE_TYPE (min_field), integer_one_node),
3173 tree_cons (max_field,
3174 convert (TREE_TYPE (max_field), t),
3176 template_tree = gnat_build_constructor (template_type, t);
3177 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3179 /* For class S, we are done. */
3183 /* Test that we really have a SB descriptor, like DEC Ada. */
3184 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3185 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3186 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3187 /* If so, there is already a template in the descriptor and
3188 it is located right after the POINTER field. The fields are
3189 64bits so they must be repacked. */
3190 t = TREE_CHAIN (pointer64);
3191 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3192 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3195 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3197 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3199 /* Build the template in the form of a constructor. */
3200 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3201 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3202 ufield, NULL_TREE));
3203 template_tree = gnat_build_constructor (template_type, t);
3205 /* Otherwise use the {1, LENGTH} template we build above. */
3206 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3207 build_unary_op (ADDR_EXPR, p_bounds_type,
3212 case 4: /* Class A */
3213 /* The AFLAGS field is the 3rd field after the pointer in the
3215 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3216 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3217 /* The DIMCT field is the next field in the descriptor after
3220 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3221 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3222 or FL_COEFF or FL_BOUNDS not set. */
3223 u = build_int_cst (TREE_TYPE (aflags), 192);
3224 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3225 build_binary_op (NE_EXPR, integer_type_node,
3227 convert (TREE_TYPE (dimct),
3229 build_binary_op (NE_EXPR, integer_type_node,
3230 build2 (BIT_AND_EXPR,
3234 /* There is already a template in the descriptor and it is located
3235 in block 3. The fields are 64bits so they must be repacked. */
3236 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3238 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3239 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3242 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3244 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3246 /* Build the template in the form of a constructor. */
3247 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3248 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3249 ufield, NULL_TREE));
3250 template_tree = gnat_build_constructor (template_type, t);
3251 template_tree = build3 (COND_EXPR, template_type, u,
3252 build_call_raise (CE_Length_Check_Failed, Empty,
3253 N_Raise_Constraint_Error),
3256 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3259 case 10: /* Class NCA */
3261 post_error ("unsupported descriptor type for &", gnat_subprog);
3262 template_addr = integer_zero_node;
3266 /* Build the fat pointer in the form of a constructor. */
3267 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3268 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3269 template_addr, NULL_TREE));
3270 return gnat_build_constructor (gnu_type, t);
3277 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3278 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3279 which the VMS descriptor is passed. */
3282 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3284 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3285 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3286 /* The CLASS field is the 3rd field in the descriptor. */
3287 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3288 /* The POINTER field is the 4th field in the descriptor. */
3289 tree pointer = TREE_CHAIN (klass);
3291 /* Retrieve the value of the POINTER field. */
3293 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3295 if (POINTER_TYPE_P (gnu_type))
3296 return convert (gnu_type, gnu_expr32);
3298 else if (TYPE_FAT_POINTER_P (gnu_type))
3300 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3301 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3302 tree template_type = TREE_TYPE (p_bounds_type);
3303 tree min_field = TYPE_FIELDS (template_type);
3304 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3305 tree template_tree, template_addr, aflags, dimct, t, u;
3306 /* See the head comment of build_vms_descriptor. */
3307 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3309 /* Convert POINTER to the type of the P_ARRAY field. */
3310 gnu_expr32 = convert (p_array_type, gnu_expr32);
3314 case 1: /* Class S */
3315 case 15: /* Class SB */
3316 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3317 t = TYPE_FIELDS (desc_type);
3318 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3319 t = tree_cons (min_field,
3320 convert (TREE_TYPE (min_field), integer_one_node),
3321 tree_cons (max_field,
3322 convert (TREE_TYPE (max_field), t),
3324 template_tree = gnat_build_constructor (template_type, t);
3325 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3327 /* For class S, we are done. */
3331 /* Test that we really have a SB descriptor, like DEC Ada. */
3332 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3333 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3334 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3335 /* If so, there is already a template in the descriptor and
3336 it is located right after the POINTER field. */
3337 t = TREE_CHAIN (pointer);
3339 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3340 /* Otherwise use the {1, LENGTH} template we build above. */
3341 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3342 build_unary_op (ADDR_EXPR, p_bounds_type,
3347 case 4: /* Class A */
3348 /* The AFLAGS field is the 7th field in the descriptor. */
3349 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3350 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3351 /* The DIMCT field is the 8th field in the descriptor. */
3353 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3354 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3355 or FL_COEFF or FL_BOUNDS not set. */
3356 u = build_int_cst (TREE_TYPE (aflags), 192);
3357 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3358 build_binary_op (NE_EXPR, integer_type_node,
3360 convert (TREE_TYPE (dimct),
3362 build_binary_op (NE_EXPR, integer_type_node,
3363 build2 (BIT_AND_EXPR,
3367 /* There is already a template in the descriptor and it is
3368 located at the start of block 3 (12th field). */
3369 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3371 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3372 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3373 build_call_raise (CE_Length_Check_Failed, Empty,
3374 N_Raise_Constraint_Error),
3377 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3380 case 10: /* Class NCA */
3382 post_error ("unsupported descriptor type for &", gnat_subprog);
3383 template_addr = integer_zero_node;
3387 /* Build the fat pointer in the form of a constructor. */
3388 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3389 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3390 template_addr, NULL_TREE));
3392 return gnat_build_constructor (gnu_type, t);
3399 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3400 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3401 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3402 VMS descriptor is passed. */
3405 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3406 Entity_Id gnat_subprog)
3408 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3409 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3410 tree mbo = TYPE_FIELDS (desc_type);
3411 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3412 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3413 tree is64bit, gnu_expr32, gnu_expr64;
3415 /* If the field name is not MBO, it must be 32-bit and no alternate.
3416 Otherwise primary must be 64-bit and alternate 32-bit. */
3417 if (strcmp (mbostr, "MBO") != 0)
3418 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3420 /* Build the test for 64-bit descriptor. */
3421 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3422 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3424 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3425 build_binary_op (EQ_EXPR, integer_type_node,
3426 convert (integer_type_node, mbo),
3428 build_binary_op (EQ_EXPR, integer_type_node,
3429 convert (integer_type_node, mbmo),
3430 integer_minus_one_node));
3432 /* Build the 2 possible end results. */
3433 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3434 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3435 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3437 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3440 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3441 and the GNAT node GNAT_SUBPROG. */
3444 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3446 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3447 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3448 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3451 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3452 gnu_param_list = NULL_TREE;
3454 begin_subprog_body (gnu_stub_decl);
3457 start_stmt_group ();
3459 /* Loop over the parameters of the stub and translate any of them
3460 passed by descriptor into a by reference one. */
3461 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3462 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3464 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3465 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3467 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3469 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3471 DECL_PARM_ALT_TYPE (gnu_stub_param),
3474 gnu_param = gnu_stub_param;
3476 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3479 gnu_body = end_stmt_group ();
3481 /* Invoke the internal subprogram. */
3482 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3484 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3486 nreverse (gnu_param_list));
3488 /* Propagate the return value, if any. */
3489 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3490 append_to_statement_list (gnu_subprog_call, &gnu_body);
3492 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3498 allocate_struct_function (gnu_stub_decl, false);
3499 end_subprog_body (gnu_body);
3502 /* Build a type to be used to represent an aliased object whose nominal
3503 type is an unconstrained array. This consists of a RECORD_TYPE containing
3504 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3505 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3506 is used to represent an arbitrary unconstrained object. Use NAME
3507 as the name of the record. */
3510 build_unc_object_type (tree template_type, tree object_type, tree name)
3512 tree type = make_node (RECORD_TYPE);
3513 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3514 template_type, type, 0, 0, 0, 1);
3515 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3518 TYPE_NAME (type) = name;
3519 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3520 finish_record_type (type,
3521 chainon (chainon (NULL_TREE, template_field),
3528 /* Same, taking a thin or fat pointer type instead of a template type. */
3531 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3536 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3539 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3540 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3541 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3542 return build_unc_object_type (template_type, object_type, name);
3545 /* Shift the component offsets within an unconstrained object TYPE to make it
3546 suitable for use as a designated type for thin pointers. */
3549 shift_unc_components_for_thin_pointers (tree type)
3551 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3552 allocated past the BOUNDS template. The designated type is adjusted to
3553 have ARRAY at position zero and the template at a negative offset, so
3554 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3556 tree bounds_field = TYPE_FIELDS (type);
3557 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3559 DECL_FIELD_OFFSET (bounds_field)
3560 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3562 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3563 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3566 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3567 In the normal case this is just two adjustments, but we have more to
3568 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3571 update_pointer_to (tree old_type, tree new_type)
3573 tree ptr = TYPE_POINTER_TO (old_type);
3574 tree ref = TYPE_REFERENCE_TO (old_type);
3578 /* If this is the main variant, process all the other variants first. */
3579 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3580 for (type = TYPE_NEXT_VARIANT (old_type); type;
3581 type = TYPE_NEXT_VARIANT (type))
3582 update_pointer_to (type, new_type);
3584 /* If no pointers and no references, we are done. */
3588 /* Merge the old type qualifiers in the new type.
3590 Each old variant has qualifiers for specific reasons, and the new
3591 designated type as well. Each set of qualifiers represents useful
3592 information grabbed at some point, and merging the two simply unifies
3593 these inputs into the final type description.
3595 Consider for instance a volatile type frozen after an access to constant
3596 type designating it; after the designated type's freeze, we get here with
3597 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3598 when the access type was processed. We will make a volatile and readonly
3599 designated type, because that's what it really is.
3601 We might also get here for a non-dummy OLD_TYPE variant with different
3602 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3603 to private record type elaboration (see the comments around the call to
3604 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3605 the qualifiers in those cases too, to avoid accidentally discarding the
3606 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3608 = build_qualified_type (new_type,
3609 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3611 /* If old type and new type are identical, there is nothing to do. */
3612 if (old_type == new_type)
3615 /* Otherwise, first handle the simple case. */
3616 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3618 TYPE_POINTER_TO (new_type) = ptr;
3619 TYPE_REFERENCE_TO (new_type) = ref;
3621 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3622 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3623 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3624 TREE_TYPE (ptr1) = new_type;
3626 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3627 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3628 ref1 = TYPE_NEXT_VARIANT (ref1))
3629 TREE_TYPE (ref1) = new_type;
3632 /* Now deal with the unconstrained array case. In this case the "pointer"
3633 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3634 Turn them into pointers to the correct types using update_pointer_to. */
3635 else if (!TYPE_FAT_POINTER_P (ptr))
3640 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3641 tree array_field = TYPE_FIELDS (ptr);
3642 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3643 tree new_ptr = TYPE_POINTER_TO (new_type);
3647 /* Make pointers to the dummy template point to the real template. */
3649 (TREE_TYPE (TREE_TYPE (bounds_field)),
3650 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3652 /* The references to the template bounds present in the array type
3653 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3654 are updating PTR to make it a full replacement for NEW_PTR as
3655 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3656 to make it of type PTR. */
3657 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3658 build0 (PLACEHOLDER_EXPR, ptr),
3659 bounds_field, NULL_TREE);
3661 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3662 to the dummy array point to it. */
3664 (TREE_TYPE (TREE_TYPE (array_field)),
3665 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3666 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3668 /* Make PTR the pointer to NEW_TYPE. */
3669 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3670 = TREE_TYPE (new_type) = ptr;
3672 /* And show the original pointer NEW_PTR to the debugger. This is the
3673 counterpart of the equivalent processing in gnat_pushdecl when the
3674 unconstrained array type is frozen after access types to it. Note
3675 that update_pointer_to can be invoked multiple times on the same
3676 couple of types because of the type variants. */
3678 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3679 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3681 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3682 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3684 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3685 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3687 /* Now handle updating the allocation record, what the thin pointer
3688 points to. Update all pointers from the old record into the new
3689 one, update the type of the array field, and recompute the size. */
3690 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3692 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3693 = TREE_TYPE (TREE_TYPE (array_field));
3695 /* The size recomputation needs to account for alignment constraints, so
3696 we let layout_type work it out. This will reset the field offsets to
3697 what they would be in a regular record, so we shift them back to what
3698 we want them to be for a thin pointer designated type afterwards. */
3699 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3700 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3701 TYPE_SIZE (new_obj_rec) = 0;
3702 layout_type (new_obj_rec);
3704 shift_unc_components_for_thin_pointers (new_obj_rec);
3706 /* We are done, at last. */
3707 rest_of_record_type_compilation (ptr);
3711 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3712 unconstrained one. This involves making or finding a template. */
3715 convert_to_fat_pointer (tree type, tree expr)
3717 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3718 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3719 tree etype = TREE_TYPE (expr);
3722 /* If EXPR is null, make a fat pointer that contains null pointers to the
3723 template and array. */
3724 if (integer_zerop (expr))
3726 gnat_build_constructor
3728 tree_cons (TYPE_FIELDS (type),
3729 convert (p_array_type, expr),
3730 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3731 convert (build_pointer_type (template_type),
3735 /* If EXPR is a thin pointer, make template and data from the record.. */
3736 else if (TYPE_THIN_POINTER_P (etype))
3738 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3740 expr = save_expr (expr);
3741 if (TREE_CODE (expr) == ADDR_EXPR)
3742 expr = TREE_OPERAND (expr, 0);
3744 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3746 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3747 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3748 build_component_ref (expr, NULL_TREE,
3749 TREE_CHAIN (fields), false));
3752 /* Otherwise, build the constructor for the template. */
3754 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3756 /* The final result is a constructor for the fat pointer.
3758 If EXPR is an argument of a foreign convention subprogram, the type it
3759 points to is directly the component type. In this case, the expression
3760 type may not match the corresponding FIELD_DECL type at this point, so we
3761 call "convert" here to fix that up if necessary. This type consistency is
3762 required, for instance because it ensures that possible later folding of
3763 COMPONENT_REFs against this constructor always yields something of the
3764 same type as the initial reference.
3766 Note that the call to "build_template" above is still fine because it
3767 will only refer to the provided TEMPLATE_TYPE in this case. */
3769 gnat_build_constructor
3771 tree_cons (TYPE_FIELDS (type),
3772 convert (p_array_type, expr),
3773 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3774 build_unary_op (ADDR_EXPR, NULL_TREE,
3779 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3780 is something that is a fat pointer, so convert to it first if it EXPR
3781 is not already a fat pointer. */
3784 convert_to_thin_pointer (tree type, tree expr)
3786 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3788 = convert_to_fat_pointer
3789 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3791 /* We get the pointer to the data and use a NOP_EXPR to make it the
3793 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3795 expr = build1 (NOP_EXPR, type, expr);
3800 /* Create an expression whose value is that of EXPR,
3801 converted to type TYPE. The TREE_TYPE of the value
3802 is always TYPE. This function implements all reasonable
3803 conversions; callers should filter out those that are
3804 not permitted by the language being compiled. */
3807 convert (tree type, tree expr)
3809 enum tree_code code = TREE_CODE (type);
3810 tree etype = TREE_TYPE (expr);
3811 enum tree_code ecode = TREE_CODE (etype);
3813 /* If EXPR is already the right type, we are done. */
3817 /* If both input and output have padding and are of variable size, do this
3818 as an unchecked conversion. Likewise if one is a mere variant of the
3819 other, so we avoid a pointless unpad/repad sequence. */
3820 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3821 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3822 && (!TREE_CONSTANT (TYPE_SIZE (type))
3823 || !TREE_CONSTANT (TYPE_SIZE (etype))
3824 || gnat_types_compatible_p (type, etype)
3825 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3826 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3829 /* If the output type has padding, convert to the inner type and make a
3830 constructor to build the record, unless a variable size is involved. */
3831 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3833 /* If we previously converted from another type and our type is
3834 of variable size, remove the conversion to avoid the need for
3835 variable-sized temporaries. Likewise for a conversion between
3836 original and packable version. */
3837 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3838 && (!TREE_CONSTANT (TYPE_SIZE (type))
3839 || (ecode == RECORD_TYPE
3840 && TYPE_NAME (etype)
3841 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3842 expr = TREE_OPERAND (expr, 0);
3844 /* If we are just removing the padding from expr, convert the original
3845 object if we have variable size in order to avoid the need for some
3846 variable-sized temporaries. Likewise if the padding is a variant
3847 of the other, so we avoid a pointless unpad/repad sequence. */
3848 if (TREE_CODE (expr) == COMPONENT_REF
3849 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3850 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3851 && (!TREE_CONSTANT (TYPE_SIZE (type))
3852 || gnat_types_compatible_p (type,
3853 TREE_TYPE (TREE_OPERAND (expr, 0)))
3854 || (ecode == RECORD_TYPE
3855 && TYPE_NAME (etype)
3856 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3857 return convert (type, TREE_OPERAND (expr, 0));
3859 /* If the result type is a padded type with a self-referentially-sized
3860 field and the expression type is a record, do this as an unchecked
3862 if (TREE_CODE (etype) == RECORD_TYPE
3863 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3864 return unchecked_convert (type, expr, false);
3866 /* If we are converting between array types with variable size, do the
3867 final conversion as an unchecked conversion, again to avoid the need
3868 for some variable-sized temporaries. If valid, this conversion is
3869 very likely purely technical and without real effects. */
3870 if (TREE_CODE (etype) == ARRAY_TYPE
3871 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3872 && !TREE_CONSTANT (TYPE_SIZE (etype))
3873 && !TREE_CONSTANT (TYPE_SIZE (type)))
3874 return unchecked_convert (type,
3875 convert (TREE_TYPE (TYPE_FIELDS (type)),
3880 gnat_build_constructor (type,
3881 tree_cons (TYPE_FIELDS (type),
3883 (TYPE_FIELDS (type)),
3888 /* If the input type has padding, remove it and convert to the output type.
3889 The conditions ordering is arranged to ensure that the output type is not
3890 a padding type here, as it is not clear whether the conversion would
3891 always be correct if this was to happen. */
3892 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3896 /* If we have just converted to this padded type, just get the
3897 inner expression. */
3898 if (TREE_CODE (expr) == CONSTRUCTOR
3899 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3900 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3901 == TYPE_FIELDS (etype))
3903 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3905 /* Otherwise, build an explicit component reference. */
3908 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3910 return convert (type, unpadded);
3913 /* If the input is a biased type, adjust first. */
3914 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3915 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3916 fold_convert (TREE_TYPE (etype),
3918 TYPE_MIN_VALUE (etype)));
3920 /* If the input is a justified modular type, we need to extract the actual
3921 object before converting it to any other type with the exceptions of an
3922 unconstrained array or of a mere type variant. It is useful to avoid the
3923 extraction and conversion in the type variant case because it could end
3924 up replacing a VAR_DECL expr by a constructor and we might be about the
3925 take the address of the result. */
3926 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3927 && code != UNCONSTRAINED_ARRAY_TYPE
3928 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3929 return convert (type, build_component_ref (expr, NULL_TREE,
3930 TYPE_FIELDS (etype), false));
3932 /* If converting to a type that contains a template, convert to the data
3933 type and then build the template. */
3934 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3936 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3938 /* If the source already has a template, get a reference to the
3939 associated array only, as we are going to rebuild a template
3940 for the target type anyway. */
3941 expr = maybe_unconstrained_array (expr);
3944 gnat_build_constructor
3946 tree_cons (TYPE_FIELDS (type),
3947 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3948 obj_type, NULL_TREE),
3949 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3950 convert (obj_type, expr), NULL_TREE)));
3953 /* There are some special cases of expressions that we process
3955 switch (TREE_CODE (expr))
3961 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3962 conversion in gnat_expand_expr. NULL_EXPR does not represent
3963 and actual value, so no conversion is needed. */
3964 expr = copy_node (expr);
3965 TREE_TYPE (expr) = type;
3969 /* If we are converting a STRING_CST to another constrained array type,
3970 just make a new one in the proper type. */
3971 if (code == ecode && AGGREGATE_TYPE_P (etype)
3972 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3973 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3975 expr = copy_node (expr);
3976 TREE_TYPE (expr) = type;
3982 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3983 a new one in the proper type. */
3984 if (code == ecode && gnat_types_compatible_p (type, etype))
3986 expr = copy_node (expr);
3987 TREE_TYPE (expr) = type;
3991 /* Likewise for a conversion between original and packable version, but
3992 we have to work harder in order to preserve type consistency. */
3994 && code == RECORD_TYPE
3995 && TYPE_NAME (type) == TYPE_NAME (etype))
3997 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3998 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3999 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4000 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4001 unsigned HOST_WIDE_INT idx;
4004 /* Whether we need to clear TREE_CONSTANT et al. on the output
4005 constructor when we convert in place. */
4006 bool clear_constant = false;
4008 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4010 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4011 /* We expect only simple constructors. Otherwise, punt. */
4012 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4015 elt->value = convert (TREE_TYPE (field), value);
4017 /* If packing has made this field a bitfield and the input
4018 value couldn't be emitted statically any more, we need to
4019 clear TREE_CONSTANT on our output. */
4020 if (!clear_constant && TREE_CONSTANT (expr)
4021 && !CONSTRUCTOR_BITFIELD_P (efield)
4022 && CONSTRUCTOR_BITFIELD_P (field)
4023 && !initializer_constant_valid_for_bitfield_p (value))
4024 clear_constant = true;
4026 efield = TREE_CHAIN (efield);
4027 field = TREE_CHAIN (field);
4030 /* If we have been able to match and convert all the input fields
4031 to their output type, convert in place now. We'll fallback to a
4032 view conversion downstream otherwise. */
4035 expr = copy_node (expr);
4036 TREE_TYPE (expr) = type;
4037 CONSTRUCTOR_ELTS (expr) = v;
4039 TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
4045 case UNCONSTRAINED_ARRAY_REF:
4046 /* Convert this to the type of the inner array by getting the address of
4047 the array from the template. */
4048 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4049 build_component_ref (TREE_OPERAND (expr, 0),
4050 get_identifier ("P_ARRAY"),
4052 etype = TREE_TYPE (expr);
4053 ecode = TREE_CODE (etype);
4056 case VIEW_CONVERT_EXPR:
4058 /* GCC 4.x is very sensitive to type consistency overall, and view
4059 conversions thus are very frequent. Even though just "convert"ing
4060 the inner operand to the output type is fine in most cases, it
4061 might expose unexpected input/output type mismatches in special
4062 circumstances so we avoid such recursive calls when we can. */
4063 tree op0 = TREE_OPERAND (expr, 0);
4065 /* If we are converting back to the original type, we can just
4066 lift the input conversion. This is a common occurrence with
4067 switches back-and-forth amongst type variants. */
4068 if (type == TREE_TYPE (op0))
4071 /* Otherwise, if we're converting between two aggregate types, we
4072 might be allowed to substitute the VIEW_CONVERT_EXPR target type
4073 in place or to just convert the inner expression. */
4074 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4076 /* If we are converting between mere variants, we can just
4077 substitute the VIEW_CONVERT_EXPR in place. */
4078 if (gnat_types_compatible_p (type, etype))
4079 return build1 (VIEW_CONVERT_EXPR, type, op0);
4081 /* Otherwise, we may just bypass the input view conversion unless
4082 one of the types is a fat pointer, which is handled by
4083 specialized code below which relies on exact type matching. */
4084 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4085 return convert (type, op0);
4091 /* If both types are record types, just convert the pointer and
4092 make a new INDIRECT_REF.
4094 ??? Disable this for now since it causes problems with the
4095 code in build_binary_op for MODIFY_EXPR which wants to
4096 strip off conversions. But that code really is a mess and
4097 we need to do this a much better way some time. */
4099 && (TREE_CODE (type) == RECORD_TYPE
4100 || TREE_CODE (type) == UNION_TYPE)
4101 && (TREE_CODE (etype) == RECORD_TYPE
4102 || TREE_CODE (etype) == UNION_TYPE)
4103 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4104 return build_unary_op (INDIRECT_REF, NULL_TREE,
4105 convert (build_pointer_type (type),
4106 TREE_OPERAND (expr, 0)));
4113 /* Check for converting to a pointer to an unconstrained array. */
4114 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4115 return convert_to_fat_pointer (type, expr);
4117 /* If we are converting between two aggregate types that are mere
4118 variants, just make a VIEW_CONVERT_EXPR. */
4119 else if (code == ecode
4120 && AGGREGATE_TYPE_P (type)
4121 && gnat_types_compatible_p (type, etype))
4122 return build1 (VIEW_CONVERT_EXPR, type, expr);
4124 /* In all other cases of related types, make a NOP_EXPR. */
4125 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4126 || (code == INTEGER_CST && ecode == INTEGER_CST
4127 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4128 return fold_convert (type, expr);
4133 return fold_build1 (CONVERT_EXPR, type, expr);
4136 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4137 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4138 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4139 return unchecked_convert (type, expr, false);
4140 else if (TYPE_BIASED_REPRESENTATION_P (type))
4141 return fold_convert (type,
4142 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4143 convert (TREE_TYPE (type), expr),
4144 TYPE_MIN_VALUE (type)));
4146 /* ... fall through ... */
4150 /* If we are converting an additive expression to an integer type
4151 with lower precision, be wary of the optimization that can be
4152 applied by convert_to_integer. There are 2 problematic cases:
4153 - if the first operand was originally of a biased type,
4154 because we could be recursively called to convert it
4155 to an intermediate type and thus rematerialize the
4156 additive operator endlessly,
4157 - if the expression contains a placeholder, because an
4158 intermediate conversion that changes the sign could
4159 be inserted and thus introduce an artificial overflow
4160 at compile time when the placeholder is substituted. */
4161 if (code == INTEGER_TYPE
4162 && ecode == INTEGER_TYPE
4163 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4164 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4166 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4168 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4169 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4170 || CONTAINS_PLACEHOLDER_P (expr))
4171 return build1 (NOP_EXPR, type, expr);
4174 return fold (convert_to_integer (type, expr));
4177 case REFERENCE_TYPE:
4178 /* If converting between two pointers to records denoting
4179 both a template and type, adjust if needed to account
4180 for any differing offsets, since one might be negative. */
4181 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4184 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4185 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4186 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4187 sbitsize_int (BITS_PER_UNIT));
4189 expr = build1 (NOP_EXPR, type, expr);
4190 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4191 if (integer_zerop (byte_diff))
4194 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4195 fold (convert (sizetype, byte_diff)));
4198 /* If converting to a thin pointer, handle specially. */
4199 if (TYPE_THIN_POINTER_P (type)
4200 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4201 return convert_to_thin_pointer (type, expr);
4203 /* If converting fat pointer to normal pointer, get the pointer to the
4204 array and then convert it. */
4205 else if (TYPE_FAT_POINTER_P (etype))
4206 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4209 return fold (convert_to_pointer (type, expr));
4212 return fold (convert_to_real (type, expr));
4215 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4217 gnat_build_constructor
4218 (type, tree_cons (TYPE_FIELDS (type),
4219 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4222 /* ... fall through ... */
4225 /* In these cases, assume the front-end has validated the conversion.
4226 If the conversion is valid, it will be a bit-wise conversion, so
4227 it can be viewed as an unchecked conversion. */
4228 return unchecked_convert (type, expr, false);
4231 /* This is a either a conversion between a tagged type and some
4232 subtype, which we have to mark as a UNION_TYPE because of
4233 overlapping fields or a conversion of an Unchecked_Union. */
4234 return unchecked_convert (type, expr, false);
4236 case UNCONSTRAINED_ARRAY_TYPE:
4237 /* If EXPR is a constrained array, take its address, convert it to a
4238 fat pointer, and then dereference it. Likewise if EXPR is a
4239 record containing both a template and a constrained array.
4240 Note that a record representing a justified modular type
4241 always represents a packed constrained array. */
4242 if (ecode == ARRAY_TYPE
4243 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4244 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4245 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4248 (INDIRECT_REF, NULL_TREE,
4249 convert_to_fat_pointer (TREE_TYPE (type),
4250 build_unary_op (ADDR_EXPR,
4253 /* Do something very similar for converting one unconstrained
4254 array to another. */
4255 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4257 build_unary_op (INDIRECT_REF, NULL_TREE,
4258 convert (TREE_TYPE (type),
4259 build_unary_op (ADDR_EXPR,
4265 return fold (convert_to_complex (type, expr));
4272 /* Remove all conversions that are done in EXP. This includes converting
4273 from a padded type or to a justified modular type. If TRUE_ADDRESS
4274 is true, always return the address of the containing object even if
4275 the address is not bit-aligned. */
4278 remove_conversions (tree exp, bool true_address)
4280 switch (TREE_CODE (exp))
4284 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4285 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4287 remove_conversions (VEC_index (constructor_elt,
4288 CONSTRUCTOR_ELTS (exp), 0)->value,
4293 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4294 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4295 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4298 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4300 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4309 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4310 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4311 likewise return an expression pointing to the underlying array. */
4314 maybe_unconstrained_array (tree exp)
4316 enum tree_code code = TREE_CODE (exp);
4319 switch (TREE_CODE (TREE_TYPE (exp)))
4321 case UNCONSTRAINED_ARRAY_TYPE:
4322 if (code == UNCONSTRAINED_ARRAY_REF)
4325 = build_unary_op (INDIRECT_REF, NULL_TREE,
4326 build_component_ref (TREE_OPERAND (exp, 0),
4327 get_identifier ("P_ARRAY"),
4329 TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4330 = TREE_READONLY (exp);
4334 else if (code == NULL_EXPR)
4335 return build1 (NULL_EXPR,
4336 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4337 (TREE_TYPE (TREE_TYPE (exp))))),
4338 TREE_OPERAND (exp, 0));
4341 /* If this is a padded type, convert to the unpadded type and see if
4342 it contains a template. */
4343 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4345 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4346 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4347 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4349 build_component_ref (new_exp, NULL_TREE,
4351 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4354 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4356 build_component_ref (exp, NULL_TREE,
4357 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4367 /* Return true if EXPR is an expression that can be folded as an operand
4368 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4371 can_fold_for_view_convert_p (tree expr)
4375 /* The folder will fold NOP_EXPRs between integral types with the same
4376 precision (in the middle-end's sense). We cannot allow it if the
4377 types don't have the same precision in the Ada sense as well. */
4378 if (TREE_CODE (expr) != NOP_EXPR)
4381 t1 = TREE_TYPE (expr);
4382 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4384 /* Defer to the folder for non-integral conversions. */
4385 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4388 /* Only fold conversions that preserve both precisions. */
4389 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4390 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4396 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4397 If NOTRUNC_P is true, truncation operations should be suppressed.
4399 Special care is required with (source or target) integral types whose
4400 precision is not equal to their size, to make sure we fetch or assign
4401 the value bits whose location might depend on the endianness, e.g.
4403 Rmsize : constant := 8;
4404 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4406 type Bit_Array is array (1 .. Rmsize) of Boolean;
4407 pragma Pack (Bit_Array);
4409 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4411 Value : Int := 2#1000_0001#;
4412 Vbits : Bit_Array := To_Bit_Array (Value);
4414 we expect the 8 bits at Vbits'Address to always contain Value, while
4415 their original location depends on the endianness, at Value'Address
4416 on a little-endian architecture but not on a big-endian one. */
4419 unchecked_convert (tree type, tree expr, bool notrunc_p)
4421 tree etype = TREE_TYPE (expr);
4423 /* If the expression is already the right type, we are done. */
4427 /* If both types types are integral just do a normal conversion.
4428 Likewise for a conversion to an unconstrained array. */
4429 if ((((INTEGRAL_TYPE_P (type)
4430 && !(TREE_CODE (type) == INTEGER_TYPE
4431 && TYPE_VAX_FLOATING_POINT_P (type)))
4432 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4433 || (TREE_CODE (type) == RECORD_TYPE
4434 && TYPE_JUSTIFIED_MODULAR_P (type)))
4435 && ((INTEGRAL_TYPE_P (etype)
4436 && !(TREE_CODE (etype) == INTEGER_TYPE
4437 && TYPE_VAX_FLOATING_POINT_P (etype)))
4438 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4439 || (TREE_CODE (etype) == RECORD_TYPE
4440 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4441 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4443 if (TREE_CODE (etype) == INTEGER_TYPE
4444 && TYPE_BIASED_REPRESENTATION_P (etype))
4446 tree ntype = copy_type (etype);
4447 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4448 TYPE_MAIN_VARIANT (ntype) = ntype;
4449 expr = build1 (NOP_EXPR, ntype, expr);
4452 if (TREE_CODE (type) == INTEGER_TYPE
4453 && TYPE_BIASED_REPRESENTATION_P (type))
4455 tree rtype = copy_type (type);
4456 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4457 TYPE_MAIN_VARIANT (rtype) = rtype;
4458 expr = convert (rtype, expr);
4459 expr = build1 (NOP_EXPR, type, expr);
4462 expr = convert (type, expr);
4465 /* If we are converting to an integral type whose precision is not equal
4466 to its size, first unchecked convert to a record that contains an
4467 object of the output type. Then extract the field. */
4468 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4469 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4470 GET_MODE_BITSIZE (TYPE_MODE (type))))
4472 tree rec_type = make_node (RECORD_TYPE);
4473 tree field = create_field_decl (get_identifier ("OBJ"), type,
4474 rec_type, 1, 0, 0, 0);
4476 TYPE_FIELDS (rec_type) = field;
4477 layout_type (rec_type);
4479 expr = unchecked_convert (rec_type, expr, notrunc_p);
4480 expr = build_component_ref (expr, NULL_TREE, field, 0);
4483 /* Similarly if we are converting from an integral type whose precision
4484 is not equal to its size. */
4485 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4486 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4487 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4489 tree rec_type = make_node (RECORD_TYPE);
4491 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4494 TYPE_FIELDS (rec_type) = field;
4495 layout_type (rec_type);
4497 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4498 expr = unchecked_convert (type, expr, notrunc_p);
4501 /* We have a special case when we are converting between two
4502 unconstrained array types. In that case, take the address,
4503 convert the fat pointer types, and dereference. */
4504 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4505 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4506 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4507 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4508 build_unary_op (ADDR_EXPR, NULL_TREE,
4512 expr = maybe_unconstrained_array (expr);
4513 etype = TREE_TYPE (expr);
4514 if (can_fold_for_view_convert_p (expr))
4515 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4517 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4520 /* If the result is an integral type whose precision is not equal to its
4521 size, sign- or zero-extend the result. We need not do this if the input
4522 is an integral type of the same precision and signedness or if the output
4523 is a biased type or if both the input and output are unsigned. */
4525 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4526 && !(TREE_CODE (type) == INTEGER_TYPE
4527 && TYPE_BIASED_REPRESENTATION_P (type))
4528 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4529 GET_MODE_BITSIZE (TYPE_MODE (type)))
4530 && !(INTEGRAL_TYPE_P (etype)
4531 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4532 && operand_equal_p (TYPE_RM_SIZE (type),
4533 (TYPE_RM_SIZE (etype) != 0
4534 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4536 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4538 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4539 TYPE_UNSIGNED (type));
4541 = convert (base_type,
4542 size_binop (MINUS_EXPR,
4544 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4545 TYPE_RM_SIZE (type)));
4548 build_binary_op (RSHIFT_EXPR, base_type,
4549 build_binary_op (LSHIFT_EXPR, base_type,
4550 convert (base_type, expr),
4555 /* An unchecked conversion should never raise Constraint_Error. The code
4556 below assumes that GCC's conversion routines overflow the same way that
4557 the underlying hardware does. This is probably true. In the rare case
4558 when it is false, we can rely on the fact that such conversions are
4559 erroneous anyway. */
4560 if (TREE_CODE (expr) == INTEGER_CST)
4561 TREE_OVERFLOW (expr) = 0;
4563 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4564 show no longer constant. */
4565 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4566 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4568 TREE_CONSTANT (expr) = 0;
4573 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4574 the latter being a record type as predicated by Is_Record_Type. */
4577 tree_code_for_record_type (Entity_Id gnat_type)
4579 Node_Id component_list
4580 = Component_List (Type_Definition
4582 (Implementation_Base_Type (gnat_type))));
4585 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4586 we have a non-discriminant field outside a variant. In either case,
4587 it's a RECORD_TYPE. */
4589 if (!Is_Unchecked_Union (gnat_type))
4592 for (component = First_Non_Pragma (Component_Items (component_list));
4593 Present (component);
4594 component = Next_Non_Pragma (component))
4595 if (Ekind (Defining_Entity (component)) == E_Component)
4601 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4602 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4603 according to the presence of an alignment clause on the type or, if it
4604 is an array, on the component type. */
4607 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4609 gnat_type = Underlying_Type (gnat_type);
4611 *align_clause = Present (Alignment_Clause (gnat_type));
4613 if (Is_Array_Type (gnat_type))
4615 gnat_type = Underlying_Type (Component_Type (gnat_type));
4616 if (Present (Alignment_Clause (gnat_type)))
4617 *align_clause = true;
4620 if (!Is_Floating_Point_Type (gnat_type))
4623 if (UI_To_Int (Esize (gnat_type)) != 64)
4629 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4630 size is greater or equal to 64 bits, or an array of such a type. Set
4631 ALIGN_CLAUSE according to the presence of an alignment clause on the
4632 type or, if it is an array, on the component type. */
4635 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4637 gnat_type = Underlying_Type (gnat_type);
4639 *align_clause = Present (Alignment_Clause (gnat_type));
4641 if (Is_Array_Type (gnat_type))
4643 gnat_type = Underlying_Type (Component_Type (gnat_type));
4644 if (Present (Alignment_Clause (gnat_type)))
4645 *align_clause = true;
4648 if (!Is_Scalar_Type (gnat_type))
4651 if (UI_To_Int (Esize (gnat_type)) < 64)
4657 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4658 component of an aggregate type. */
4661 type_for_nonaliased_component_p (tree gnu_type)
4663 /* If the type is passed by reference, we may have pointers to the
4664 component so it cannot be made non-aliased. */
4665 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4668 /* We used to say that any component of aggregate type is aliased
4669 because the front-end may take 'Reference of it. The front-end
4670 has been enhanced in the meantime so as to use a renaming instead
4671 in most cases, but the back-end can probably take the address of
4672 such a component too so we go for the conservative stance.
4674 For instance, we might need the address of any array type, even
4675 if normally passed by copy, to construct a fat pointer if the
4676 component is used as an actual for an unconstrained formal.
4678 Likewise for record types: even if a specific record subtype is
4679 passed by copy, the parent type might be passed by ref (e.g. if
4680 it's of variable size) and we might take the address of a child
4681 component to pass to a parent formal. We have no way to check
4682 for such conditions here. */
4683 if (AGGREGATE_TYPE_P (gnu_type))
4689 /* Perform final processing on global variables. */
4692 gnat_write_global_declarations (void)
4694 /* Proceed to optimize and emit assembly.
4695 FIXME: shouldn't be the front end's responsibility to call this. */
4696 cgraph_finalize_compilation_unit ();
4698 /* Emit debug info for all global declarations. */
4699 emit_debug_global_declarations (VEC_address (tree, global_decls),
4700 VEC_length (tree, global_decls));
4703 /* ************************************************************************
4704 * * GCC builtins support *
4705 * ************************************************************************ */
4707 /* The general scheme is fairly simple:
4709 For each builtin function/type to be declared, gnat_install_builtins calls
4710 internal facilities which eventually get to gnat_push_decl, which in turn
4711 tracks the so declared builtin function decls in the 'builtin_decls' global
4712 datastructure. When an Intrinsic subprogram declaration is processed, we
4713 search this global datastructure to retrieve the associated BUILT_IN DECL
4716 /* Search the chain of currently available builtin declarations for a node
4717 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4718 found, if any, or NULL_TREE otherwise. */
4720 builtin_decl_for (tree name)
4725 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4726 if (DECL_NAME (decl) == name)
4732 /* The code below eventually exposes gnat_install_builtins, which declares
4733 the builtin types and functions we might need, either internally or as
4734 user accessible facilities.
4736 ??? This is a first implementation shot, still in rough shape. It is
4737 heavily inspired from the "C" family implementation, with chunks copied
4738 verbatim from there.
4740 Two obvious TODO candidates are
4741 o Use a more efficient name/decl mapping scheme
4742 o Devise a middle-end infrastructure to avoid having to copy
4743 pieces between front-ends. */
4745 /* ----------------------------------------------------------------------- *
4746 * BUILTIN ELEMENTARY TYPES *
4747 * ----------------------------------------------------------------------- */
4749 /* Standard data types to be used in builtin argument declarations. */
4753 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4755 CTI_CONST_STRING_TYPE,
4760 static tree c_global_trees[CTI_MAX];
4762 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4763 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4764 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4766 /* ??? In addition some attribute handlers, we currently don't support a
4767 (small) number of builtin-types, which in turns inhibits support for a
4768 number of builtin functions. */
4769 #define wint_type_node void_type_node
4770 #define intmax_type_node void_type_node
4771 #define uintmax_type_node void_type_node
4773 /* Build the void_list_node (void_type_node having been created). */
4776 build_void_list_node (void)
4778 tree t = build_tree_list (NULL_TREE, void_type_node);
4782 /* Used to help initialize the builtin-types.def table. When a type of
4783 the correct size doesn't exist, use error_mark_node instead of NULL.
4784 The later results in segfaults even when a decl using the type doesn't
4788 builtin_type_for_size (int size, bool unsignedp)
4790 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4791 return type ? type : error_mark_node;
4794 /* Build/push the elementary type decls that builtin functions/types
4798 install_builtin_elementary_types (void)
4800 signed_size_type_node = size_type_node;
4801 pid_type_node = integer_type_node;
4802 void_list_node = build_void_list_node ();
4804 string_type_node = build_pointer_type (char_type_node);
4805 const_string_type_node
4806 = build_pointer_type (build_qualified_type
4807 (char_type_node, TYPE_QUAL_CONST));
4810 /* ----------------------------------------------------------------------- *
4811 * BUILTIN FUNCTION TYPES *
4812 * ----------------------------------------------------------------------- */
4814 /* Now, builtin function types per se. */
4818 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4819 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4820 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4821 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4822 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4823 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4824 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4825 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4826 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4827 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4828 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4829 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4830 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4831 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4832 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4834 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4835 #include "builtin-types.def"
4836 #undef DEF_PRIMITIVE_TYPE
4837 #undef DEF_FUNCTION_TYPE_0
4838 #undef DEF_FUNCTION_TYPE_1
4839 #undef DEF_FUNCTION_TYPE_2
4840 #undef DEF_FUNCTION_TYPE_3
4841 #undef DEF_FUNCTION_TYPE_4
4842 #undef DEF_FUNCTION_TYPE_5
4843 #undef DEF_FUNCTION_TYPE_6
4844 #undef DEF_FUNCTION_TYPE_7
4845 #undef DEF_FUNCTION_TYPE_VAR_0
4846 #undef DEF_FUNCTION_TYPE_VAR_1
4847 #undef DEF_FUNCTION_TYPE_VAR_2
4848 #undef DEF_FUNCTION_TYPE_VAR_3
4849 #undef DEF_FUNCTION_TYPE_VAR_4
4850 #undef DEF_FUNCTION_TYPE_VAR_5
4851 #undef DEF_POINTER_TYPE
4855 typedef enum c_builtin_type builtin_type;
4857 /* A temporary array used in communication with def_fn_type. */
4858 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4860 /* A helper function for install_builtin_types. Build function type
4861 for DEF with return type RET and N arguments. If VAR is true, then the
4862 function should be variadic after those N arguments.
4864 Takes special care not to ICE if any of the types involved are
4865 error_mark_node, which indicates that said type is not in fact available
4866 (see builtin_type_for_size). In which case the function type as a whole
4867 should be error_mark_node. */
4870 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4872 tree args = NULL, t;
4877 for (i = 0; i < n; ++i)
4879 builtin_type a = (builtin_type) va_arg (list, int);
4880 t = builtin_types[a];
4881 if (t == error_mark_node)
4883 args = tree_cons (NULL_TREE, t, args);
4887 args = nreverse (args);
4889 args = chainon (args, void_list_node);
4891 t = builtin_types[ret];
4892 if (t == error_mark_node)
4894 t = build_function_type (t, args);
4897 builtin_types[def] = t;
4900 /* Build the builtin function types and install them in the builtin_types
4901 array for later use in builtin function decls. */
4904 install_builtin_function_types (void)
4906 tree va_list_ref_type_node;
4907 tree va_list_arg_type_node;
4909 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4911 va_list_arg_type_node = va_list_ref_type_node =
4912 build_pointer_type (TREE_TYPE (va_list_type_node));
4916 va_list_arg_type_node = va_list_type_node;
4917 va_list_ref_type_node = build_reference_type (va_list_type_node);
4920 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4921 builtin_types[ENUM] = VALUE;
4922 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4923 def_fn_type (ENUM, RETURN, 0, 0);
4924 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4925 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4926 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4927 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4928 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4929 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4930 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4931 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4932 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4933 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4934 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4936 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4937 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4939 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4940 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4941 def_fn_type (ENUM, RETURN, 1, 0);
4942 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4943 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4944 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4945 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4946 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4947 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4948 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4949 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4950 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4951 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4952 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4953 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4955 #include "builtin-types.def"
4957 #undef DEF_PRIMITIVE_TYPE
4958 #undef DEF_FUNCTION_TYPE_1
4959 #undef DEF_FUNCTION_TYPE_2
4960 #undef DEF_FUNCTION_TYPE_3
4961 #undef DEF_FUNCTION_TYPE_4
4962 #undef DEF_FUNCTION_TYPE_5
4963 #undef DEF_FUNCTION_TYPE_6
4964 #undef DEF_FUNCTION_TYPE_VAR_0
4965 #undef DEF_FUNCTION_TYPE_VAR_1
4966 #undef DEF_FUNCTION_TYPE_VAR_2
4967 #undef DEF_FUNCTION_TYPE_VAR_3
4968 #undef DEF_FUNCTION_TYPE_VAR_4
4969 #undef DEF_FUNCTION_TYPE_VAR_5
4970 #undef DEF_POINTER_TYPE
4971 builtin_types[(int) BT_LAST] = NULL_TREE;
4974 /* ----------------------------------------------------------------------- *
4975 * BUILTIN ATTRIBUTES *
4976 * ----------------------------------------------------------------------- */
4978 enum built_in_attribute
4980 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4981 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4982 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4983 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4984 #include "builtin-attrs.def"
4985 #undef DEF_ATTR_NULL_TREE
4987 #undef DEF_ATTR_IDENT
4988 #undef DEF_ATTR_TREE_LIST
4992 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4995 install_builtin_attributes (void)
4997 /* Fill in the built_in_attributes array. */
4998 #define DEF_ATTR_NULL_TREE(ENUM) \
4999 built_in_attributes[(int) ENUM] = NULL_TREE;
5000 #define DEF_ATTR_INT(ENUM, VALUE) \
5001 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5002 #define DEF_ATTR_IDENT(ENUM, STRING) \
5003 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5004 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5005 built_in_attributes[(int) ENUM] \
5006 = tree_cons (built_in_attributes[(int) PURPOSE], \
5007 built_in_attributes[(int) VALUE], \
5008 built_in_attributes[(int) CHAIN]);
5009 #include "builtin-attrs.def"
5010 #undef DEF_ATTR_NULL_TREE
5012 #undef DEF_ATTR_IDENT
5013 #undef DEF_ATTR_TREE_LIST
5016 /* Handle a "const" attribute; arguments as in
5017 struct attribute_spec.handler. */
5020 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5021 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5024 if (TREE_CODE (*node) == FUNCTION_DECL)
5025 TREE_READONLY (*node) = 1;
5027 *no_add_attrs = true;
5032 /* Handle a "nothrow" attribute; arguments as in
5033 struct attribute_spec.handler. */
5036 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5037 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5040 if (TREE_CODE (*node) == FUNCTION_DECL)
5041 TREE_NOTHROW (*node) = 1;
5043 *no_add_attrs = true;
5048 /* Handle a "pure" attribute; arguments as in
5049 struct attribute_spec.handler. */
5052 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5053 int ARG_UNUSED (flags), bool *no_add_attrs)
5055 if (TREE_CODE (*node) == FUNCTION_DECL)
5056 DECL_PURE_P (*node) = 1;
5057 /* ??? TODO: Support types. */
5060 warning (OPT_Wattributes, "%qE attribute ignored", name);
5061 *no_add_attrs = true;
5067 /* Handle a "no vops" attribute; arguments as in
5068 struct attribute_spec.handler. */
5071 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5072 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5073 bool *ARG_UNUSED (no_add_attrs))
5075 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5076 DECL_IS_NOVOPS (*node) = 1;
5080 /* Helper for nonnull attribute handling; fetch the operand number
5081 from the attribute argument list. */
5084 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5086 /* Verify the arg number is a constant. */
5087 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5088 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5091 *valp = TREE_INT_CST_LOW (arg_num_expr);
5095 /* Handle the "nonnull" attribute. */
5097 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5098 tree args, int ARG_UNUSED (flags),
5102 unsigned HOST_WIDE_INT attr_arg_num;
5104 /* If no arguments are specified, all pointer arguments should be
5105 non-null. Verify a full prototype is given so that the arguments
5106 will have the correct types when we actually check them later. */
5109 if (!TYPE_ARG_TYPES (type))
5111 error ("nonnull attribute without arguments on a non-prototype");
5112 *no_add_attrs = true;
5117 /* Argument list specified. Verify that each argument number references
5118 a pointer argument. */
5119 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5122 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5124 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5126 error ("nonnull argument has invalid operand number (argument %lu)",
5127 (unsigned long) attr_arg_num);
5128 *no_add_attrs = true;
5132 argument = TYPE_ARG_TYPES (type);
5135 for (ck_num = 1; ; ck_num++)
5137 if (!argument || ck_num == arg_num)
5139 argument = TREE_CHAIN (argument);
5143 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5145 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5146 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5147 *no_add_attrs = true;
5151 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5153 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5154 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5155 *no_add_attrs = true;
5164 /* Handle a "sentinel" attribute. */
5167 handle_sentinel_attribute (tree *node, tree name, tree args,
5168 int ARG_UNUSED (flags), bool *no_add_attrs)
5170 tree params = TYPE_ARG_TYPES (*node);
5174 warning (OPT_Wattributes,
5175 "%qE attribute requires prototypes with named arguments", name);
5176 *no_add_attrs = true;
5180 while (TREE_CHAIN (params))
5181 params = TREE_CHAIN (params);
5183 if (VOID_TYPE_P (TREE_VALUE (params)))
5185 warning (OPT_Wattributes,
5186 "%qE attribute only applies to variadic functions", name);
5187 *no_add_attrs = true;
5193 tree position = TREE_VALUE (args);
5195 if (TREE_CODE (position) != INTEGER_CST)
5197 warning (0, "requested position is not an integer constant");
5198 *no_add_attrs = true;
5202 if (tree_int_cst_lt (position, integer_zero_node))
5204 warning (0, "requested position is less than zero");
5205 *no_add_attrs = true;
5213 /* Handle a "noreturn" attribute; arguments as in
5214 struct attribute_spec.handler. */
5217 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5218 int ARG_UNUSED (flags), bool *no_add_attrs)
5220 tree type = TREE_TYPE (*node);
5222 /* See FIXME comment in c_common_attribute_table. */
5223 if (TREE_CODE (*node) == FUNCTION_DECL)
5224 TREE_THIS_VOLATILE (*node) = 1;
5225 else if (TREE_CODE (type) == POINTER_TYPE
5226 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5228 = build_pointer_type
5229 (build_type_variant (TREE_TYPE (type),
5230 TYPE_READONLY (TREE_TYPE (type)), 1));
5233 warning (OPT_Wattributes, "%qE attribute ignored", name);
5234 *no_add_attrs = true;
5240 /* Handle a "malloc" attribute; arguments as in
5241 struct attribute_spec.handler. */
5244 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5245 int ARG_UNUSED (flags), bool *no_add_attrs)
5247 if (TREE_CODE (*node) == FUNCTION_DECL
5248 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5249 DECL_IS_MALLOC (*node) = 1;
5252 warning (OPT_Wattributes, "%qE attribute ignored", name);
5253 *no_add_attrs = true;
5259 /* Fake handler for attributes we don't properly support. */
5262 fake_attribute_handler (tree * ARG_UNUSED (node),
5263 tree ARG_UNUSED (name),
5264 tree ARG_UNUSED (args),
5265 int ARG_UNUSED (flags),
5266 bool * ARG_UNUSED (no_add_attrs))
5271 /* Handle a "type_generic" attribute. */
5274 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5275 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5276 bool * ARG_UNUSED (no_add_attrs))
5280 /* Ensure we have a function type. */
5281 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5283 params = TYPE_ARG_TYPES (*node);
5284 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5285 params = TREE_CHAIN (params);
5287 /* Ensure we have a variadic function. */
5288 gcc_assert (!params);
5293 /* Handle a "vector_size" attribute; arguments as in
5294 struct attribute_spec.handler. */
5297 handle_vector_size_attribute (tree *node, tree name, tree args,
5298 int ARG_UNUSED (flags),
5301 unsigned HOST_WIDE_INT vecsize, nunits;
5302 enum machine_mode orig_mode;
5303 tree type = *node, new_type, size;
5305 *no_add_attrs = true;
5307 size = TREE_VALUE (args);
5309 if (!host_integerp (size, 1))
5311 warning (OPT_Wattributes, "%qE attribute ignored", name);
5315 /* Get the vector size (in bytes). */
5316 vecsize = tree_low_cst (size, 1);
5318 /* We need to provide for vector pointers, vector arrays, and
5319 functions returning vectors. For example:
5321 __attribute__((vector_size(16))) short *foo;
5323 In this case, the mode is SI, but the type being modified is
5324 HI, so we need to look further. */
5326 while (POINTER_TYPE_P (type)
5327 || TREE_CODE (type) == FUNCTION_TYPE
5328 || TREE_CODE (type) == METHOD_TYPE
5329 || TREE_CODE (type) == ARRAY_TYPE
5330 || TREE_CODE (type) == OFFSET_TYPE)
5331 type = TREE_TYPE (type);
5333 /* Get the mode of the type being modified. */
5334 orig_mode = TYPE_MODE (type);
5336 if ((!INTEGRAL_TYPE_P (type)
5337 && !SCALAR_FLOAT_TYPE_P (type)
5338 && !FIXED_POINT_TYPE_P (type))
5339 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5340 && GET_MODE_CLASS (orig_mode) != MODE_INT
5341 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5342 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5343 || TREE_CODE (type) == BOOLEAN_TYPE)
5345 error ("invalid vector type for attribute %qE", name);
5349 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5351 error ("vector size not an integral multiple of component size");
5357 error ("zero vector size");
5361 /* Calculate how many units fit in the vector. */
5362 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5363 if (nunits & (nunits - 1))
5365 error ("number of components of the vector not a power of two");
5369 new_type = build_vector_type (type, nunits);
5371 /* Build back pointers if needed. */
5372 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5377 /* ----------------------------------------------------------------------- *
5378 * BUILTIN FUNCTIONS *
5379 * ----------------------------------------------------------------------- */
5381 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5382 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5383 if nonansi_p and flag_no_nonansi_builtin. */
5386 def_builtin_1 (enum built_in_function fncode,
5388 enum built_in_class fnclass,
5389 tree fntype, tree libtype,
5390 bool both_p, bool fallback_p,
5391 bool nonansi_p ATTRIBUTE_UNUSED,
5392 tree fnattrs, bool implicit_p)
5395 const char *libname;
5397 /* Preserve an already installed decl. It most likely was setup in advance
5398 (e.g. as part of the internal builtins) for specific reasons. */
5399 if (built_in_decls[(int) fncode] != NULL_TREE)
5402 gcc_assert ((!both_p && !fallback_p)
5403 || !strncmp (name, "__builtin_",
5404 strlen ("__builtin_")));
5406 libname = name + strlen ("__builtin_");
5407 decl = add_builtin_function (name, fntype, fncode, fnclass,
5408 (fallback_p ? libname : NULL),
5411 /* ??? This is normally further controlled by command-line options
5412 like -fno-builtin, but we don't have them for Ada. */
5413 add_builtin_function (libname, libtype, fncode, fnclass,
5416 built_in_decls[(int) fncode] = decl;
5418 implicit_built_in_decls[(int) fncode] = decl;
5421 static int flag_isoc94 = 0;
5422 static int flag_isoc99 = 0;
5424 /* Install what the common builtins.def offers. */
5427 install_builtin_functions (void)
5429 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5430 NONANSI_P, ATTRS, IMPLICIT, COND) \
5432 def_builtin_1 (ENUM, NAME, CLASS, \
5433 builtin_types[(int) TYPE], \
5434 builtin_types[(int) LIBTYPE], \
5435 BOTH_P, FALLBACK_P, NONANSI_P, \
5436 built_in_attributes[(int) ATTRS], IMPLICIT);
5437 #include "builtins.def"
5441 /* ----------------------------------------------------------------------- *
5442 * BUILTIN FUNCTIONS *
5443 * ----------------------------------------------------------------------- */
5445 /* Install the builtin functions we might need. */
5448 gnat_install_builtins (void)
5450 install_builtin_elementary_types ();
5451 install_builtin_function_types ();
5452 install_builtin_attributes ();
5454 /* Install builtins used by generic middle-end pieces first. Some of these
5455 know about internal specificities and control attributes accordingly, for
5456 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5457 the generic definition from builtins.def. */
5458 build_common_builtin_nodes (false);
5460 /* Now, install the target specific builtins, such as the AltiVec family on
5461 ppc, and the common set as exposed by builtins.def. */
5462 targetm.init_builtins ();
5463 install_builtin_functions ();
5466 #include "gt-ada-utils.h"
5467 #include "gtype-ada.h"