1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
48 #include "tree-dump.h"
49 #include "pointer-set.h"
50 #include "langhooks.h"
67 #ifndef MAX_FIXED_MODE_SIZE
68 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
71 #ifndef MAX_BITS_PER_WORD
72 #define MAX_BITS_PER_WORD BITS_PER_WORD
75 /* If nonzero, pretend we are allocating at global level. */
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls[(int) ADT_LAST];
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
84 /* Forward declarations for handlers of attributes. */
85 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 /* Fake handler for attributes we don't properly support, typically because
96 they'd require dragging a lot of the common-c front-end circuitry. */
97 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
99 /* Table of machine-independent internal attributes for Ada. We support
100 this minimal set of attributes to accommodate the needs of builtins. */
101 const struct attribute_spec gnat_internal_attribute_table[] =
103 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
104 { "const", 0, 0, true, false, false, handle_const_attribute },
105 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
106 { "pure", 0, 0, true, false, false, handle_pure_attribute },
107 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
108 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
109 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
110 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
111 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
112 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
114 /* ??? format and format_arg are heavy and not supported, which actually
115 prevents support for stdio builtins, which we however declare as part
116 of the common builtins.def contents. */
117 { "format", 3, 3, false, true, true, fake_attribute_handler },
118 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
120 { NULL, 0, 0, false, false, false, NULL }
123 /* Associates a GNAT tree node to a GCC tree node. It is used in
124 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
125 of `save_gnu_tree' for more info. */
126 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
128 #define GET_GNU_TREE(GNAT_ENTITY) \
129 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
131 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
132 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
134 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
135 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
137 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
138 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
140 #define GET_DUMMY_NODE(GNAT_ENTITY) \
141 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
143 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
144 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
146 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
147 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
149 /* This variable keeps a table for types for each precision so that we only
150 allocate each of them once. Signed and unsigned types are kept separate.
152 Note that these types are only used when fold-const requests something
153 special. Perhaps we should NOT share these types; we'll see how it
155 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
157 /* Likewise for float types, but record these by mode. */
158 static GTY(()) tree float_types[NUM_MACHINE_MODES];
160 /* For each binding contour we allocate a binding_level structure to indicate
161 the binding depth. */
163 struct gnat_binding_level GTY((chain_next ("%h.chain")))
165 /* The binding level containing this one (the enclosing binding level). */
166 struct gnat_binding_level *chain;
167 /* The BLOCK node for this level. */
169 /* If nonzero, the setjmp buffer that needs to be updated for any
170 variable-sized definition within this context. */
174 /* The binding level currently in effect. */
175 static GTY(()) struct gnat_binding_level *current_binding_level;
177 /* A chain of gnat_binding_level structures awaiting reuse. */
178 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
180 /* An array of global declarations. */
181 static GTY(()) VEC(tree,gc) *global_decls;
183 /* An array of builtin function declarations. */
184 static GTY(()) VEC(tree,gc) *builtin_decls;
186 /* An array of global renaming pointers. */
187 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
189 /* A chain of unused BLOCK nodes. */
190 static GTY((deletable)) tree free_block_chain;
192 static tree merge_sizes (tree, tree, tree, bool, bool);
193 static tree compute_related_constant (tree, tree);
194 static tree split_plus (tree, tree *);
195 static void gnat_gimplify_function (tree);
196 static tree float_type_for_precision (int, enum machine_mode);
197 static tree convert_to_fat_pointer (tree, tree);
198 static tree convert_to_thin_pointer (tree, tree);
199 static tree make_descriptor_field (const char *,tree, tree, tree);
200 static bool potential_alignment_gap (tree, tree, tree);
202 /* Initialize the association of GNAT nodes to GCC trees. */
205 init_gnat_to_gnu (void)
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
211 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
213 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
215 If GNU_DECL is zero, a previous association is to be reset. */
218 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
228 SET_GNU_TREE (gnat_entity, gnu_decl);
231 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
239 get_gnu_tree (Entity_Id gnat_entity)
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
245 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
248 present_gnu_tree (Entity_Id gnat_entity)
250 return PRESENT_GNU_TREE (gnat_entity);
253 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
256 init_dummy_type (void)
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
262 /* Make a dummy type corresponding to GNAT_TYPE. */
265 make_dummy_type (Entity_Id gnat_type)
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
279 /* If it there already a dummy type, use that one. Else make one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 TYPE_STUB_DECL (gnu_type)
291 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
292 if (AGGREGATE_TYPE_P (gnu_type))
293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
295 SET_DUMMY_NODE (gnat_underlying, gnu_type);
300 /* Return nonzero if we are currently in the global binding level. */
303 global_bindings_p (void)
305 return ((force_global || !current_function_decl) ? -1 : 0);
308 /* Enter a new binding level. */
313 struct gnat_binding_level *newlevel = NULL;
315 /* Reuse a struct for this binding level, if there is one. */
316 if (free_binding_level)
318 newlevel = free_binding_level;
319 free_binding_level = free_binding_level->chain;
323 = (struct gnat_binding_level *)
324 ggc_alloc (sizeof (struct gnat_binding_level));
326 /* Use a free BLOCK, if any; otherwise, allocate one. */
327 if (free_block_chain)
329 newlevel->block = free_block_chain;
330 free_block_chain = BLOCK_CHAIN (free_block_chain);
331 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
334 newlevel->block = make_node (BLOCK);
336 /* Point the BLOCK we just made to its parent. */
337 if (current_binding_level)
338 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
340 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
341 TREE_USED (newlevel->block) = 1;
343 /* Add this level to the front of the chain (stack) of levels that are
345 newlevel->chain = current_binding_level;
346 newlevel->jmpbuf_decl = NULL_TREE;
347 current_binding_level = newlevel;
350 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
351 and point FNDECL to this BLOCK. */
354 set_current_block_context (tree fndecl)
356 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
357 DECL_INITIAL (fndecl) = current_binding_level->block;
360 /* Set the jmpbuf_decl for the current binding level to DECL. */
363 set_block_jmpbuf_decl (tree decl)
365 current_binding_level->jmpbuf_decl = decl;
368 /* Get the jmpbuf_decl, if any, for the current binding level. */
371 get_block_jmpbuf_decl ()
373 return current_binding_level->jmpbuf_decl;
376 /* Exit a binding level. Set any BLOCK into the current code group. */
381 struct gnat_binding_level *level = current_binding_level;
382 tree block = level->block;
384 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
385 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
387 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
388 are no variables free the block and merge its subblocks into those of its
389 parent block. Otherwise, add it to the list of its parent. */
390 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
392 else if (BLOCK_VARS (block) == NULL_TREE)
394 BLOCK_SUBBLOCKS (level->chain->block)
395 = chainon (BLOCK_SUBBLOCKS (block),
396 BLOCK_SUBBLOCKS (level->chain->block));
397 BLOCK_CHAIN (block) = free_block_chain;
398 free_block_chain = block;
402 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
403 BLOCK_SUBBLOCKS (level->chain->block) = block;
404 TREE_USED (block) = 1;
405 set_block_for_group (block);
408 /* Free this binding structure. */
409 current_binding_level = level->chain;
410 level->chain = free_binding_level;
411 free_binding_level = level;
415 /* Records a ..._DECL node DECL as belonging to the current lexical scope
416 and uses GNAT_NODE for location information and propagating flags. */
419 gnat_pushdecl (tree decl, Node_Id gnat_node)
421 /* If this decl is public external or at toplevel, there is no context.
422 But PARM_DECLs always go in the level of its function. */
423 if (TREE_CODE (decl) != PARM_DECL
424 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
425 || global_bindings_p ()))
426 DECL_CONTEXT (decl) = 0;
429 DECL_CONTEXT (decl) = current_function_decl;
431 /* Functions imported in another function are not really nested. */
432 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
433 DECL_NO_STATIC_CHAIN (decl) = 1;
436 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
438 /* Set the location of DECL and emit a declaration for it. */
439 if (Present (gnat_node))
440 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
441 add_decl_expr (decl, gnat_node);
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later. Put global variables in the
445 globals list and builtin functions in a dedicated list to speed up
446 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
447 the list, as they will cause trouble with the debugger and aren't needed
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
452 if (global_bindings_p ())
454 VEC_safe_push (tree, gc, global_decls, decl);
456 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
457 VEC_safe_push (tree, gc, builtin_decls, decl);
461 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
462 BLOCK_VARS (current_binding_level->block) = decl;
466 /* For the declaration of a type, set its name if it either is not already
467 set or if the previous type name was not derived from a source name.
468 We'd rather have the type named with a real name and all the pointer
469 types to the same object have the same POINTER_TYPE node. Code in the
470 equivalent function of c-decl.c makes a copy of the type node here, but
471 that may cause us trouble with incomplete types. We make an exception
472 for fat pointer types because the compiler automatically builds them
473 for unconstrained array types and the debugger uses them to represent
474 both these and pointers to these. */
475 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
477 tree t = TREE_TYPE (decl);
479 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
481 else if (TYPE_FAT_POINTER_P (t))
483 tree tt = build_variant_type_copy (t);
484 TYPE_NAME (tt) = decl;
485 TREE_USED (tt) = TREE_USED (t);
486 TREE_TYPE (decl) = tt;
487 DECL_ORIGINAL_TYPE (decl) = t;
490 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
495 /* Propagate the name to all the variants. This is needed for
496 the type qualifiers machinery to work properly. */
498 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
499 TYPE_NAME (t) = decl;
503 /* Do little here. Set up the standard declarations later after the
504 front end has been run. */
507 gnat_init_decl_processing (void)
509 /* Make the binding_level structure for global names. */
510 current_function_decl = 0;
511 current_binding_level = 0;
512 free_binding_level = 0;
515 build_common_tree_nodes (true, true);
517 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
518 corresponding to the width of Pmode. In most cases when ptr_mode
519 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
520 But we get far better code using the width of Pmode. */
521 size_type_node = gnat_type_for_mode (Pmode, 0);
522 set_sizetype (size_type_node);
524 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
525 boolean_type_node = make_node (BOOLEAN_TYPE);
526 TYPE_PRECISION (boolean_type_node) = 1;
527 fixup_unsigned_type (boolean_type_node);
528 TYPE_RM_SIZE (boolean_type_node) = bitsize_int (1);
530 build_common_tree_nodes_2 (0);
532 ptr_void_type_node = build_pointer_type (void_type_node);
535 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
538 record_builtin_type (const char *name, tree type)
540 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
542 gnat_pushdecl (type_decl, Empty);
544 if (debug_hooks->type_decl)
545 debug_hooks->type_decl (type_decl, false);
548 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
549 finish constructing the record or union type. If REP_LEVEL is zero, this
550 record has no representation clause and so will be entirely laid out here.
551 If REP_LEVEL is one, this record has a representation clause and has been
552 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
553 this record is derived from a parent record and thus inherits its layout;
554 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
555 true, the record type is expected to be modified afterwards so it will
556 not be sent to the back-end for finalization. */
559 finish_record_type (tree record_type, tree fieldlist, int rep_level,
560 bool do_not_finalize)
562 enum tree_code code = TREE_CODE (record_type);
563 tree name = TYPE_NAME (record_type);
564 tree ada_size = bitsize_zero_node;
565 tree size = bitsize_zero_node;
566 bool had_size = TYPE_SIZE (record_type) != 0;
567 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
568 bool had_align = TYPE_ALIGN (record_type) != 0;
571 TYPE_FIELDS (record_type) = fieldlist;
573 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
574 generate debug info and have a parallel type. */
575 if (name && TREE_CODE (name) == TYPE_DECL)
576 name = DECL_NAME (name);
577 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
579 /* Globally initialize the record first. If this is a rep'ed record,
580 that just means some initializations; otherwise, layout the record. */
583 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
584 SET_TYPE_MODE (record_type, BLKmode);
587 TYPE_SIZE_UNIT (record_type) = size_zero_node;
589 TYPE_SIZE (record_type) = bitsize_zero_node;
591 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
592 out just like a UNION_TYPE, since the size will be fixed. */
593 else if (code == QUAL_UNION_TYPE)
598 /* Ensure there isn't a size already set. There can be in an error
599 case where there is a rep clause but all fields have errors and
600 no longer have a position. */
601 TYPE_SIZE (record_type) = 0;
602 layout_type (record_type);
605 /* At this point, the position and size of each field is known. It was
606 either set before entry by a rep clause, or by laying out the type above.
608 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
609 to compute the Ada size; the GCC size and alignment (for rep'ed records
610 that are not padding types); and the mode (for rep'ed records). We also
611 clear the DECL_BIT_FIELD indication for the cases we know have not been
612 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
614 if (code == QUAL_UNION_TYPE)
615 fieldlist = nreverse (fieldlist);
617 for (field = fieldlist; field; field = TREE_CHAIN (field))
619 tree type = TREE_TYPE (field);
620 tree pos = bit_position (field);
621 tree this_size = DECL_SIZE (field);
624 if ((TREE_CODE (type) == RECORD_TYPE
625 || TREE_CODE (type) == UNION_TYPE
626 || TREE_CODE (type) == QUAL_UNION_TYPE)
627 && !TYPE_IS_FAT_POINTER_P (type)
628 && !TYPE_CONTAINS_TEMPLATE_P (type)
629 && TYPE_ADA_SIZE (type))
630 this_ada_size = TYPE_ADA_SIZE (type);
632 this_ada_size = this_size;
634 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
635 if (DECL_BIT_FIELD (field)
636 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
638 unsigned int align = TYPE_ALIGN (type);
640 /* In the general case, type alignment is required. */
641 if (value_factor_p (pos, align))
643 /* The enclosing record type must be sufficiently aligned.
644 Otherwise, if no alignment was specified for it and it
645 has been laid out already, bump its alignment to the
646 desired one if this is compatible with its size. */
647 if (TYPE_ALIGN (record_type) >= align)
649 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
650 DECL_BIT_FIELD (field) = 0;
654 && value_factor_p (TYPE_SIZE (record_type), align))
656 TYPE_ALIGN (record_type) = align;
657 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
658 DECL_BIT_FIELD (field) = 0;
662 /* In the non-strict alignment case, only byte alignment is. */
663 if (!STRICT_ALIGNMENT
664 && DECL_BIT_FIELD (field)
665 && value_factor_p (pos, BITS_PER_UNIT))
666 DECL_BIT_FIELD (field) = 0;
669 /* If we still have DECL_BIT_FIELD set at this point, we know the field
670 is technically not addressable. Except that it can actually be
671 addressed if the field is BLKmode and happens to be properly
673 DECL_NONADDRESSABLE_P (field)
674 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
676 /* A type must be as aligned as its most aligned field that is not
677 a bit-field. But this is already enforced by layout_type. */
678 if (rep_level > 0 && !DECL_BIT_FIELD (field))
679 TYPE_ALIGN (record_type)
680 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
685 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
686 size = size_binop (MAX_EXPR, size, this_size);
689 case QUAL_UNION_TYPE:
691 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
692 this_ada_size, ada_size);
693 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
698 /* Since we know here that all fields are sorted in order of
699 increasing bit position, the size of the record is one
700 higher than the ending bit of the last field processed
701 unless we have a rep clause, since in that case we might
702 have a field outside a QUAL_UNION_TYPE that has a higher ending
703 position. So use a MAX in that case. Also, if this field is a
704 QUAL_UNION_TYPE, we need to take into account the previous size in
705 the case of empty variants. */
707 = merge_sizes (ada_size, pos, this_ada_size,
708 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
710 = merge_sizes (size, pos, this_size,
711 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
719 if (code == QUAL_UNION_TYPE)
720 nreverse (fieldlist);
722 /* If the type is discriminated, it can be used to access all its
723 constrained subtypes, so force structural equality checks. */
724 if (CONTAINS_PLACEHOLDER_P (size))
725 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
729 /* If this is a padding record, we never want to make the size smaller
730 than what was specified in it, if any. */
731 if (TREE_CODE (record_type) == RECORD_TYPE
732 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
733 size = TYPE_SIZE (record_type);
735 /* Now set any of the values we've just computed that apply. */
736 if (!TYPE_IS_FAT_POINTER_P (record_type)
737 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
738 SET_TYPE_ADA_SIZE (record_type, ada_size);
742 tree size_unit = had_size_unit
743 ? TYPE_SIZE_UNIT (record_type)
745 size_binop (CEIL_DIV_EXPR, size,
747 unsigned int align = TYPE_ALIGN (record_type);
749 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
750 TYPE_SIZE_UNIT (record_type)
751 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
753 compute_record_mode (record_type);
757 if (!do_not_finalize)
758 rest_of_record_type_compilation (record_type);
761 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
762 the debug information associated with it. It need not be invoked
763 directly in most cases since finish_record_type takes care of doing
764 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
767 rest_of_record_type_compilation (tree record_type)
769 tree fieldlist = TYPE_FIELDS (record_type);
771 enum tree_code code = TREE_CODE (record_type);
772 bool var_size = false;
774 for (field = fieldlist; field; field = TREE_CHAIN (field))
776 /* We need to make an XVE/XVU record if any field has variable size,
777 whether or not the record does. For example, if we have a union,
778 it may be that all fields, rounded up to the alignment, have the
779 same size, in which case we'll use that size. But the debug
780 output routines (except Dwarf2) won't be able to output the fields,
781 so we need to make the special record. */
782 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
783 /* If a field has a non-constant qualifier, the record will have
784 variable size too. */
785 || (code == QUAL_UNION_TYPE
786 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
793 /* If this record is of variable size, rename it so that the
794 debugger knows it is and make a new, parallel, record
795 that tells the debugger how the record is laid out. See
796 exp_dbug.ads. But don't do this for records that are padding
797 since they confuse GDB. */
799 && !(TREE_CODE (record_type) == RECORD_TYPE
800 && TYPE_IS_PADDING_P (record_type)))
803 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
804 ? UNION_TYPE : TREE_CODE (record_type));
805 tree orig_name = TYPE_NAME (record_type), new_name;
806 tree last_pos = bitsize_zero_node;
807 tree old_field, prev_old_field = NULL_TREE;
809 if (TREE_CODE (orig_name) == TYPE_DECL)
810 orig_name = DECL_NAME (orig_name);
813 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
815 TYPE_NAME (new_record_type) = new_name;
816 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
817 TYPE_STUB_DECL (new_record_type)
818 = create_type_stub_decl (new_name, new_record_type);
819 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
820 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
821 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
822 TYPE_SIZE_UNIT (new_record_type)
823 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
825 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
827 /* Now scan all the fields, replacing each field with a new
828 field corresponding to the new encoding. */
829 for (old_field = TYPE_FIELDS (record_type); old_field;
830 old_field = TREE_CHAIN (old_field))
832 tree field_type = TREE_TYPE (old_field);
833 tree field_name = DECL_NAME (old_field);
835 tree curpos = bit_position (old_field);
837 unsigned int align = 0;
840 /* See how the position was modified from the last position.
842 There are two basic cases we support: a value was added
843 to the last position or the last position was rounded to
844 a boundary and they something was added. Check for the
845 first case first. If not, see if there is any evidence
846 of rounding. If so, round the last position and try
849 If this is a union, the position can be taken as zero. */
851 /* Some computations depend on the shape of the position expression,
852 so strip conversions to make sure it's exposed. */
853 curpos = remove_conversions (curpos, true);
855 if (TREE_CODE (new_record_type) == UNION_TYPE)
856 pos = bitsize_zero_node, align = 0;
858 pos = compute_related_constant (curpos, last_pos);
860 if (!pos && TREE_CODE (curpos) == MULT_EXPR
861 && host_integerp (TREE_OPERAND (curpos, 1), 1))
863 tree offset = TREE_OPERAND (curpos, 0);
864 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
866 /* An offset which is a bitwise AND with a negative power of 2
867 means an alignment corresponding to this power of 2. */
868 offset = remove_conversions (offset, true);
869 if (TREE_CODE (offset) == BIT_AND_EXPR
870 && host_integerp (TREE_OPERAND (offset, 1), 0)
871 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
874 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
875 if (exact_log2 (pow) > 0)
879 pos = compute_related_constant (curpos,
880 round_up (last_pos, align));
882 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
883 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
884 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
885 && host_integerp (TREE_OPERAND
886 (TREE_OPERAND (curpos, 0), 1),
891 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
892 pos = compute_related_constant (curpos,
893 round_up (last_pos, align));
895 else if (potential_alignment_gap (prev_old_field, old_field,
898 align = TYPE_ALIGN (field_type);
899 pos = compute_related_constant (curpos,
900 round_up (last_pos, align));
903 /* If we can't compute a position, set it to zero.
905 ??? We really should abort here, but it's too much work
906 to get this correct for all cases. */
909 pos = bitsize_zero_node;
911 /* See if this type is variable-sized and make a pointer type
912 and indicate the indirection if so. Beware that the debug
913 back-end may adjust the position computed above according
914 to the alignment of the field type, i.e. the pointer type
915 in this case, if we don't preventively counter that. */
916 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
918 field_type = build_pointer_type (field_type);
919 if (align != 0 && TYPE_ALIGN (field_type) > align)
921 field_type = copy_node (field_type);
922 TYPE_ALIGN (field_type) = align;
927 /* Make a new field name, if necessary. */
928 if (var || align != 0)
933 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
934 align / BITS_PER_UNIT);
936 strcpy (suffix, "XVL");
938 field_name = concat_name (field_name, suffix);
941 new_field = create_field_decl (field_name, field_type,
943 DECL_SIZE (old_field), pos, 0);
944 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
945 TYPE_FIELDS (new_record_type) = new_field;
947 /* If old_field is a QUAL_UNION_TYPE, take its size as being
948 zero. The only time it's not the last field of the record
949 is when there are other components at fixed positions after
950 it (meaning there was a rep clause for every field) and we
951 want to be able to encode them. */
952 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
953 (TREE_CODE (TREE_TYPE (old_field))
956 : DECL_SIZE (old_field));
957 prev_old_field = old_field;
960 TYPE_FIELDS (new_record_type)
961 = nreverse (TYPE_FIELDS (new_record_type));
963 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
966 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
969 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
972 add_parallel_type (tree decl, tree parallel_type)
976 while (DECL_PARALLEL_TYPE (d))
977 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
979 SET_DECL_PARALLEL_TYPE (d, parallel_type);
982 /* Return the parallel type associated to a type, if any. */
985 get_parallel_type (tree type)
987 if (TYPE_STUB_DECL (type))
988 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
993 /* Utility function of above to merge LAST_SIZE, the previous size of a record
994 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
995 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
996 replace a value of zero with the old size. If HAS_REP is true, we take the
997 MAX of the end position of this field with LAST_SIZE. In all other cases,
998 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1001 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1004 tree type = TREE_TYPE (last_size);
1007 if (!special || TREE_CODE (size) != COND_EXPR)
1009 new = size_binop (PLUS_EXPR, first_bit, size);
1011 new = size_binop (MAX_EXPR, last_size, new);
1015 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1016 integer_zerop (TREE_OPERAND (size, 1))
1017 ? last_size : merge_sizes (last_size, first_bit,
1018 TREE_OPERAND (size, 1),
1020 integer_zerop (TREE_OPERAND (size, 2))
1021 ? last_size : merge_sizes (last_size, first_bit,
1022 TREE_OPERAND (size, 2),
1025 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1026 when fed through substitute_in_expr) into thinking that a constant
1027 size is not constant. */
1028 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1029 new = TREE_OPERAND (new, 0);
1034 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1035 related by the addition of a constant. Return that constant if so. */
1038 compute_related_constant (tree op0, tree op1)
1040 tree op0_var, op1_var;
1041 tree op0_con = split_plus (op0, &op0_var);
1042 tree op1_con = split_plus (op1, &op1_var);
1043 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1045 if (operand_equal_p (op0_var, op1_var, 0))
1047 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1053 /* Utility function of above to split a tree OP which may be a sum, into a
1054 constant part, which is returned, and a variable part, which is stored
1055 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1059 split_plus (tree in, tree *pvar)
1061 /* Strip NOPS in order to ease the tree traversal and maximize the
1062 potential for constant or plus/minus discovery. We need to be careful
1063 to always return and set *pvar to bitsizetype trees, but it's worth
1067 *pvar = convert (bitsizetype, in);
1069 if (TREE_CODE (in) == INTEGER_CST)
1071 *pvar = bitsize_zero_node;
1072 return convert (bitsizetype, in);
1074 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1076 tree lhs_var, rhs_var;
1077 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1078 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1080 if (lhs_var == TREE_OPERAND (in, 0)
1081 && rhs_var == TREE_OPERAND (in, 1))
1082 return bitsize_zero_node;
1084 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1085 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1088 return bitsize_zero_node;
1091 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1092 subprogram. If it is void_type_node, then we are dealing with a procedure,
1093 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1094 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1095 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1096 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1097 object. RETURNS_BY_REF is true if the function returns by reference.
1098 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1099 first parameter) the address of the place to copy its result. */
1102 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1103 bool returns_unconstrained, bool returns_by_ref,
1104 bool returns_by_target_ptr)
1106 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1107 the subprogram formal parameters. This list is generated by traversing the
1108 input list of PARM_DECL nodes. */
1109 tree param_type_list = NULL;
1113 for (param_decl = param_decl_list; param_decl;
1114 param_decl = TREE_CHAIN (param_decl))
1115 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1118 /* The list of the function parameter types has to be terminated by the void
1119 type to signal to the back-end that we are not dealing with a variable
1120 parameter subprogram, but that the subprogram has a fixed number of
1122 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1124 /* The list of argument types has been created in reverse
1126 param_type_list = nreverse (param_type_list);
1128 type = build_function_type (return_type, param_type_list);
1130 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1131 or the new type should, make a copy of TYPE. Likewise for
1132 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1133 if (TYPE_CI_CO_LIST (type) || cico_list
1134 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1135 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1136 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1137 type = copy_type (type);
1139 TYPE_CI_CO_LIST (type) = cico_list;
1140 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1141 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1142 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1146 /* Return a copy of TYPE but safe to modify in any way. */
1149 copy_type (tree type)
1151 tree new = copy_node (type);
1153 /* copy_node clears this field instead of copying it, because it is
1154 aliased with TREE_CHAIN. */
1155 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1157 TYPE_POINTER_TO (new) = 0;
1158 TYPE_REFERENCE_TO (new) = 0;
1159 TYPE_MAIN_VARIANT (new) = new;
1160 TYPE_NEXT_VARIANT (new) = 0;
1165 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1166 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1170 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1172 /* First build a type for the desired range. */
1173 tree type = build_index_2_type (min, max);
1175 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1176 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1177 is set, but not to INDEX, make a copy of this type with the requested
1178 index type. Note that we have no way of sharing these types, but that's
1179 only a small hole. */
1180 if (TYPE_INDEX_TYPE (type) == index)
1182 else if (TYPE_INDEX_TYPE (type))
1183 type = copy_type (type);
1185 SET_TYPE_INDEX_TYPE (type, index);
1186 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1190 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1191 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1195 create_type_stub_decl (tree type_name, tree type)
1197 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1198 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1199 emitted in DWARF. */
1200 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1201 DECL_ARTIFICIAL (type_decl) = 1;
1205 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1206 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1207 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1208 true if we need to write debug information about this type. GNAT_NODE
1209 is used for the position of the decl. */
1212 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1213 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1215 enum tree_code code = TREE_CODE (type);
1216 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1219 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1220 gcc_assert (!TYPE_IS_DUMMY_P (type));
1222 /* If the type hasn't been named yet, we're naming it; preserve an existing
1223 TYPE_STUB_DECL that has been attached to it for some purpose. */
1224 if (!named && TYPE_STUB_DECL (type))
1226 type_decl = TYPE_STUB_DECL (type);
1227 DECL_NAME (type_decl) = type_name;
1230 type_decl = build_decl (TYPE_DECL, type_name, type);
1232 DECL_ARTIFICIAL (type_decl) = artificial_p;
1233 gnat_pushdecl (type_decl, gnat_node);
1234 process_attributes (type_decl, attr_list);
1236 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1237 This causes the name to be also viewed as a "tag" by the debug
1238 back-end, with the advantage that no DW_TAG_typedef is emitted
1239 for artificial "tagged" types in DWARF. */
1241 TYPE_STUB_DECL (type) = type_decl;
1243 /* Pass the type declaration to the debug back-end unless this is an
1244 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1245 type for which debugging information was not requested, or else an
1246 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1247 handled separately. And do not pass dummy types either. */
1248 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1249 DECL_IGNORED_P (type_decl) = 1;
1250 else if (code != ENUMERAL_TYPE
1251 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1252 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1253 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1254 && !(code == RECORD_TYPE
1256 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1257 rest_of_type_decl_compilation (type_decl);
1262 /* Return a VAR_DECL or CONST_DECL node.
1264 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1265 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1266 the GCC tree for an optional initial expression; NULL_TREE if none.
1268 CONST_FLAG is true if this variable is constant, in which case we might
1269 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1271 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1272 definition to be made visible outside of the current compilation unit, for
1273 instance variable definitions in a package specification.
1275 EXTERN_FLAG is true when processing an external variable declaration (as
1276 opposed to a definition: no storage is to be allocated for the variable).
1278 STATIC_FLAG is only relevant when not at top level. In that case
1279 it indicates whether to always allocate storage to the variable.
1281 GNAT_NODE is used for the position of the decl. */
1284 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1285 bool const_flag, bool public_flag, bool extern_flag,
1286 bool static_flag, bool const_decl_allowed_p,
1287 struct attrib *attr_list, Node_Id gnat_node)
1291 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1292 && (global_bindings_p () || static_flag
1293 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1294 : TREE_CONSTANT (var_init)));
1296 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1297 case the initializer may be used in-lieu of the DECL node (as done in
1298 Identifier_to_gnu). This is useful to prevent the need of elaboration
1299 code when an identifier for which such a decl is made is in turn used as
1300 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1301 but extra constraints apply to this choice (see below) and are not
1302 relevant to the distinction we wish to make. */
1303 bool constant_p = const_flag && init_const;
1305 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1306 and may be used for scalars in general but not for aggregates. */
1308 = build_decl ((constant_p && const_decl_allowed_p
1309 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1312 /* If this is external, throw away any initializations (they will be done
1313 elsewhere) unless this is a constant for which we would like to remain
1314 able to get the initializer. If we are defining a global here, leave a
1315 constant initialization and save any variable elaborations for the
1316 elaboration routine. If we are just annotating types, throw away the
1317 initialization if it isn't a constant. */
1318 if ((extern_flag && !constant_p)
1319 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1320 var_init = NULL_TREE;
1322 /* At the global level, an initializer requiring code to be generated
1323 produces elaboration statements. Check that such statements are allowed,
1324 that is, not violating a No_Elaboration_Code restriction. */
1325 if (global_bindings_p () && var_init != 0 && ! init_const)
1326 Check_Elaboration_Code_Allowed (gnat_node);
1328 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1329 try to fiddle with DECL_COMMON. However, on platforms that don't
1330 support global BSS sections, uninitialized global variables would
1331 go in DATA instead, thus increasing the size of the executable. */
1333 && TREE_CODE (var_decl) == VAR_DECL
1334 && !have_global_bss_p ())
1335 DECL_COMMON (var_decl) = 1;
1336 DECL_INITIAL (var_decl) = var_init;
1337 TREE_READONLY (var_decl) = const_flag;
1338 DECL_EXTERNAL (var_decl) = extern_flag;
1339 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1340 TREE_CONSTANT (var_decl) = constant_p;
1341 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1342 = TYPE_VOLATILE (type);
1344 /* If it's public and not external, always allocate storage for it.
1345 At the global binding level we need to allocate static storage for the
1346 variable if and only if it's not external. If we are not at the top level
1347 we allocate automatic storage unless requested not to. */
1348 TREE_STATIC (var_decl)
1349 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1351 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1352 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1354 process_attributes (var_decl, attr_list);
1356 /* Add this decl to the current binding level. */
1357 gnat_pushdecl (var_decl, gnat_node);
1359 if (TREE_SIDE_EFFECTS (var_decl))
1360 TREE_ADDRESSABLE (var_decl) = 1;
1362 if (TREE_CODE (var_decl) != CONST_DECL)
1364 if (global_bindings_p ())
1365 rest_of_decl_compilation (var_decl, true, 0);
1368 expand_decl (var_decl);
1373 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1376 aggregate_type_contains_array_p (tree type)
1378 switch (TREE_CODE (type))
1382 case QUAL_UNION_TYPE:
1385 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1386 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1387 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1400 /* Return a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1401 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1402 this field is in a record type with a "pragma pack". If SIZE is nonzero
1403 it is the specified size for this field. If POS is nonzero, it is the bit
1404 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1405 the address of this field for aliasing purposes. If it is negative, we
1406 should not make a bitfield, which is used by make_aligning_type. */
1409 create_field_decl (tree field_name, tree field_type, tree record_type,
1410 int packed, tree size, tree pos, int addressable)
1412 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1414 DECL_CONTEXT (field_decl) = record_type;
1415 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1417 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1418 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1419 Likewise for an aggregate without specified position that contains an
1420 array, because in this case slices of variable length of this array
1421 must be handled by GCC and variable-sized objects need to be aligned
1422 to at least a byte boundary. */
1423 if (packed && (TYPE_MODE (field_type) == BLKmode
1425 && AGGREGATE_TYPE_P (field_type)
1426 && aggregate_type_contains_array_p (field_type))))
1427 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1429 /* If a size is specified, use it. Otherwise, if the record type is packed
1430 compute a size to use, which may differ from the object's natural size.
1431 We always set a size in this case to trigger the checks for bitfield
1432 creation below, which is typically required when no position has been
1435 size = convert (bitsizetype, size);
1436 else if (packed == 1)
1438 size = rm_size (field_type);
1440 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1442 if (TREE_CODE (size) == INTEGER_CST
1443 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1444 size = round_up (size, BITS_PER_UNIT);
1447 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1448 specified for two reasons: first if the size differs from the natural
1449 size. Second, if the alignment is insufficient. There are a number of
1450 ways the latter can be true.
1452 We never make a bitfield if the type of the field has a nonconstant size,
1453 because no such entity requiring bitfield operations should reach here.
1455 We do *preventively* make a bitfield when there might be the need for it
1456 but we don't have all the necessary information to decide, as is the case
1457 of a field with no specified position in a packed record.
1459 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1460 in layout_decl or finish_record_type to clear the bit_field indication if
1461 it is in fact not needed. */
1462 if (addressable >= 0
1464 && TREE_CODE (size) == INTEGER_CST
1465 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1466 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1467 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1469 || (TYPE_ALIGN (record_type) != 0
1470 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1472 DECL_BIT_FIELD (field_decl) = 1;
1473 DECL_SIZE (field_decl) = size;
1474 if (!packed && !pos)
1475 DECL_ALIGN (field_decl)
1476 = (TYPE_ALIGN (record_type) != 0
1477 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1478 : TYPE_ALIGN (field_type));
1481 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1483 /* Bump the alignment if need be, either for bitfield/packing purposes or
1484 to satisfy the type requirements if no such consideration applies. When
1485 we get the alignment from the type, indicate if this is from an explicit
1486 user request, which prevents stor-layout from lowering it later on. */
1488 unsigned int bit_align
1489 = (DECL_BIT_FIELD (field_decl) ? 1
1490 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1492 if (bit_align > DECL_ALIGN (field_decl))
1493 DECL_ALIGN (field_decl) = bit_align;
1494 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1496 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1497 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1503 /* We need to pass in the alignment the DECL is known to have.
1504 This is the lowest-order bit set in POS, but no more than
1505 the alignment of the record, if one is specified. Note
1506 that an alignment of 0 is taken as infinite. */
1507 unsigned int known_align;
1509 if (host_integerp (pos, 1))
1510 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1512 known_align = BITS_PER_UNIT;
1514 if (TYPE_ALIGN (record_type)
1515 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1516 known_align = TYPE_ALIGN (record_type);
1518 layout_decl (field_decl, known_align);
1519 SET_DECL_OFFSET_ALIGN (field_decl,
1520 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1522 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1523 &DECL_FIELD_BIT_OFFSET (field_decl),
1524 DECL_OFFSET_ALIGN (field_decl), pos);
1527 /* In addition to what our caller says, claim the field is addressable if we
1528 know that its type is not suitable.
1530 The field may also be "technically" nonaddressable, meaning that even if
1531 we attempt to take the field's address we will actually get the address
1532 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1533 value we have at this point is not accurate enough, so we don't account
1534 for this here and let finish_record_type decide. */
1535 if (!addressable && !type_for_nonaliased_component_p (field_type))
1538 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1543 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1544 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1545 (either an In parameter or an address of a pass-by-ref parameter). */
1548 create_param_decl (tree param_name, tree param_type, bool readonly)
1550 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1552 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1553 can lead to various ABI violations. */
1554 if (targetm.calls.promote_prototypes (NULL_TREE)
1555 && INTEGRAL_TYPE_P (param_type)
1556 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1558 /* We have to be careful about biased types here. Make a subtype
1559 of integer_type_node with the proper biasing. */
1560 if (TREE_CODE (param_type) == INTEGER_TYPE
1561 && TYPE_BIASED_REPRESENTATION_P (param_type))
1564 = copy_type (build_range_type (integer_type_node,
1565 TYPE_MIN_VALUE (param_type),
1566 TYPE_MAX_VALUE (param_type)));
1568 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1571 param_type = integer_type_node;
1574 DECL_ARG_TYPE (param_decl) = param_type;
1575 TREE_READONLY (param_decl) = readonly;
1579 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1582 process_attributes (tree decl, struct attrib *attr_list)
1584 for (; attr_list; attr_list = attr_list->next)
1585 switch (attr_list->type)
1587 case ATTR_MACHINE_ATTRIBUTE:
1588 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1590 ATTR_FLAG_TYPE_IN_PLACE);
1593 case ATTR_LINK_ALIAS:
1594 if (! DECL_EXTERNAL (decl))
1596 TREE_STATIC (decl) = 1;
1597 assemble_alias (decl, attr_list->name);
1601 case ATTR_WEAK_EXTERNAL:
1603 declare_weak (decl);
1605 post_error ("?weak declarations not supported on this target",
1606 attr_list->error_point);
1609 case ATTR_LINK_SECTION:
1610 if (targetm.have_named_sections)
1612 DECL_SECTION_NAME (decl)
1613 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1614 IDENTIFIER_POINTER (attr_list->name));
1615 DECL_COMMON (decl) = 0;
1618 post_error ("?section attributes are not supported for this target",
1619 attr_list->error_point);
1622 case ATTR_LINK_CONSTRUCTOR:
1623 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1624 TREE_USED (decl) = 1;
1627 case ATTR_LINK_DESTRUCTOR:
1628 DECL_STATIC_DESTRUCTOR (decl) = 1;
1629 TREE_USED (decl) = 1;
1632 case ATTR_THREAD_LOCAL_STORAGE:
1633 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1634 DECL_COMMON (decl) = 0;
1639 /* Record a global renaming pointer. */
1642 record_global_renaming_pointer (tree decl)
1644 gcc_assert (DECL_RENAMED_OBJECT (decl));
1645 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1648 /* Invalidate the global renaming pointers. */
1651 invalidate_global_renaming_pointers (void)
1656 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1657 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1659 VEC_free (tree, gc, global_renaming_pointers);
1662 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1666 value_factor_p (tree value, HOST_WIDE_INT factor)
1668 if (host_integerp (value, 1))
1669 return tree_low_cst (value, 1) % factor == 0;
1671 if (TREE_CODE (value) == MULT_EXPR)
1672 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1673 || value_factor_p (TREE_OPERAND (value, 1), factor));
1678 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1679 unless we can prove these 2 fields are laid out in such a way that no gap
1680 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1681 is the distance in bits between the end of PREV_FIELD and the starting
1682 position of CURR_FIELD. It is ignored if null. */
1685 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1687 /* If this is the first field of the record, there cannot be any gap */
1691 /* If the previous field is a union type, then return False: The only
1692 time when such a field is not the last field of the record is when
1693 there are other components at fixed positions after it (meaning there
1694 was a rep clause for every field), in which case we don't want the
1695 alignment constraint to override them. */
1696 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1699 /* If the distance between the end of prev_field and the beginning of
1700 curr_field is constant, then there is a gap if the value of this
1701 constant is not null. */
1702 if (offset && host_integerp (offset, 1))
1703 return !integer_zerop (offset);
1705 /* If the size and position of the previous field are constant,
1706 then check the sum of this size and position. There will be a gap
1707 iff it is not multiple of the current field alignment. */
1708 if (host_integerp (DECL_SIZE (prev_field), 1)
1709 && host_integerp (bit_position (prev_field), 1))
1710 return ((tree_low_cst (bit_position (prev_field), 1)
1711 + tree_low_cst (DECL_SIZE (prev_field), 1))
1712 % DECL_ALIGN (curr_field) != 0);
1714 /* If both the position and size of the previous field are multiples
1715 of the current field alignment, there cannot be any gap. */
1716 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1717 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1720 /* Fallback, return that there may be a potential gap */
1724 /* Returns a LABEL_DECL node for LABEL_NAME. */
1727 create_label_decl (tree label_name)
1729 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1731 DECL_CONTEXT (label_decl) = current_function_decl;
1732 DECL_MODE (label_decl) = VOIDmode;
1733 DECL_SOURCE_LOCATION (label_decl) = input_location;
1738 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1739 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1740 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1741 PARM_DECL nodes chained through the TREE_CHAIN field).
1743 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1744 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1747 create_subprog_decl (tree subprog_name, tree asm_name,
1748 tree subprog_type, tree param_decl_list, bool inline_flag,
1749 bool public_flag, bool extern_flag,
1750 struct attrib *attr_list, Node_Id gnat_node)
1752 tree return_type = TREE_TYPE (subprog_type);
1753 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1755 /* If this is a non-inline function nested inside an inlined external
1756 function, we cannot honor both requests without cloning the nested
1757 function in the current unit since it is private to the other unit.
1758 We could inline the nested function as well but it's probably better
1759 to err on the side of too little inlining. */
1761 && current_function_decl
1762 && DECL_DECLARED_INLINE_P (current_function_decl)
1763 && DECL_EXTERNAL (current_function_decl))
1764 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1766 DECL_EXTERNAL (subprog_decl) = extern_flag;
1767 TREE_PUBLIC (subprog_decl) = public_flag;
1768 TREE_STATIC (subprog_decl) = 1;
1769 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1770 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1771 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1772 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1773 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1774 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1775 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1776 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1778 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1779 target by-reference return mechanism. This is not supported all the
1780 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1781 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1782 the RESULT_DECL instead - see gnat_genericize for more details. */
1783 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1785 tree result_decl = DECL_RESULT (subprog_decl);
1787 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1788 DECL_BY_REFERENCE (result_decl) = 1;
1793 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1795 /* The expand_main_function circuitry expects "main_identifier_node" to
1796 designate the DECL_NAME of the 'main' entry point, in turn expected
1797 to be declared as the "main" function literally by default. Ada
1798 program entry points are typically declared with a different name
1799 within the binder generated file, exported as 'main' to satisfy the
1800 system expectations. Redirect main_identifier_node in this case. */
1801 if (asm_name == main_identifier_node)
1802 main_identifier_node = DECL_NAME (subprog_decl);
1805 process_attributes (subprog_decl, attr_list);
1807 /* Add this decl to the current binding level. */
1808 gnat_pushdecl (subprog_decl, gnat_node);
1810 /* Output the assembler code and/or RTL for the declaration. */
1811 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1813 return subprog_decl;
1816 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1817 body. This routine needs to be invoked before processing the declarations
1818 appearing in the subprogram. */
1821 begin_subprog_body (tree subprog_decl)
1825 current_function_decl = subprog_decl;
1826 announce_function (subprog_decl);
1828 /* Enter a new binding level and show that all the parameters belong to
1831 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1832 param_decl = TREE_CHAIN (param_decl))
1833 DECL_CONTEXT (param_decl) = subprog_decl;
1835 make_decl_rtl (subprog_decl);
1837 /* We handle pending sizes via the elaboration of types, so we don't need to
1838 save them. This causes them to be marked as part of the outer function
1839 and then discarded. */
1840 get_pending_sizes ();
1844 /* Helper for the genericization callback. Return a dereference of VAL
1845 if it is of a reference type. */
1848 convert_from_reference (tree val)
1850 tree value_type, ref;
1852 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1855 value_type = TREE_TYPE (TREE_TYPE (val));
1856 ref = build1 (INDIRECT_REF, value_type, val);
1858 /* See if what we reference is CONST or VOLATILE, which requires
1859 looking into array types to get to the component type. */
1861 while (TREE_CODE (value_type) == ARRAY_TYPE)
1862 value_type = TREE_TYPE (value_type);
1865 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1866 TREE_THIS_VOLATILE (ref)
1867 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1869 TREE_SIDE_EFFECTS (ref)
1870 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1875 /* Helper for the genericization callback. Returns true if T denotes
1876 a RESULT_DECL with DECL_BY_REFERENCE set. */
1879 is_byref_result (tree t)
1881 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1885 /* Tree walking callback for gnat_genericize. Currently ...
1887 o Adjust references to the function's DECL_RESULT if it is marked
1888 DECL_BY_REFERENCE and so has had its type turned into a reference
1889 type at the end of the function compilation. */
1892 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1894 /* This implementation is modeled after what the C++ front-end is
1895 doing, basis of the downstream passes behavior. */
1897 tree stmt = *stmt_p;
1898 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1900 /* If we have a direct mention of the result decl, dereference. */
1901 if (is_byref_result (stmt))
1903 *stmt_p = convert_from_reference (stmt);
1908 /* Otherwise, no need to walk the same tree twice. */
1909 if (pointer_set_contains (p_set, stmt))
1915 /* If we are taking the address of what now is a reference, just get the
1917 if (TREE_CODE (stmt) == ADDR_EXPR
1918 && is_byref_result (TREE_OPERAND (stmt, 0)))
1920 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1924 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
1925 else if (TREE_CODE (stmt) == RETURN_EXPR
1926 && TREE_OPERAND (stmt, 0)
1927 && is_byref_result (TREE_OPERAND (stmt, 0)))
1930 /* Don't look inside trees that cannot embed references of interest. */
1931 else if (IS_TYPE_OR_DECL_P (stmt))
1934 pointer_set_insert (p_set, *stmt_p);
1939 /* Perform lowering of Ada trees to GENERIC. In particular:
1941 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
1942 and adjust all the references to this decl accordingly. */
1945 gnat_genericize (tree fndecl)
1947 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
1948 was handled by simply setting TREE_ADDRESSABLE on the result type.
1949 Everything required to actually pass by invisible ref using the target
1950 mechanism (e.g. extra parameter) was handled at RTL expansion time.
1952 This doesn't work with GCC 4 any more for several reasons. First, the
1953 gimplification process might need the creation of temporaries of this
1954 type, and the gimplifier ICEs on such attempts. Second, the middle-end
1955 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
1956 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
1957 be explicitly accounted for by the front-end in the function body.
1959 We achieve the complete transformation in two steps:
1961 1/ create_subprog_decl performs early attribute tweaks: it clears
1962 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
1963 the result decl. The former ensures that the bit isn't set in the GCC
1964 tree saved for the function, so prevents ICEs on temporary creation.
1965 The latter we use here to trigger the rest of the processing.
1967 2/ This function performs the type transformation on the result decl
1968 and adjusts all the references to this decl from the function body
1971 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
1972 strategy, which escapes the gimplifier temporary creation issues by
1973 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
1974 on simple specific support code in aggregate_value_p to look at the
1975 target function result decl explicitly. */
1977 struct pointer_set_t *p_set;
1978 tree decl_result = DECL_RESULT (fndecl);
1980 if (!DECL_BY_REFERENCE (decl_result))
1983 /* Make the DECL_RESULT explicitly by-reference and adjust all the
1984 occurrences in the function body using the common tree-walking facility.
1985 We want to see every occurrence of the result decl to adjust the
1986 referencing tree, so need to use our own pointer set to control which
1987 trees should be visited again or not. */
1989 p_set = pointer_set_create ();
1991 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
1992 TREE_ADDRESSABLE (decl_result) = 0;
1993 relayout_decl (decl_result);
1995 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
1997 pointer_set_destroy (p_set);
2000 /* Finish the definition of the current subprogram BODY and compile it all the
2001 way to assembler language output. ELAB_P tells if this is called for an
2002 elaboration routine, to be entirely discarded if empty. */
2005 end_subprog_body (tree body, bool elab_p)
2007 tree fndecl = current_function_decl;
2009 /* Mark the BLOCK for this level as being for this function and pop the
2010 level. Since the vars in it are the parameters, clear them. */
2011 BLOCK_VARS (current_binding_level->block) = 0;
2012 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2013 DECL_INITIAL (fndecl) = current_binding_level->block;
2016 /* We handle pending sizes via the elaboration of types, so we don't
2017 need to save them. */
2018 get_pending_sizes ();
2020 /* Mark the RESULT_DECL as being in this subprogram. */
2021 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2023 DECL_SAVED_TREE (fndecl) = body;
2025 current_function_decl = DECL_CONTEXT (fndecl);
2028 /* We cannot track the location of errors past this point. */
2029 error_gnat_node = Empty;
2031 /* If we're only annotating types, don't actually compile this function. */
2032 if (type_annotate_only)
2035 /* Perform the required pre-gimplification transformations on the tree. */
2036 gnat_genericize (fndecl);
2038 /* We do different things for nested and non-nested functions.
2039 ??? This should be in cgraph. */
2040 if (!DECL_CONTEXT (fndecl))
2042 gnat_gimplify_function (fndecl);
2044 /* If this is an empty elaboration proc, just discard the node.
2045 Otherwise, compile further. */
2046 if (elab_p && empty_body_p (gimple_body (fndecl)))
2047 cgraph_remove_node (cgraph_node (fndecl));
2049 cgraph_finalize_function (fndecl, false);
2052 /* Register this function with cgraph just far enough to get it
2053 added to our parent's nested function list. */
2054 (void) cgraph_node (fndecl);
2057 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2060 gnat_gimplify_function (tree fndecl)
2062 struct cgraph_node *cgn;
2064 dump_function (TDI_original, fndecl);
2065 gimplify_function_tree (fndecl);
2066 dump_function (TDI_generic, fndecl);
2068 /* Convert all nested functions to GIMPLE now. We do things in this order
2069 so that items like VLA sizes are expanded properly in the context of the
2070 correct function. */
2071 cgn = cgraph_node (fndecl);
2072 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2073 gnat_gimplify_function (cgn->decl);
2077 gnat_builtin_function (tree decl)
2079 gnat_pushdecl (decl, Empty);
2083 /* Return an integer type with the number of bits of precision given by
2084 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2085 it is a signed type. */
2088 gnat_type_for_size (unsigned precision, int unsignedp)
2093 if (precision <= 2 * MAX_BITS_PER_WORD
2094 && signed_and_unsigned_types[precision][unsignedp])
2095 return signed_and_unsigned_types[precision][unsignedp];
2098 t = make_unsigned_type (precision);
2100 t = make_signed_type (precision);
2102 if (precision <= 2 * MAX_BITS_PER_WORD)
2103 signed_and_unsigned_types[precision][unsignedp] = t;
2107 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2108 TYPE_NAME (t) = get_identifier (type_name);
2114 /* Likewise for floating-point types. */
2117 float_type_for_precision (int precision, enum machine_mode mode)
2122 if (float_types[(int) mode])
2123 return float_types[(int) mode];
2125 float_types[(int) mode] = t = make_node (REAL_TYPE);
2126 TYPE_PRECISION (t) = precision;
2129 gcc_assert (TYPE_MODE (t) == mode);
2132 sprintf (type_name, "FLOAT_%d", precision);
2133 TYPE_NAME (t) = get_identifier (type_name);
2139 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2140 an unsigned type; otherwise a signed type is returned. */
2143 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2145 if (mode == BLKmode)
2147 else if (mode == VOIDmode)
2148 return void_type_node;
2149 else if (COMPLEX_MODE_P (mode))
2151 else if (SCALAR_FLOAT_MODE_P (mode))
2152 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2153 else if (SCALAR_INT_MODE_P (mode))
2154 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2159 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2162 gnat_unsigned_type (tree type_node)
2164 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2166 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2168 type = copy_node (type);
2169 TREE_TYPE (type) = type_node;
2171 else if (TREE_TYPE (type_node)
2172 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2173 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2175 type = copy_node (type);
2176 TREE_TYPE (type) = TREE_TYPE (type_node);
2182 /* Return the signed version of a TYPE_NODE, a scalar type. */
2185 gnat_signed_type (tree type_node)
2187 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2189 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2191 type = copy_node (type);
2192 TREE_TYPE (type) = type_node;
2194 else if (TREE_TYPE (type_node)
2195 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2196 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2198 type = copy_node (type);
2199 TREE_TYPE (type) = TREE_TYPE (type_node);
2205 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2206 transparently converted to each other. */
2209 gnat_types_compatible_p (tree t1, tree t2)
2211 enum tree_code code;
2213 /* This is the default criterion. */
2214 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2217 /* We only check structural equivalence here. */
2218 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2221 /* Array types are also compatible if they are constrained and have
2222 the same component type and the same domain. */
2223 if (code == ARRAY_TYPE
2224 && TREE_TYPE (t1) == TREE_TYPE (t2)
2225 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2226 || (TYPE_DOMAIN (t1)
2228 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2229 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2230 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2231 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2234 /* Padding record types are also compatible if they pad the same
2235 type and have the same constant size. */
2236 if (code == RECORD_TYPE
2237 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2238 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2239 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2245 /* EXP is an expression for the size of an object. If this size contains
2246 discriminant references, replace them with the maximum (if MAX_P) or
2247 minimum (if !MAX_P) possible value of the discriminant. */
2250 max_size (tree exp, bool max_p)
2252 enum tree_code code = TREE_CODE (exp);
2253 tree type = TREE_TYPE (exp);
2255 switch (TREE_CODE_CLASS (code))
2257 case tcc_declaration:
2262 if (code == CALL_EXPR)
2265 int i, n = call_expr_nargs (exp);
2268 argarray = (tree *) alloca (n * sizeof (tree));
2269 for (i = 0; i < n; i++)
2270 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2271 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2276 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2277 modify. Otherwise, we treat it like a variable. */
2278 if (!CONTAINS_PLACEHOLDER_P (exp))
2281 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2283 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2285 case tcc_comparison:
2286 return max_p ? size_one_node : size_zero_node;
2290 case tcc_expression:
2291 switch (TREE_CODE_LENGTH (code))
2294 if (code == NON_LVALUE_EXPR)
2295 return max_size (TREE_OPERAND (exp, 0), max_p);
2298 fold_build1 (code, type,
2299 max_size (TREE_OPERAND (exp, 0),
2300 code == NEGATE_EXPR ? !max_p : max_p));
2303 if (code == COMPOUND_EXPR)
2304 return max_size (TREE_OPERAND (exp, 1), max_p);
2306 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2307 may provide a tighter bound on max_size. */
2308 if (code == MINUS_EXPR
2309 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2311 tree lhs = fold_build2 (MINUS_EXPR, type,
2312 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2313 TREE_OPERAND (exp, 1));
2314 tree rhs = fold_build2 (MINUS_EXPR, type,
2315 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2316 TREE_OPERAND (exp, 1));
2317 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2318 max_size (lhs, max_p),
2319 max_size (rhs, max_p));
2323 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2324 tree rhs = max_size (TREE_OPERAND (exp, 1),
2325 code == MINUS_EXPR ? !max_p : max_p);
2327 /* Special-case wanting the maximum value of a MIN_EXPR.
2328 In that case, if one side overflows, return the other.
2329 sizetype is signed, but we know sizes are non-negative.
2330 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2331 overflowing or the maximum possible value and the RHS
2335 && TREE_CODE (rhs) == INTEGER_CST
2336 && TREE_OVERFLOW (rhs))
2340 && TREE_CODE (lhs) == INTEGER_CST
2341 && TREE_OVERFLOW (lhs))
2343 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2344 && ((TREE_CODE (lhs) == INTEGER_CST
2345 && TREE_OVERFLOW (lhs))
2346 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2347 && !TREE_CONSTANT (rhs))
2350 return fold_build2 (code, type, lhs, rhs);
2354 if (code == SAVE_EXPR)
2356 else if (code == COND_EXPR)
2357 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2358 max_size (TREE_OPERAND (exp, 1), max_p),
2359 max_size (TREE_OPERAND (exp, 2), max_p));
2362 /* Other tree classes cannot happen. */
2370 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2371 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2372 Return a constructor for the template. */
2375 build_template (tree template_type, tree array_type, tree expr)
2377 tree template_elts = NULL_TREE;
2378 tree bound_list = NULL_TREE;
2381 while (TREE_CODE (array_type) == RECORD_TYPE
2382 && (TYPE_IS_PADDING_P (array_type)
2383 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2384 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2386 if (TREE_CODE (array_type) == ARRAY_TYPE
2387 || (TREE_CODE (array_type) == INTEGER_TYPE
2388 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2389 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2391 /* First make the list for a CONSTRUCTOR for the template. Go down the
2392 field list of the template instead of the type chain because this
2393 array might be an Ada array of arrays and we can't tell where the
2394 nested arrays stop being the underlying object. */
2396 for (field = TYPE_FIELDS (template_type); field;
2398 ? (bound_list = TREE_CHAIN (bound_list))
2399 : (array_type = TREE_TYPE (array_type))),
2400 field = TREE_CHAIN (TREE_CHAIN (field)))
2402 tree bounds, min, max;
2404 /* If we have a bound list, get the bounds from there. Likewise
2405 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2406 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2407 This will give us a maximum range. */
2409 bounds = TREE_VALUE (bound_list);
2410 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2411 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2412 else if (expr && TREE_CODE (expr) == PARM_DECL
2413 && DECL_BY_COMPONENT_PTR_P (expr))
2414 bounds = TREE_TYPE (field);
2418 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2419 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2421 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2422 substitute it from OBJECT. */
2423 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2424 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2426 template_elts = tree_cons (TREE_CHAIN (field), max,
2427 tree_cons (field, min, template_elts));
2430 return gnat_build_constructor (template_type, nreverse (template_elts));
2433 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2434 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2435 in the type contains in its DECL_INITIAL the expression to use when
2436 a constructor is made for the type. GNAT_ENTITY is an entity used
2437 to print out an error message if the mechanism cannot be applied to
2438 an object of that type and also for the name. */
2441 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2443 tree record_type = make_node (RECORD_TYPE);
2444 tree pointer32_type;
2445 tree field_list = 0;
2454 /* If TYPE is an unconstrained array, use the underlying array type. */
2455 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2456 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2458 /* If this is an array, compute the number of dimensions in the array,
2459 get the index types, and point to the inner type. */
2460 if (TREE_CODE (type) != ARRAY_TYPE)
2463 for (ndim = 1, inner_type = type;
2464 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2465 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2466 ndim++, inner_type = TREE_TYPE (inner_type))
2469 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2471 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2472 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2473 for (i = ndim - 1, inner_type = type;
2475 i--, inner_type = TREE_TYPE (inner_type))
2476 idx_arr[i] = TYPE_DOMAIN (inner_type);
2478 for (i = 0, inner_type = type;
2480 i++, inner_type = TREE_TYPE (inner_type))
2481 idx_arr[i] = TYPE_DOMAIN (inner_type);
2483 /* Now get the DTYPE value. */
2484 switch (TREE_CODE (type))
2489 if (TYPE_VAX_FLOATING_POINT_P (type))
2490 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2503 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2506 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2509 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2512 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2515 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2518 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2524 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2528 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2529 && TYPE_VAX_FLOATING_POINT_P (type))
2530 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2542 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2553 /* Get the CLASS value. */
2556 case By_Descriptor_A:
2557 case By_Short_Descriptor_A:
2560 case By_Descriptor_NCA:
2561 case By_Short_Descriptor_NCA:
2564 case By_Descriptor_SB:
2565 case By_Short_Descriptor_SB:
2569 case By_Short_Descriptor:
2570 case By_Descriptor_S:
2571 case By_Short_Descriptor_S:
2577 /* Make the type for a descriptor for VMS. The first four fields
2578 are the same for all types. */
2581 = chainon (field_list,
2582 make_descriptor_field
2583 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2584 size_in_bytes ((mech == By_Descriptor_A ||
2585 mech == By_Short_Descriptor_A)
2586 ? inner_type : type)));
2588 field_list = chainon (field_list,
2589 make_descriptor_field ("DTYPE",
2590 gnat_type_for_size (8, 1),
2591 record_type, size_int (dtype)));
2592 field_list = chainon (field_list,
2593 make_descriptor_field ("CLASS",
2594 gnat_type_for_size (8, 1),
2595 record_type, size_int (class)));
2597 /* Of course this will crash at run-time if the address space is not
2598 within the low 32 bits, but there is nothing else we can do. */
2599 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2602 = chainon (field_list,
2603 make_descriptor_field
2604 ("POINTER", pointer32_type, record_type,
2605 build_unary_op (ADDR_EXPR,
2607 build0 (PLACEHOLDER_EXPR, type))));
2612 case By_Short_Descriptor:
2613 case By_Descriptor_S:
2614 case By_Short_Descriptor_S:
2617 case By_Descriptor_SB:
2618 case By_Short_Descriptor_SB:
2620 = chainon (field_list,
2621 make_descriptor_field
2622 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2623 TREE_CODE (type) == ARRAY_TYPE
2624 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2626 = chainon (field_list,
2627 make_descriptor_field
2628 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2629 TREE_CODE (type) == ARRAY_TYPE
2630 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2633 case By_Descriptor_A:
2634 case By_Short_Descriptor_A:
2635 case By_Descriptor_NCA:
2636 case By_Short_Descriptor_NCA:
2637 field_list = chainon (field_list,
2638 make_descriptor_field ("SCALE",
2639 gnat_type_for_size (8, 1),
2643 field_list = chainon (field_list,
2644 make_descriptor_field ("DIGITS",
2645 gnat_type_for_size (8, 1),
2650 = chainon (field_list,
2651 make_descriptor_field
2652 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2653 size_int ((mech == By_Descriptor_NCA ||
2654 mech == By_Short_Descriptor_NCA)
2656 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2657 : (TREE_CODE (type) == ARRAY_TYPE
2658 && TYPE_CONVENTION_FORTRAN_P (type)
2661 field_list = chainon (field_list,
2662 make_descriptor_field ("DIMCT",
2663 gnat_type_for_size (8, 1),
2667 field_list = chainon (field_list,
2668 make_descriptor_field ("ARSIZE",
2669 gnat_type_for_size (32, 1),
2671 size_in_bytes (type)));
2673 /* Now build a pointer to the 0,0,0... element. */
2674 tem = build0 (PLACEHOLDER_EXPR, type);
2675 for (i = 0, inner_type = type; i < ndim;
2676 i++, inner_type = TREE_TYPE (inner_type))
2677 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2678 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2679 NULL_TREE, NULL_TREE);
2682 = chainon (field_list,
2683 make_descriptor_field
2685 build_pointer_type_for_mode (inner_type, SImode, false),
2688 build_pointer_type_for_mode (inner_type, SImode,
2692 /* Next come the addressing coefficients. */
2693 tem = size_one_node;
2694 for (i = 0; i < ndim; i++)
2698 = size_binop (MULT_EXPR, tem,
2699 size_binop (PLUS_EXPR,
2700 size_binop (MINUS_EXPR,
2701 TYPE_MAX_VALUE (idx_arr[i]),
2702 TYPE_MIN_VALUE (idx_arr[i])),
2705 fname[0] = ((mech == By_Descriptor_NCA ||
2706 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2707 fname[1] = '0' + i, fname[2] = 0;
2709 = chainon (field_list,
2710 make_descriptor_field (fname,
2711 gnat_type_for_size (32, 1),
2712 record_type, idx_length));
2714 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2718 /* Finally here are the bounds. */
2719 for (i = 0; i < ndim; i++)
2723 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2725 = chainon (field_list,
2726 make_descriptor_field
2727 (fname, gnat_type_for_size (32, 1), record_type,
2728 TYPE_MIN_VALUE (idx_arr[i])));
2732 = chainon (field_list,
2733 make_descriptor_field
2734 (fname, gnat_type_for_size (32, 1), record_type,
2735 TYPE_MAX_VALUE (idx_arr[i])));
2740 post_error ("unsupported descriptor type for &", gnat_entity);
2743 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2744 finish_record_type (record_type, field_list, 0, true);
2748 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2749 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2750 in the type contains in its DECL_INITIAL the expression to use when
2751 a constructor is made for the type. GNAT_ENTITY is an entity used
2752 to print out an error message if the mechanism cannot be applied to
2753 an object of that type and also for the name. */
2756 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2758 tree record64_type = make_node (RECORD_TYPE);
2759 tree pointer64_type;
2760 tree field_list64 = 0;
2769 /* If TYPE is an unconstrained array, use the underlying array type. */
2770 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2771 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2773 /* If this is an array, compute the number of dimensions in the array,
2774 get the index types, and point to the inner type. */
2775 if (TREE_CODE (type) != ARRAY_TYPE)
2778 for (ndim = 1, inner_type = type;
2779 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2780 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2781 ndim++, inner_type = TREE_TYPE (inner_type))
2784 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2786 if (mech != By_Descriptor_NCA
2787 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2788 for (i = ndim - 1, inner_type = type;
2790 i--, inner_type = TREE_TYPE (inner_type))
2791 idx_arr[i] = TYPE_DOMAIN (inner_type);
2793 for (i = 0, inner_type = type;
2795 i++, inner_type = TREE_TYPE (inner_type))
2796 idx_arr[i] = TYPE_DOMAIN (inner_type);
2798 /* Now get the DTYPE value. */
2799 switch (TREE_CODE (type))
2804 if (TYPE_VAX_FLOATING_POINT_P (type))
2805 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2818 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2821 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2824 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2827 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2830 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2833 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2839 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2843 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2844 && TYPE_VAX_FLOATING_POINT_P (type))
2845 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2857 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2868 /* Get the CLASS value. */
2871 case By_Descriptor_A:
2874 case By_Descriptor_NCA:
2877 case By_Descriptor_SB:
2881 case By_Descriptor_S:
2887 /* Make the type for a 64bit descriptor for VMS. The first six fields
2888 are the same for all types. */
2890 field_list64 = chainon (field_list64,
2891 make_descriptor_field ("MBO",
2892 gnat_type_for_size (16, 1),
2893 record64_type, size_int (1)));
2895 field_list64 = chainon (field_list64,
2896 make_descriptor_field ("DTYPE",
2897 gnat_type_for_size (8, 1),
2898 record64_type, size_int (dtype)));
2899 field_list64 = chainon (field_list64,
2900 make_descriptor_field ("CLASS",
2901 gnat_type_for_size (8, 1),
2902 record64_type, size_int (class)));
2904 field_list64 = chainon (field_list64,
2905 make_descriptor_field ("MBMO",
2906 gnat_type_for_size (32, 1),
2907 record64_type, ssize_int (-1)));
2910 = chainon (field_list64,
2911 make_descriptor_field
2912 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2913 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2915 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2918 = chainon (field_list64,
2919 make_descriptor_field
2920 ("POINTER", pointer64_type, record64_type,
2921 build_unary_op (ADDR_EXPR,
2923 build0 (PLACEHOLDER_EXPR, type))));
2928 case By_Descriptor_S:
2931 case By_Descriptor_SB:
2933 = chainon (field_list64,
2934 make_descriptor_field
2935 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2936 TREE_CODE (type) == ARRAY_TYPE
2937 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2939 = chainon (field_list64,
2940 make_descriptor_field
2941 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2942 TREE_CODE (type) == ARRAY_TYPE
2943 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2946 case By_Descriptor_A:
2947 case By_Descriptor_NCA:
2948 field_list64 = chainon (field_list64,
2949 make_descriptor_field ("SCALE",
2950 gnat_type_for_size (8, 1),
2954 field_list64 = chainon (field_list64,
2955 make_descriptor_field ("DIGITS",
2956 gnat_type_for_size (8, 1),
2961 = chainon (field_list64,
2962 make_descriptor_field
2963 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2964 size_int (mech == By_Descriptor_NCA
2966 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2967 : (TREE_CODE (type) == ARRAY_TYPE
2968 && TYPE_CONVENTION_FORTRAN_P (type)
2971 field_list64 = chainon (field_list64,
2972 make_descriptor_field ("DIMCT",
2973 gnat_type_for_size (8, 1),
2977 field_list64 = chainon (field_list64,
2978 make_descriptor_field ("MBZ",
2979 gnat_type_for_size (32, 1),
2982 field_list64 = chainon (field_list64,
2983 make_descriptor_field ("ARSIZE",
2984 gnat_type_for_size (64, 1),
2986 size_in_bytes (type)));
2988 /* Now build a pointer to the 0,0,0... element. */
2989 tem = build0 (PLACEHOLDER_EXPR, type);
2990 for (i = 0, inner_type = type; i < ndim;
2991 i++, inner_type = TREE_TYPE (inner_type))
2992 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2993 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2994 NULL_TREE, NULL_TREE);
2997 = chainon (field_list64,
2998 make_descriptor_field
3000 build_pointer_type_for_mode (inner_type, DImode, false),
3003 build_pointer_type_for_mode (inner_type, DImode,
3007 /* Next come the addressing coefficients. */
3008 tem = size_one_node;
3009 for (i = 0; i < ndim; i++)
3013 = size_binop (MULT_EXPR, tem,
3014 size_binop (PLUS_EXPR,
3015 size_binop (MINUS_EXPR,
3016 TYPE_MAX_VALUE (idx_arr[i]),
3017 TYPE_MIN_VALUE (idx_arr[i])),
3020 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3021 fname[1] = '0' + i, fname[2] = 0;
3023 = chainon (field_list64,
3024 make_descriptor_field (fname,
3025 gnat_type_for_size (64, 1),
3026 record64_type, idx_length));
3028 if (mech == By_Descriptor_NCA)
3032 /* Finally here are the bounds. */
3033 for (i = 0; i < ndim; i++)
3037 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3039 = chainon (field_list64,
3040 make_descriptor_field
3041 (fname, gnat_type_for_size (64, 1), record64_type,
3042 TYPE_MIN_VALUE (idx_arr[i])));
3046 = chainon (field_list64,
3047 make_descriptor_field
3048 (fname, gnat_type_for_size (64, 1), record64_type,
3049 TYPE_MAX_VALUE (idx_arr[i])));
3054 post_error ("unsupported descriptor type for &", gnat_entity);
3057 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3058 finish_record_type (record64_type, field_list64, 0, true);
3059 return record64_type;
3062 /* Utility routine for above code to make a field. */
3065 make_descriptor_field (const char *name, tree type,
3066 tree rec_type, tree initial)
3069 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3071 DECL_INITIAL (field) = initial;
3075 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3076 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3077 which the VMS descriptor is passed. */
3080 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3082 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3083 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3084 /* The CLASS field is the 3rd field in the descriptor. */
3085 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3086 /* The POINTER field is the 6th field in the descriptor. */
3087 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3089 /* Retrieve the value of the POINTER field. */
3091 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3093 if (POINTER_TYPE_P (gnu_type))
3094 return convert (gnu_type, gnu_expr64);
3096 else if (TYPE_FAT_POINTER_P (gnu_type))
3098 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3099 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3100 tree template_type = TREE_TYPE (p_bounds_type);
3101 tree min_field = TYPE_FIELDS (template_type);
3102 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3103 tree template, template_addr, aflags, dimct, t, u;
3104 /* See the head comment of build_vms_descriptor. */
3105 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3106 tree lfield, ufield;
3108 /* Convert POINTER to the type of the P_ARRAY field. */
3109 gnu_expr64 = convert (p_array_type, gnu_expr64);
3113 case 1: /* Class S */
3114 case 15: /* Class SB */
3115 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3116 t = TREE_CHAIN (TREE_CHAIN (class));
3117 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3118 t = tree_cons (min_field,
3119 convert (TREE_TYPE (min_field), integer_one_node),
3120 tree_cons (max_field,
3121 convert (TREE_TYPE (max_field), t),
3123 template = gnat_build_constructor (template_type, t);
3124 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3126 /* For class S, we are done. */
3130 /* Test that we really have a SB descriptor, like DEC Ada. */
3131 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3132 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3133 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3134 /* If so, there is already a template in the descriptor and
3135 it is located right after the POINTER field. The fields are
3136 64bits so they must be repacked. */
3137 t = TREE_CHAIN (pointer64);
3138 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3139 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3142 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3144 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3146 /* Build the template in the form of a constructor. */
3147 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3148 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3149 ufield, NULL_TREE));
3150 template = gnat_build_constructor (template_type, t);
3152 /* Otherwise use the {1, LENGTH} template we build above. */
3153 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3154 build_unary_op (ADDR_EXPR, p_bounds_type,
3159 case 4: /* Class A */
3160 /* The AFLAGS field is the 3rd field after the pointer in the
3162 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3163 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3164 /* The DIMCT field is the next field in the descriptor after
3167 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3168 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3169 or FL_COEFF or FL_BOUNDS not set. */
3170 u = build_int_cst (TREE_TYPE (aflags), 192);
3171 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3172 build_binary_op (NE_EXPR, integer_type_node,
3174 convert (TREE_TYPE (dimct),
3176 build_binary_op (NE_EXPR, integer_type_node,
3177 build2 (BIT_AND_EXPR,
3181 /* There is already a template in the descriptor and it is located
3182 in block 3. The fields are 64bits so they must be repacked. */
3183 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3185 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3186 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3189 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3191 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3193 /* Build the template in the form of a constructor. */
3194 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3195 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3196 ufield, NULL_TREE));
3197 template = gnat_build_constructor (template_type, t);
3198 template = build3 (COND_EXPR, p_bounds_type, u,
3199 build_call_raise (CE_Length_Check_Failed, Empty,
3200 N_Raise_Constraint_Error),
3202 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3205 case 10: /* Class NCA */
3207 post_error ("unsupported descriptor type for &", gnat_subprog);
3208 template_addr = integer_zero_node;
3212 /* Build the fat pointer in the form of a constructor. */
3213 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3214 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3215 template_addr, NULL_TREE));
3216 return gnat_build_constructor (gnu_type, t);
3223 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3224 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3225 which the VMS descriptor is passed. */
3228 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3230 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3231 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3232 /* The CLASS field is the 3rd field in the descriptor. */
3233 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3234 /* The POINTER field is the 4th field in the descriptor. */
3235 tree pointer = TREE_CHAIN (class);
3237 /* Retrieve the value of the POINTER field. */
3239 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3241 if (POINTER_TYPE_P (gnu_type))
3242 return convert (gnu_type, gnu_expr32);
3244 else if (TYPE_FAT_POINTER_P (gnu_type))
3246 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3247 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3248 tree template_type = TREE_TYPE (p_bounds_type);
3249 tree min_field = TYPE_FIELDS (template_type);
3250 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3251 tree template, template_addr, aflags, dimct, t, u;
3252 /* See the head comment of build_vms_descriptor. */
3253 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3255 /* Convert POINTER to the type of the P_ARRAY field. */
3256 gnu_expr32 = convert (p_array_type, gnu_expr32);
3260 case 1: /* Class S */
3261 case 15: /* Class SB */
3262 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3263 t = TYPE_FIELDS (desc_type);
3264 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3265 t = tree_cons (min_field,
3266 convert (TREE_TYPE (min_field), integer_one_node),
3267 tree_cons (max_field,
3268 convert (TREE_TYPE (max_field), t),
3270 template = gnat_build_constructor (template_type, t);
3271 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3273 /* For class S, we are done. */
3277 /* Test that we really have a SB descriptor, like DEC Ada. */
3278 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3279 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3280 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3281 /* If so, there is already a template in the descriptor and
3282 it is located right after the POINTER field. */
3283 t = TREE_CHAIN (pointer);
3284 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3285 /* Otherwise use the {1, LENGTH} template we build above. */
3286 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3287 build_unary_op (ADDR_EXPR, p_bounds_type,
3292 case 4: /* Class A */
3293 /* The AFLAGS field is the 7th field in the descriptor. */
3294 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3295 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3296 /* The DIMCT field is the 8th field in the descriptor. */
3298 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3299 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3300 or FL_COEFF or FL_BOUNDS not set. */
3301 u = build_int_cst (TREE_TYPE (aflags), 192);
3302 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3303 build_binary_op (NE_EXPR, integer_type_node,
3305 convert (TREE_TYPE (dimct),
3307 build_binary_op (NE_EXPR, integer_type_node,
3308 build2 (BIT_AND_EXPR,
3312 /* There is already a template in the descriptor and it is
3313 located at the start of block 3 (12th field). */
3314 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3315 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3316 template = build3 (COND_EXPR, p_bounds_type, u,
3317 build_call_raise (CE_Length_Check_Failed, Empty,
3318 N_Raise_Constraint_Error),
3320 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3323 case 10: /* Class NCA */
3325 post_error ("unsupported descriptor type for &", gnat_subprog);
3326 template_addr = integer_zero_node;
3330 /* Build the fat pointer in the form of a constructor. */
3331 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3332 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3333 template_addr, NULL_TREE));
3335 return gnat_build_constructor (gnu_type, t);
3342 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3343 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3344 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3345 VMS descriptor is passed. */
3348 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3349 Entity_Id gnat_subprog)
3351 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3352 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3353 tree mbo = TYPE_FIELDS (desc_type);
3354 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3355 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3356 tree is64bit, gnu_expr32, gnu_expr64;
3358 /* If the field name is not MBO, it must be 32-bit and no alternate.
3359 Otherwise primary must be 64-bit and alternate 32-bit. */
3360 if (strcmp (mbostr, "MBO") != 0)
3361 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3363 /* Build the test for 64-bit descriptor. */
3364 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3365 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3367 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3368 build_binary_op (EQ_EXPR, integer_type_node,
3369 convert (integer_type_node, mbo),
3371 build_binary_op (EQ_EXPR, integer_type_node,
3372 convert (integer_type_node, mbmo),
3373 integer_minus_one_node));
3375 /* Build the 2 possible end results. */
3376 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3377 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3378 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3380 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3383 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3384 and the GNAT node GNAT_SUBPROG. */
3387 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3389 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3390 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3391 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3394 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3395 gnu_param_list = NULL_TREE;
3397 begin_subprog_body (gnu_stub_decl);
3400 start_stmt_group ();
3402 /* Loop over the parameters of the stub and translate any of them
3403 passed by descriptor into a by reference one. */
3404 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3405 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3407 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3408 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3410 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3412 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3414 DECL_PARM_ALT_TYPE (gnu_stub_param),
3417 gnu_param = gnu_stub_param;
3419 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3422 gnu_body = end_stmt_group ();
3424 /* Invoke the internal subprogram. */
3425 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3427 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3429 nreverse (gnu_param_list));
3431 /* Propagate the return value, if any. */
3432 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3433 append_to_statement_list (gnu_subprog_call, &gnu_body);
3435 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3441 allocate_struct_function (gnu_stub_decl, false);
3442 end_subprog_body (gnu_body, false);
3445 /* Build a type to be used to represent an aliased object whose nominal
3446 type is an unconstrained array. This consists of a RECORD_TYPE containing
3447 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3448 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3449 is used to represent an arbitrary unconstrained object. Use NAME
3450 as the name of the record. */
3453 build_unc_object_type (tree template_type, tree object_type, tree name)
3455 tree type = make_node (RECORD_TYPE);
3456 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3457 template_type, type, 0, 0, 0, 1);
3458 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3461 TYPE_NAME (type) = name;
3462 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3463 finish_record_type (type,
3464 chainon (chainon (NULL_TREE, template_field),
3471 /* Same, taking a thin or fat pointer type instead of a template type. */
3474 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3479 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3482 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3483 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3484 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3485 return build_unc_object_type (template_type, object_type, name);
3488 /* Shift the component offsets within an unconstrained object TYPE to make it
3489 suitable for use as a designated type for thin pointers. */
3492 shift_unc_components_for_thin_pointers (tree type)
3494 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3495 allocated past the BOUNDS template. The designated type is adjusted to
3496 have ARRAY at position zero and the template at a negative offset, so
3497 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3499 tree bounds_field = TYPE_FIELDS (type);
3500 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3502 DECL_FIELD_OFFSET (bounds_field)
3503 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3505 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3506 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3509 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3510 In the normal case this is just two adjustments, but we have more to
3511 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3514 update_pointer_to (tree old_type, tree new_type)
3516 tree ptr = TYPE_POINTER_TO (old_type);
3517 tree ref = TYPE_REFERENCE_TO (old_type);
3521 /* If this is the main variant, process all the other variants first. */
3522 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3523 for (type = TYPE_NEXT_VARIANT (old_type); type;
3524 type = TYPE_NEXT_VARIANT (type))
3525 update_pointer_to (type, new_type);
3527 /* If no pointers and no references, we are done. */
3531 /* Merge the old type qualifiers in the new type.
3533 Each old variant has qualifiers for specific reasons, and the new
3534 designated type as well. Each set of qualifiers represents useful
3535 information grabbed at some point, and merging the two simply unifies
3536 these inputs into the final type description.
3538 Consider for instance a volatile type frozen after an access to constant
3539 type designating it; after the designated type's freeze, we get here with
3540 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3541 when the access type was processed. We will make a volatile and readonly
3542 designated type, because that's what it really is.
3544 We might also get here for a non-dummy OLD_TYPE variant with different
3545 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3546 to private record type elaboration (see the comments around the call to
3547 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3548 the qualifiers in those cases too, to avoid accidentally discarding the
3549 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3551 = build_qualified_type (new_type,
3552 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3554 /* If old type and new type are identical, there is nothing to do. */
3555 if (old_type == new_type)
3558 /* Otherwise, first handle the simple case. */
3559 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3561 TYPE_POINTER_TO (new_type) = ptr;
3562 TYPE_REFERENCE_TO (new_type) = ref;
3564 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3565 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3566 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3567 TREE_TYPE (ptr1) = new_type;
3569 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3570 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3571 ref1 = TYPE_NEXT_VARIANT (ref1))
3572 TREE_TYPE (ref1) = new_type;
3575 /* Now deal with the unconstrained array case. In this case the "pointer"
3576 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3577 Turn them into pointers to the correct types using update_pointer_to. */
3578 else if (!TYPE_FAT_POINTER_P (ptr))
3583 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3584 tree array_field = TYPE_FIELDS (ptr);
3585 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3586 tree new_ptr = TYPE_POINTER_TO (new_type);
3590 /* Make pointers to the dummy template point to the real template. */
3592 (TREE_TYPE (TREE_TYPE (bounds_field)),
3593 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3595 /* The references to the template bounds present in the array type
3596 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3597 are updating PTR to make it a full replacement for NEW_PTR as
3598 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3599 to make it of type PTR. */
3600 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3601 build0 (PLACEHOLDER_EXPR, ptr),
3602 bounds_field, NULL_TREE);
3604 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3605 to the dummy array point to it. */
3607 (TREE_TYPE (TREE_TYPE (array_field)),
3608 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3609 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3611 /* Make PTR the pointer to NEW_TYPE. */
3612 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3613 = TREE_TYPE (new_type) = ptr;
3615 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3616 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3618 /* Now handle updating the allocation record, what the thin pointer
3619 points to. Update all pointers from the old record into the new
3620 one, update the type of the array field, and recompute the size. */
3621 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3623 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3624 = TREE_TYPE (TREE_TYPE (array_field));
3626 /* The size recomputation needs to account for alignment constraints, so
3627 we let layout_type work it out. This will reset the field offsets to
3628 what they would be in a regular record, so we shift them back to what
3629 we want them to be for a thin pointer designated type afterwards. */
3630 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3631 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3632 TYPE_SIZE (new_obj_rec) = 0;
3633 layout_type (new_obj_rec);
3635 shift_unc_components_for_thin_pointers (new_obj_rec);
3637 /* We are done, at last. */
3638 rest_of_record_type_compilation (ptr);
3642 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3643 unconstrained one. This involves making or finding a template. */
3646 convert_to_fat_pointer (tree type, tree expr)
3648 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3649 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3650 tree etype = TREE_TYPE (expr);
3653 /* If EXPR is null, make a fat pointer that contains null pointers to the
3654 template and array. */
3655 if (integer_zerop (expr))
3657 gnat_build_constructor
3659 tree_cons (TYPE_FIELDS (type),
3660 convert (p_array_type, expr),
3661 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3662 convert (build_pointer_type (template_type),
3666 /* If EXPR is a thin pointer, make template and data from the record.. */
3667 else if (TYPE_THIN_POINTER_P (etype))
3669 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3671 expr = save_expr (expr);
3672 if (TREE_CODE (expr) == ADDR_EXPR)
3673 expr = TREE_OPERAND (expr, 0);
3675 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3677 template = build_component_ref (expr, NULL_TREE, fields, false);
3678 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3679 build_component_ref (expr, NULL_TREE,
3680 TREE_CHAIN (fields), false));
3683 /* Otherwise, build the constructor for the template. */
3685 template = build_template (template_type, TREE_TYPE (etype), expr);
3687 /* The final result is a constructor for the fat pointer.
3689 If EXPR is an argument of a foreign convention subprogram, the type it
3690 points to is directly the component type. In this case, the expression
3691 type may not match the corresponding FIELD_DECL type at this point, so we
3692 call "convert" here to fix that up if necessary. This type consistency is
3693 required, for instance because it ensures that possible later folding of
3694 COMPONENT_REFs against this constructor always yields something of the
3695 same type as the initial reference.
3697 Note that the call to "build_template" above is still fine because it
3698 will only refer to the provided TEMPLATE_TYPE in this case. */
3700 gnat_build_constructor
3702 tree_cons (TYPE_FIELDS (type),
3703 convert (p_array_type, expr),
3704 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3705 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3709 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3710 is something that is a fat pointer, so convert to it first if it EXPR
3711 is not already a fat pointer. */
3714 convert_to_thin_pointer (tree type, tree expr)
3716 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3718 = convert_to_fat_pointer
3719 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3721 /* We get the pointer to the data and use a NOP_EXPR to make it the
3723 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3725 expr = build1 (NOP_EXPR, type, expr);
3730 /* Create an expression whose value is that of EXPR,
3731 converted to type TYPE. The TREE_TYPE of the value
3732 is always TYPE. This function implements all reasonable
3733 conversions; callers should filter out those that are
3734 not permitted by the language being compiled. */
3737 convert (tree type, tree expr)
3739 enum tree_code code = TREE_CODE (type);
3740 tree etype = TREE_TYPE (expr);
3741 enum tree_code ecode = TREE_CODE (etype);
3743 /* If EXPR is already the right type, we are done. */
3747 /* If both input and output have padding and are of variable size, do this
3748 as an unchecked conversion. Likewise if one is a mere variant of the
3749 other, so we avoid a pointless unpad/repad sequence. */
3750 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3751 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3752 && (!TREE_CONSTANT (TYPE_SIZE (type))
3753 || !TREE_CONSTANT (TYPE_SIZE (etype))
3754 || gnat_types_compatible_p (type, etype)
3755 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3756 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3759 /* If the output type has padding, convert to the inner type and
3760 make a constructor to build the record. */
3761 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3763 /* If we previously converted from another type and our type is
3764 of variable size, remove the conversion to avoid the need for
3765 variable-size temporaries. Likewise for a conversion between
3766 original and packable version. */
3767 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3768 && (!TREE_CONSTANT (TYPE_SIZE (type))
3769 || (ecode == RECORD_TYPE
3770 && TYPE_NAME (etype)
3771 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3772 expr = TREE_OPERAND (expr, 0);
3774 /* If we are just removing the padding from expr, convert the original
3775 object if we have variable size in order to avoid the need for some
3776 variable-size temporaries. Likewise if the padding is a mere variant
3777 of the other, so we avoid a pointless unpad/repad sequence. */
3778 if (TREE_CODE (expr) == COMPONENT_REF
3779 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3780 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3781 && (!TREE_CONSTANT (TYPE_SIZE (type))
3782 || gnat_types_compatible_p (type,
3783 TREE_TYPE (TREE_OPERAND (expr, 0)))
3784 || (ecode == RECORD_TYPE
3785 && TYPE_NAME (etype)
3786 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3787 return convert (type, TREE_OPERAND (expr, 0));
3789 /* If the result type is a padded type with a self-referentially-sized
3790 field and the expression type is a record, do this as an
3791 unchecked conversion. */
3792 else if (TREE_CODE (etype) == RECORD_TYPE
3793 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3794 return unchecked_convert (type, expr, false);
3798 gnat_build_constructor (type,
3799 tree_cons (TYPE_FIELDS (type),
3801 (TYPE_FIELDS (type)),
3806 /* If the input type has padding, remove it and convert to the output type.
3807 The conditions ordering is arranged to ensure that the output type is not
3808 a padding type here, as it is not clear whether the conversion would
3809 always be correct if this was to happen. */
3810 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3814 /* If we have just converted to this padded type, just get the
3815 inner expression. */
3816 if (TREE_CODE (expr) == CONSTRUCTOR
3817 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3818 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3819 == TYPE_FIELDS (etype))
3821 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3823 /* Otherwise, build an explicit component reference. */
3826 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3828 return convert (type, unpadded);
3831 /* If the input is a biased type, adjust first. */
3832 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3833 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3834 fold_convert (TREE_TYPE (etype),
3836 TYPE_MIN_VALUE (etype)));
3838 /* If the input is a justified modular type, we need to extract the actual
3839 object before converting it to any other type with the exceptions of an
3840 unconstrained array or of a mere type variant. It is useful to avoid the
3841 extraction and conversion in the type variant case because it could end
3842 up replacing a VAR_DECL expr by a constructor and we might be about the
3843 take the address of the result. */
3844 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3845 && code != UNCONSTRAINED_ARRAY_TYPE
3846 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3847 return convert (type, build_component_ref (expr, NULL_TREE,
3848 TYPE_FIELDS (etype), false));
3850 /* If converting to a type that contains a template, convert to the data
3851 type and then build the template. */
3852 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3854 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3856 /* If the source already has a template, get a reference to the
3857 associated array only, as we are going to rebuild a template
3858 for the target type anyway. */
3859 expr = maybe_unconstrained_array (expr);
3862 gnat_build_constructor
3864 tree_cons (TYPE_FIELDS (type),
3865 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3866 obj_type, NULL_TREE),
3867 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3868 convert (obj_type, expr), NULL_TREE)));
3871 /* There are some special cases of expressions that we process
3873 switch (TREE_CODE (expr))
3879 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3880 conversion in gnat_expand_expr. NULL_EXPR does not represent
3881 and actual value, so no conversion is needed. */
3882 expr = copy_node (expr);
3883 TREE_TYPE (expr) = type;
3887 /* If we are converting a STRING_CST to another constrained array type,
3888 just make a new one in the proper type. */
3889 if (code == ecode && AGGREGATE_TYPE_P (etype)
3890 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3891 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3893 expr = copy_node (expr);
3894 TREE_TYPE (expr) = type;
3900 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3901 a new one in the proper type. */
3902 if (code == ecode && gnat_types_compatible_p (type, etype))
3904 expr = copy_node (expr);
3905 TREE_TYPE (expr) = type;
3909 /* Likewise for a conversion between original and packable version, but
3910 we have to work harder in order to preserve type consistency. */
3912 && code == RECORD_TYPE
3913 && TYPE_NAME (type) == TYPE_NAME (etype))
3915 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3916 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3917 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3918 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3919 unsigned HOST_WIDE_INT idx;
3922 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3924 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3925 /* We expect only simple constructors. Otherwise, punt. */
3926 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3929 elt->value = convert (TREE_TYPE (field), value);
3930 efield = TREE_CHAIN (efield);
3931 field = TREE_CHAIN (field);
3936 expr = copy_node (expr);
3937 TREE_TYPE (expr) = type;
3938 CONSTRUCTOR_ELTS (expr) = v;
3944 case UNCONSTRAINED_ARRAY_REF:
3945 /* Convert this to the type of the inner array by getting the address of
3946 the array from the template. */
3947 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3948 build_component_ref (TREE_OPERAND (expr, 0),
3949 get_identifier ("P_ARRAY"),
3951 etype = TREE_TYPE (expr);
3952 ecode = TREE_CODE (etype);
3955 case VIEW_CONVERT_EXPR:
3957 /* GCC 4.x is very sensitive to type consistency overall, and view
3958 conversions thus are very frequent. Even though just "convert"ing
3959 the inner operand to the output type is fine in most cases, it
3960 might expose unexpected input/output type mismatches in special
3961 circumstances so we avoid such recursive calls when we can. */
3962 tree op0 = TREE_OPERAND (expr, 0);
3964 /* If we are converting back to the original type, we can just
3965 lift the input conversion. This is a common occurrence with
3966 switches back-and-forth amongst type variants. */
3967 if (type == TREE_TYPE (op0))
3970 /* Otherwise, if we're converting between two aggregate types, we
3971 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3972 in place or to just convert the inner expression. */
3973 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3975 /* If we are converting between mere variants, we can just
3976 substitute the VIEW_CONVERT_EXPR in place. */
3977 if (gnat_types_compatible_p (type, etype))
3978 return build1 (VIEW_CONVERT_EXPR, type, op0);
3980 /* Otherwise, we may just bypass the input view conversion unless
3981 one of the types is a fat pointer, which is handled by
3982 specialized code below which relies on exact type matching. */
3983 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3984 return convert (type, op0);
3990 /* If both types are record types, just convert the pointer and
3991 make a new INDIRECT_REF.
3993 ??? Disable this for now since it causes problems with the
3994 code in build_binary_op for MODIFY_EXPR which wants to
3995 strip off conversions. But that code really is a mess and
3996 we need to do this a much better way some time. */
3998 && (TREE_CODE (type) == RECORD_TYPE
3999 || TREE_CODE (type) == UNION_TYPE)
4000 && (TREE_CODE (etype) == RECORD_TYPE
4001 || TREE_CODE (etype) == UNION_TYPE)
4002 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4003 return build_unary_op (INDIRECT_REF, NULL_TREE,
4004 convert (build_pointer_type (type),
4005 TREE_OPERAND (expr, 0)));
4012 /* Check for converting to a pointer to an unconstrained array. */
4013 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4014 return convert_to_fat_pointer (type, expr);
4016 /* If we are converting between two aggregate types that are mere
4017 variants, just make a VIEW_CONVERT_EXPR. */
4018 else if (code == ecode
4019 && AGGREGATE_TYPE_P (type)
4020 && gnat_types_compatible_p (type, etype))
4021 return build1 (VIEW_CONVERT_EXPR, type, expr);
4023 /* In all other cases of related types, make a NOP_EXPR. */
4024 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4025 || (code == INTEGER_CST && ecode == INTEGER_CST
4026 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4027 return fold_convert (type, expr);
4032 return fold_build1 (CONVERT_EXPR, type, expr);
4035 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4036 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4037 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4038 return unchecked_convert (type, expr, false);
4039 else if (TYPE_BIASED_REPRESENTATION_P (type))
4040 return fold_convert (type,
4041 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4042 convert (TREE_TYPE (type), expr),
4043 TYPE_MIN_VALUE (type)));
4045 /* ... fall through ... */
4049 /* If we are converting an additive expression to an integer type
4050 with lower precision, be wary of the optimization that can be
4051 applied by convert_to_integer. There are 2 problematic cases:
4052 - if the first operand was originally of a biased type,
4053 because we could be recursively called to convert it
4054 to an intermediate type and thus rematerialize the
4055 additive operator endlessly,
4056 - if the expression contains a placeholder, because an
4057 intermediate conversion that changes the sign could
4058 be inserted and thus introduce an artificial overflow
4059 at compile time when the placeholder is substituted. */
4060 if (code == INTEGER_TYPE
4061 && ecode == INTEGER_TYPE
4062 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4063 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4065 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4067 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4068 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4069 || CONTAINS_PLACEHOLDER_P (expr))
4070 return build1 (NOP_EXPR, type, expr);
4073 return fold (convert_to_integer (type, expr));
4076 case REFERENCE_TYPE:
4077 /* If converting between two pointers to records denoting
4078 both a template and type, adjust if needed to account
4079 for any differing offsets, since one might be negative. */
4080 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4083 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4084 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4085 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4086 sbitsize_int (BITS_PER_UNIT));
4088 expr = build1 (NOP_EXPR, type, expr);
4089 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4090 if (integer_zerop (byte_diff))
4093 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4094 fold (convert (sizetype, byte_diff)));
4097 /* If converting to a thin pointer, handle specially. */
4098 if (TYPE_THIN_POINTER_P (type)
4099 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4100 return convert_to_thin_pointer (type, expr);
4102 /* If converting fat pointer to normal pointer, get the pointer to the
4103 array and then convert it. */
4104 else if (TYPE_FAT_POINTER_P (etype))
4105 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4108 return fold (convert_to_pointer (type, expr));
4111 return fold (convert_to_real (type, expr));
4114 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4116 gnat_build_constructor
4117 (type, tree_cons (TYPE_FIELDS (type),
4118 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4121 /* ... fall through ... */
4124 /* In these cases, assume the front-end has validated the conversion.
4125 If the conversion is valid, it will be a bit-wise conversion, so
4126 it can be viewed as an unchecked conversion. */
4127 return unchecked_convert (type, expr, false);
4130 /* This is a either a conversion between a tagged type and some
4131 subtype, which we have to mark as a UNION_TYPE because of
4132 overlapping fields or a conversion of an Unchecked_Union. */
4133 return unchecked_convert (type, expr, false);
4135 case UNCONSTRAINED_ARRAY_TYPE:
4136 /* If EXPR is a constrained array, take its address, convert it to a
4137 fat pointer, and then dereference it. Likewise if EXPR is a
4138 record containing both a template and a constrained array.
4139 Note that a record representing a justified modular type
4140 always represents a packed constrained array. */
4141 if (ecode == ARRAY_TYPE
4142 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4143 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4144 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4147 (INDIRECT_REF, NULL_TREE,
4148 convert_to_fat_pointer (TREE_TYPE (type),
4149 build_unary_op (ADDR_EXPR,
4152 /* Do something very similar for converting one unconstrained
4153 array to another. */
4154 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4156 build_unary_op (INDIRECT_REF, NULL_TREE,
4157 convert (TREE_TYPE (type),
4158 build_unary_op (ADDR_EXPR,
4164 return fold (convert_to_complex (type, expr));
4171 /* Remove all conversions that are done in EXP. This includes converting
4172 from a padded type or to a justified modular type. If TRUE_ADDRESS
4173 is true, always return the address of the containing object even if
4174 the address is not bit-aligned. */
4177 remove_conversions (tree exp, bool true_address)
4179 switch (TREE_CODE (exp))
4183 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4184 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4186 remove_conversions (VEC_index (constructor_elt,
4187 CONSTRUCTOR_ELTS (exp), 0)->value,
4192 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4193 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4194 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4197 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4199 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4208 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4209 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4210 likewise return an expression pointing to the underlying array. */
4213 maybe_unconstrained_array (tree exp)
4215 enum tree_code code = TREE_CODE (exp);
4218 switch (TREE_CODE (TREE_TYPE (exp)))
4220 case UNCONSTRAINED_ARRAY_TYPE:
4221 if (code == UNCONSTRAINED_ARRAY_REF)
4224 = build_unary_op (INDIRECT_REF, NULL_TREE,
4225 build_component_ref (TREE_OPERAND (exp, 0),
4226 get_identifier ("P_ARRAY"),
4228 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4232 else if (code == NULL_EXPR)
4233 return build1 (NULL_EXPR,
4234 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4235 (TREE_TYPE (TREE_TYPE (exp))))),
4236 TREE_OPERAND (exp, 0));
4239 /* If this is a padded type, convert to the unpadded type and see if
4240 it contains a template. */
4241 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4243 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4244 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4245 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4247 build_component_ref (new, NULL_TREE,
4248 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4251 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4253 build_component_ref (exp, NULL_TREE,
4254 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4264 /* Return true if EXPR is an expression that can be folded as an operand
4265 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4269 can_fold_for_view_convert_p (tree expr)
4273 /* The folder will fold NOP_EXPRs between integral types with the same
4274 precision (in the middle-end's sense). We cannot allow it if the
4275 types don't have the same precision in the Ada sense as well. */
4276 if (TREE_CODE (expr) != NOP_EXPR)
4279 t1 = TREE_TYPE (expr);
4280 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4282 /* Defer to the folder for non-integral conversions. */
4283 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4286 /* Only fold conversions that preserve both precisions. */
4287 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4288 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4294 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4295 If NOTRUNC_P is true, truncation operations should be suppressed.
4297 Special care is required with (source or target) integral types whose
4298 precision is not equal to their size, to make sure we fetch or assign
4299 the value bits whose location might depend on the endianness, e.g.
4301 Rmsize : constant := 8;
4302 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4304 type Bit_Array is array (1 .. Rmsize) of Boolean;
4305 pragma Pack (Bit_Array);
4307 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4309 Value : Int := 2#1000_0001#;
4310 Vbits : Bit_Array := To_Bit_Array (Value);
4312 we expect the 8 bits at Vbits'Address to always contain Value, while
4313 their original location depends on the endianness, at Value'Address
4314 on a little-endian architecture but not on a big-endian one.
4316 ??? There is a problematic discrepancy between what is called precision
4317 here (and more generally throughout gigi) for integral types and what is
4318 called precision in the middle-end. In the former case it's the RM size
4319 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4320 latter case, the hitch being that they are not equal when they matter,
4321 that is when the number of value bits is not equal to the type's size:
4322 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4323 to the size. The sole exception are BOOLEAN_TYPEs for which both are 1.
4325 The consequence is that gigi must duplicate code bridging the gap between
4326 the type's size and its precision that exists for TYPE_PRECISION in the
4327 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4328 wary of transformations applied in the middle-end based on TYPE_PRECISION
4329 because this value doesn't reflect the actual precision for Ada. */
4332 unchecked_convert (tree type, tree expr, bool notrunc_p)
4334 tree etype = TREE_TYPE (expr);
4336 /* If the expression is already the right type, we are done. */
4340 /* If both types types are integral just do a normal conversion.
4341 Likewise for a conversion to an unconstrained array. */
4342 if ((((INTEGRAL_TYPE_P (type)
4343 && !(TREE_CODE (type) == INTEGER_TYPE
4344 && TYPE_VAX_FLOATING_POINT_P (type)))
4345 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4346 || (TREE_CODE (type) == RECORD_TYPE
4347 && TYPE_JUSTIFIED_MODULAR_P (type)))
4348 && ((INTEGRAL_TYPE_P (etype)
4349 && !(TREE_CODE (etype) == INTEGER_TYPE
4350 && TYPE_VAX_FLOATING_POINT_P (etype)))
4351 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4352 || (TREE_CODE (etype) == RECORD_TYPE
4353 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4354 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4356 if (TREE_CODE (etype) == INTEGER_TYPE
4357 && TYPE_BIASED_REPRESENTATION_P (etype))
4359 tree ntype = copy_type (etype);
4360 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4361 TYPE_MAIN_VARIANT (ntype) = ntype;
4362 expr = build1 (NOP_EXPR, ntype, expr);
4365 if (TREE_CODE (type) == INTEGER_TYPE
4366 && TYPE_BIASED_REPRESENTATION_P (type))
4368 tree rtype = copy_type (type);
4369 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4370 TYPE_MAIN_VARIANT (rtype) = rtype;
4371 expr = convert (rtype, expr);
4372 expr = build1 (NOP_EXPR, type, expr);
4375 /* We have another special case: if we are unchecked converting either
4376 a subtype or a type with limited range into a base type, we need to
4377 ensure that VRP doesn't propagate range information because this
4378 conversion may be done precisely to validate that the object is
4379 within the range it is supposed to have. */
4380 else if (TREE_CODE (expr) != INTEGER_CST
4381 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4382 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4383 || TREE_CODE (etype) == ENUMERAL_TYPE
4384 || TREE_CODE (etype) == BOOLEAN_TYPE))
4386 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4387 in order not to be deemed an useless type conversion, it must
4388 be from subtype to base type.
4390 Therefore we first do the bulk of the conversion to a subtype of
4391 the final type. And this conversion must itself not be deemed
4392 useless if the source type is not a subtype because, otherwise,
4393 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4394 why we toggle the unsigned flag in this conversion, which is
4395 harmless since the final conversion is only a reinterpretation
4398 ??? This may raise addressability and/or aliasing issues because
4399 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4400 address of its operand to be taken if it is deemed addressable
4401 and not already in GIMPLE form. */
4403 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
4404 rtype = copy_type (rtype);
4405 TYPE_MAIN_VARIANT (rtype) = rtype;
4406 TREE_TYPE (rtype) = type;
4407 expr = convert (rtype, expr);
4408 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4412 expr = convert (type, expr);
4415 /* If we are converting to an integral type whose precision is not equal
4416 to its size, first unchecked convert to a record that contains an
4417 object of the output type. Then extract the field. */
4418 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4419 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4420 GET_MODE_BITSIZE (TYPE_MODE (type))))
4422 tree rec_type = make_node (RECORD_TYPE);
4423 tree field = create_field_decl (get_identifier ("OBJ"), type,
4424 rec_type, 1, 0, 0, 0);
4426 TYPE_FIELDS (rec_type) = field;
4427 layout_type (rec_type);
4429 expr = unchecked_convert (rec_type, expr, notrunc_p);
4430 expr = build_component_ref (expr, NULL_TREE, field, 0);
4433 /* Similarly if we are converting from an integral type whose precision
4434 is not equal to its size. */
4435 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4436 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4437 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4439 tree rec_type = make_node (RECORD_TYPE);
4441 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4444 TYPE_FIELDS (rec_type) = field;
4445 layout_type (rec_type);
4447 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4448 expr = unchecked_convert (type, expr, notrunc_p);
4451 /* We have a special case when we are converting between two
4452 unconstrained array types. In that case, take the address,
4453 convert the fat pointer types, and dereference. */
4454 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4455 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4456 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4457 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4458 build_unary_op (ADDR_EXPR, NULL_TREE,
4462 expr = maybe_unconstrained_array (expr);
4463 etype = TREE_TYPE (expr);
4464 if (can_fold_for_view_convert_p (expr))
4465 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4467 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4470 /* If the result is an integral type whose precision is not equal to its
4471 size, sign- or zero-extend the result. We need not do this if the input
4472 is an integral type of the same precision and signedness or if the output
4473 is a biased type or if both the input and output are unsigned. */
4475 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4476 && !(TREE_CODE (type) == INTEGER_TYPE
4477 && TYPE_BIASED_REPRESENTATION_P (type))
4478 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4479 GET_MODE_BITSIZE (TYPE_MODE (type)))
4480 && !(INTEGRAL_TYPE_P (etype)
4481 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4482 && operand_equal_p (TYPE_RM_SIZE (type),
4483 (TYPE_RM_SIZE (etype) != 0
4484 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4486 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4488 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4489 TYPE_UNSIGNED (type));
4491 = convert (base_type,
4492 size_binop (MINUS_EXPR,
4494 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4495 TYPE_RM_SIZE (type)));
4498 build_binary_op (RSHIFT_EXPR, base_type,
4499 build_binary_op (LSHIFT_EXPR, base_type,
4500 convert (base_type, expr),
4505 /* An unchecked conversion should never raise Constraint_Error. The code
4506 below assumes that GCC's conversion routines overflow the same way that
4507 the underlying hardware does. This is probably true. In the rare case
4508 when it is false, we can rely on the fact that such conversions are
4509 erroneous anyway. */
4510 if (TREE_CODE (expr) == INTEGER_CST)
4511 TREE_OVERFLOW (expr) = 0;
4513 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4514 show no longer constant. */
4515 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4516 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4518 TREE_CONSTANT (expr) = 0;
4523 /* Return the appropriate GCC tree code for the specified GNAT type,
4524 the latter being a record type as predicated by Is_Record_Type. */
4527 tree_code_for_record_type (Entity_Id gnat_type)
4529 Node_Id component_list
4530 = Component_List (Type_Definition
4532 (Implementation_Base_Type (gnat_type))));
4535 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4536 we have a non-discriminant field outside a variant. In either case,
4537 it's a RECORD_TYPE. */
4539 if (!Is_Unchecked_Union (gnat_type))
4542 for (component = First_Non_Pragma (Component_Items (component_list));
4543 Present (component);
4544 component = Next_Non_Pragma (component))
4545 if (Ekind (Defining_Entity (component)) == E_Component)
4551 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4552 component of an aggregate type. */
4555 type_for_nonaliased_component_p (tree gnu_type)
4557 /* If the type is passed by reference, we may have pointers to the
4558 component so it cannot be made non-aliased. */
4559 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4562 /* We used to say that any component of aggregate type is aliased
4563 because the front-end may take 'Reference of it. The front-end
4564 has been enhanced in the meantime so as to use a renaming instead
4565 in most cases, but the back-end can probably take the address of
4566 such a component too so we go for the conservative stance.
4568 For instance, we might need the address of any array type, even
4569 if normally passed by copy, to construct a fat pointer if the
4570 component is used as an actual for an unconstrained formal.
4572 Likewise for record types: even if a specific record subtype is
4573 passed by copy, the parent type might be passed by ref (e.g. if
4574 it's of variable size) and we might take the address of a child
4575 component to pass to a parent formal. We have no way to check
4576 for such conditions here. */
4577 if (AGGREGATE_TYPE_P (gnu_type))
4583 /* Perform final processing on global variables. */
4586 gnat_write_global_declarations (void)
4588 /* Proceed to optimize and emit assembly.
4589 FIXME: shouldn't be the front end's responsibility to call this. */
4592 /* Emit debug info for all global declarations. */
4593 emit_debug_global_declarations (VEC_address (tree, global_decls),
4594 VEC_length (tree, global_decls));
4597 /* ************************************************************************
4598 * * GCC builtins support *
4599 * ************************************************************************ */
4601 /* The general scheme is fairly simple:
4603 For each builtin function/type to be declared, gnat_install_builtins calls
4604 internal facilities which eventually get to gnat_push_decl, which in turn
4605 tracks the so declared builtin function decls in the 'builtin_decls' global
4606 datastructure. When an Intrinsic subprogram declaration is processed, we
4607 search this global datastructure to retrieve the associated BUILT_IN DECL
4610 /* Search the chain of currently available builtin declarations for a node
4611 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4612 found, if any, or NULL_TREE otherwise. */
4614 builtin_decl_for (tree name)
4619 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4620 if (DECL_NAME (decl) == name)
4626 /* The code below eventually exposes gnat_install_builtins, which declares
4627 the builtin types and functions we might need, either internally or as
4628 user accessible facilities.
4630 ??? This is a first implementation shot, still in rough shape. It is
4631 heavily inspired from the "C" family implementation, with chunks copied
4632 verbatim from there.
4634 Two obvious TODO candidates are
4635 o Use a more efficient name/decl mapping scheme
4636 o Devise a middle-end infrastructure to avoid having to copy
4637 pieces between front-ends. */
4639 /* ----------------------------------------------------------------------- *
4640 * BUILTIN ELEMENTARY TYPES *
4641 * ----------------------------------------------------------------------- */
4643 /* Standard data types to be used in builtin argument declarations. */
4647 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4649 CTI_CONST_STRING_TYPE,
4654 static tree c_global_trees[CTI_MAX];
4656 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4657 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4658 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4660 /* ??? In addition some attribute handlers, we currently don't support a
4661 (small) number of builtin-types, which in turns inhibits support for a
4662 number of builtin functions. */
4663 #define wint_type_node void_type_node
4664 #define intmax_type_node void_type_node
4665 #define uintmax_type_node void_type_node
4667 /* Build the void_list_node (void_type_node having been created). */
4670 build_void_list_node (void)
4672 tree t = build_tree_list (NULL_TREE, void_type_node);
4676 /* Used to help initialize the builtin-types.def table. When a type of
4677 the correct size doesn't exist, use error_mark_node instead of NULL.
4678 The later results in segfaults even when a decl using the type doesn't
4682 builtin_type_for_size (int size, bool unsignedp)
4684 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4685 return type ? type : error_mark_node;
4688 /* Build/push the elementary type decls that builtin functions/types
4692 install_builtin_elementary_types (void)
4694 signed_size_type_node = size_type_node;
4695 pid_type_node = integer_type_node;
4696 void_list_node = build_void_list_node ();
4698 string_type_node = build_pointer_type (char_type_node);
4699 const_string_type_node
4700 = build_pointer_type (build_qualified_type
4701 (char_type_node, TYPE_QUAL_CONST));
4704 /* ----------------------------------------------------------------------- *
4705 * BUILTIN FUNCTION TYPES *
4706 * ----------------------------------------------------------------------- */
4708 /* Now, builtin function types per se. */
4712 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4713 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4714 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4715 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4716 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4717 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4718 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4719 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4720 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4721 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4722 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4723 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4724 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4725 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4726 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4728 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4729 #include "builtin-types.def"
4730 #undef DEF_PRIMITIVE_TYPE
4731 #undef DEF_FUNCTION_TYPE_0
4732 #undef DEF_FUNCTION_TYPE_1
4733 #undef DEF_FUNCTION_TYPE_2
4734 #undef DEF_FUNCTION_TYPE_3
4735 #undef DEF_FUNCTION_TYPE_4
4736 #undef DEF_FUNCTION_TYPE_5
4737 #undef DEF_FUNCTION_TYPE_6
4738 #undef DEF_FUNCTION_TYPE_7
4739 #undef DEF_FUNCTION_TYPE_VAR_0
4740 #undef DEF_FUNCTION_TYPE_VAR_1
4741 #undef DEF_FUNCTION_TYPE_VAR_2
4742 #undef DEF_FUNCTION_TYPE_VAR_3
4743 #undef DEF_FUNCTION_TYPE_VAR_4
4744 #undef DEF_FUNCTION_TYPE_VAR_5
4745 #undef DEF_POINTER_TYPE
4749 typedef enum c_builtin_type builtin_type;
4751 /* A temporary array used in communication with def_fn_type. */
4752 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4754 /* A helper function for install_builtin_types. Build function type
4755 for DEF with return type RET and N arguments. If VAR is true, then the
4756 function should be variadic after those N arguments.
4758 Takes special care not to ICE if any of the types involved are
4759 error_mark_node, which indicates that said type is not in fact available
4760 (see builtin_type_for_size). In which case the function type as a whole
4761 should be error_mark_node. */
4764 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4766 tree args = NULL, t;
4771 for (i = 0; i < n; ++i)
4773 builtin_type a = va_arg (list, builtin_type);
4774 t = builtin_types[a];
4775 if (t == error_mark_node)
4777 args = tree_cons (NULL_TREE, t, args);
4781 args = nreverse (args);
4783 args = chainon (args, void_list_node);
4785 t = builtin_types[ret];
4786 if (t == error_mark_node)
4788 t = build_function_type (t, args);
4791 builtin_types[def] = t;
4794 /* Build the builtin function types and install them in the builtin_types
4795 array for later use in builtin function decls. */
4798 install_builtin_function_types (void)
4800 tree va_list_ref_type_node;
4801 tree va_list_arg_type_node;
4803 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4805 va_list_arg_type_node = va_list_ref_type_node =
4806 build_pointer_type (TREE_TYPE (va_list_type_node));
4810 va_list_arg_type_node = va_list_type_node;
4811 va_list_ref_type_node = build_reference_type (va_list_type_node);
4814 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4815 builtin_types[ENUM] = VALUE;
4816 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4817 def_fn_type (ENUM, RETURN, 0, 0);
4818 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4819 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4820 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4821 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4822 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4823 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4824 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4825 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4826 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4827 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4828 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4830 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4831 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4833 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4834 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4835 def_fn_type (ENUM, RETURN, 1, 0);
4836 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4837 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4838 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4839 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4840 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4841 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4842 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4843 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4844 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4845 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4846 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4847 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4849 #include "builtin-types.def"
4851 #undef DEF_PRIMITIVE_TYPE
4852 #undef DEF_FUNCTION_TYPE_1
4853 #undef DEF_FUNCTION_TYPE_2
4854 #undef DEF_FUNCTION_TYPE_3
4855 #undef DEF_FUNCTION_TYPE_4
4856 #undef DEF_FUNCTION_TYPE_5
4857 #undef DEF_FUNCTION_TYPE_6
4858 #undef DEF_FUNCTION_TYPE_VAR_0
4859 #undef DEF_FUNCTION_TYPE_VAR_1
4860 #undef DEF_FUNCTION_TYPE_VAR_2
4861 #undef DEF_FUNCTION_TYPE_VAR_3
4862 #undef DEF_FUNCTION_TYPE_VAR_4
4863 #undef DEF_FUNCTION_TYPE_VAR_5
4864 #undef DEF_POINTER_TYPE
4865 builtin_types[(int) BT_LAST] = NULL_TREE;
4868 /* ----------------------------------------------------------------------- *
4869 * BUILTIN ATTRIBUTES *
4870 * ----------------------------------------------------------------------- */
4872 enum built_in_attribute
4874 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4875 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4876 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4877 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4878 #include "builtin-attrs.def"
4879 #undef DEF_ATTR_NULL_TREE
4881 #undef DEF_ATTR_IDENT
4882 #undef DEF_ATTR_TREE_LIST
4886 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4889 install_builtin_attributes (void)
4891 /* Fill in the built_in_attributes array. */
4892 #define DEF_ATTR_NULL_TREE(ENUM) \
4893 built_in_attributes[(int) ENUM] = NULL_TREE;
4894 #define DEF_ATTR_INT(ENUM, VALUE) \
4895 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4896 #define DEF_ATTR_IDENT(ENUM, STRING) \
4897 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4898 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4899 built_in_attributes[(int) ENUM] \
4900 = tree_cons (built_in_attributes[(int) PURPOSE], \
4901 built_in_attributes[(int) VALUE], \
4902 built_in_attributes[(int) CHAIN]);
4903 #include "builtin-attrs.def"
4904 #undef DEF_ATTR_NULL_TREE
4906 #undef DEF_ATTR_IDENT
4907 #undef DEF_ATTR_TREE_LIST
4910 /* Handle a "const" attribute; arguments as in
4911 struct attribute_spec.handler. */
4914 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4915 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4918 if (TREE_CODE (*node) == FUNCTION_DECL)
4919 TREE_READONLY (*node) = 1;
4921 *no_add_attrs = true;
4926 /* Handle a "nothrow" attribute; arguments as in
4927 struct attribute_spec.handler. */
4930 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4931 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4934 if (TREE_CODE (*node) == FUNCTION_DECL)
4935 TREE_NOTHROW (*node) = 1;
4937 *no_add_attrs = true;
4942 /* Handle a "pure" attribute; arguments as in
4943 struct attribute_spec.handler. */
4946 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4947 int ARG_UNUSED (flags), bool *no_add_attrs)
4949 if (TREE_CODE (*node) == FUNCTION_DECL)
4950 DECL_PURE_P (*node) = 1;
4951 /* ??? TODO: Support types. */
4954 warning (OPT_Wattributes, "%qE attribute ignored", name);
4955 *no_add_attrs = true;
4961 /* Handle a "no vops" attribute; arguments as in
4962 struct attribute_spec.handler. */
4965 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4966 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4967 bool *ARG_UNUSED (no_add_attrs))
4969 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4970 DECL_IS_NOVOPS (*node) = 1;
4974 /* Helper for nonnull attribute handling; fetch the operand number
4975 from the attribute argument list. */
4978 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4980 /* Verify the arg number is a constant. */
4981 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4982 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4985 *valp = TREE_INT_CST_LOW (arg_num_expr);
4989 /* Handle the "nonnull" attribute. */
4991 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4992 tree args, int ARG_UNUSED (flags),
4996 unsigned HOST_WIDE_INT attr_arg_num;
4998 /* If no arguments are specified, all pointer arguments should be
4999 non-null. Verify a full prototype is given so that the arguments
5000 will have the correct types when we actually check them later. */
5003 if (!TYPE_ARG_TYPES (type))
5005 error ("nonnull attribute without arguments on a non-prototype");
5006 *no_add_attrs = true;
5011 /* Argument list specified. Verify that each argument number references
5012 a pointer argument. */
5013 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5016 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5018 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5020 error ("nonnull argument has invalid operand number (argument %lu)",
5021 (unsigned long) attr_arg_num);
5022 *no_add_attrs = true;
5026 argument = TYPE_ARG_TYPES (type);
5029 for (ck_num = 1; ; ck_num++)
5031 if (!argument || ck_num == arg_num)
5033 argument = TREE_CHAIN (argument);
5037 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5039 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5040 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5041 *no_add_attrs = true;
5045 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5047 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5048 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5049 *no_add_attrs = true;
5058 /* Handle a "sentinel" attribute. */
5061 handle_sentinel_attribute (tree *node, tree name, tree args,
5062 int ARG_UNUSED (flags), bool *no_add_attrs)
5064 tree params = TYPE_ARG_TYPES (*node);
5068 warning (OPT_Wattributes,
5069 "%qE attribute requires prototypes with named arguments", name);
5070 *no_add_attrs = true;
5074 while (TREE_CHAIN (params))
5075 params = TREE_CHAIN (params);
5077 if (VOID_TYPE_P (TREE_VALUE (params)))
5079 warning (OPT_Wattributes,
5080 "%qE attribute only applies to variadic functions", name);
5081 *no_add_attrs = true;
5087 tree position = TREE_VALUE (args);
5089 if (TREE_CODE (position) != INTEGER_CST)
5091 warning (0, "requested position is not an integer constant");
5092 *no_add_attrs = true;
5096 if (tree_int_cst_lt (position, integer_zero_node))
5098 warning (0, "requested position is less than zero");
5099 *no_add_attrs = true;
5107 /* Handle a "noreturn" attribute; arguments as in
5108 struct attribute_spec.handler. */
5111 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5112 int ARG_UNUSED (flags), bool *no_add_attrs)
5114 tree type = TREE_TYPE (*node);
5116 /* See FIXME comment in c_common_attribute_table. */
5117 if (TREE_CODE (*node) == FUNCTION_DECL)
5118 TREE_THIS_VOLATILE (*node) = 1;
5119 else if (TREE_CODE (type) == POINTER_TYPE
5120 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5122 = build_pointer_type
5123 (build_type_variant (TREE_TYPE (type),
5124 TYPE_READONLY (TREE_TYPE (type)), 1));
5127 warning (OPT_Wattributes, "%qE attribute ignored", name);
5128 *no_add_attrs = true;
5134 /* Handle a "malloc" attribute; arguments as in
5135 struct attribute_spec.handler. */
5138 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5139 int ARG_UNUSED (flags), bool *no_add_attrs)
5141 if (TREE_CODE (*node) == FUNCTION_DECL
5142 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5143 DECL_IS_MALLOC (*node) = 1;
5146 warning (OPT_Wattributes, "%qE attribute ignored", name);
5147 *no_add_attrs = true;
5153 /* Fake handler for attributes we don't properly support. */
5156 fake_attribute_handler (tree * ARG_UNUSED (node),
5157 tree ARG_UNUSED (name),
5158 tree ARG_UNUSED (args),
5159 int ARG_UNUSED (flags),
5160 bool * ARG_UNUSED (no_add_attrs))
5165 /* Handle a "type_generic" attribute. */
5168 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5169 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5170 bool * ARG_UNUSED (no_add_attrs))
5174 /* Ensure we have a function type. */
5175 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5177 params = TYPE_ARG_TYPES (*node);
5178 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5179 params = TREE_CHAIN (params);
5181 /* Ensure we have a variadic function. */
5182 gcc_assert (!params);
5187 /* ----------------------------------------------------------------------- *
5188 * BUILTIN FUNCTIONS *
5189 * ----------------------------------------------------------------------- */
5191 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5192 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5193 if nonansi_p and flag_no_nonansi_builtin. */
5196 def_builtin_1 (enum built_in_function fncode,
5198 enum built_in_class fnclass,
5199 tree fntype, tree libtype,
5200 bool both_p, bool fallback_p,
5201 bool nonansi_p ATTRIBUTE_UNUSED,
5202 tree fnattrs, bool implicit_p)
5205 const char *libname;
5207 /* Preserve an already installed decl. It most likely was setup in advance
5208 (e.g. as part of the internal builtins) for specific reasons. */
5209 if (built_in_decls[(int) fncode] != NULL_TREE)
5212 gcc_assert ((!both_p && !fallback_p)
5213 || !strncmp (name, "__builtin_",
5214 strlen ("__builtin_")));
5216 libname = name + strlen ("__builtin_");
5217 decl = add_builtin_function (name, fntype, fncode, fnclass,
5218 (fallback_p ? libname : NULL),
5221 /* ??? This is normally further controlled by command-line options
5222 like -fno-builtin, but we don't have them for Ada. */
5223 add_builtin_function (libname, libtype, fncode, fnclass,
5226 built_in_decls[(int) fncode] = decl;
5228 implicit_built_in_decls[(int) fncode] = decl;
5231 static int flag_isoc94 = 0;
5232 static int flag_isoc99 = 0;
5234 /* Install what the common builtins.def offers. */
5237 install_builtin_functions (void)
5239 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5240 NONANSI_P, ATTRS, IMPLICIT, COND) \
5242 def_builtin_1 (ENUM, NAME, CLASS, \
5243 builtin_types[(int) TYPE], \
5244 builtin_types[(int) LIBTYPE], \
5245 BOTH_P, FALLBACK_P, NONANSI_P, \
5246 built_in_attributes[(int) ATTRS], IMPLICIT);
5247 #include "builtins.def"
5251 /* ----------------------------------------------------------------------- *
5252 * BUILTIN FUNCTIONS *
5253 * ----------------------------------------------------------------------- */
5255 /* Install the builtin functions we might need. */
5258 gnat_install_builtins (void)
5260 install_builtin_elementary_types ();
5261 install_builtin_function_types ();
5262 install_builtin_attributes ();
5264 /* Install builtins used by generic middle-end pieces first. Some of these
5265 know about internal specificities and control attributes accordingly, for
5266 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5267 the generic definition from builtins.def. */
5268 build_common_builtin_nodes ();
5270 /* Now, install the target specific builtins, such as the AltiVec family on
5271 ppc, and the common set as exposed by builtins.def. */
5272 targetm.init_builtins ();
5273 install_builtin_functions ();
5276 #include "gt-ada-utils.h"
5277 #include "gtype-ada.h"