1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
48 #include "tree-dump.h"
49 #include "pointer-set.h"
50 #include "langhooks.h"
/* Provide fallbacks for target macros that may not be configured.
   NOTE: the numbered listing dropped the closing #endif of each
   conditional; they are restored here so the guards nest correctly.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
75 /* If nonzero, pretend we are allocating at global level. */
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls[(int) ADT_LAST];
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
84 /* Forward declarations for handlers of attributes. */
85 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 /* Fake handler for attributes we don't properly support, typically because
96 they'd require dragging a lot of the common-c front-end circuitry. */
97 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
99 /* Table of machine-independent internal attributes for Ada. We support
100 this minimal set of attributes to accommodate the needs of builtins. */
101 const struct attribute_spec gnat_internal_attribute_table[] =
103 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
104 { "const", 0, 0, true, false, false, handle_const_attribute },
105 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
106 { "pure", 0, 0, true, false, false, handle_pure_attribute },
107 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
108 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
109 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
110 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
111 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
112 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
114 /* ??? format and format_arg are heavy and not supported, which actually
115 prevents support for stdio builtins, which we however declare as part
116 of the common builtins.def contents. */
117 { "format", 3, 3, false, true, true, fake_attribute_handler },
118 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
120 { NULL, 0, 0, false, false, false, NULL }
123 /* Associates a GNAT tree node to a GCC tree node. It is used in
124 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
125 of `save_gnu_tree' for more info. */
126 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
128 #define GET_GNU_TREE(GNAT_ENTITY) \
129 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
131 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
132 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
134 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
135 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
137 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
138 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
140 #define GET_DUMMY_NODE(GNAT_ENTITY) \
141 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
143 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
144 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
146 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
147 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
149 /* This variable keeps a table for types for each precision so that we only
150 allocate each of them once. Signed and unsigned types are kept separate.
152 Note that these types are only used when fold-const requests something
153 special. Perhaps we should NOT share these types; we'll see how it
155 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
157 /* Likewise for float types, but record these by mode. */
158 static GTY(()) tree float_types[NUM_MACHINE_MODES];
160 /* For each binding contour we allocate a binding_level structure to indicate
161 the binding depth. */
163 struct gnat_binding_level GTY((chain_next ("%h.chain")))
165 /* The binding level containing this one (the enclosing binding level). */
166 struct gnat_binding_level *chain;
167 /* The BLOCK node for this level. */
169 /* If nonzero, the setjmp buffer that needs to be updated for any
170 variable-sized definition within this context. */
174 /* The binding level currently in effect. */
175 static GTY(()) struct gnat_binding_level *current_binding_level;
177 /* A chain of gnat_binding_level structures awaiting reuse. */
178 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
180 /* An array of global declarations. */
181 static GTY(()) VEC(tree,gc) *global_decls;
183 /* An array of builtin function declarations. */
184 static GTY(()) VEC(tree,gc) *builtin_decls;
186 /* An array of global renaming pointers. */
187 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
189 /* A chain of unused BLOCK nodes. */
190 static GTY((deletable)) tree free_block_chain;
192 static tree merge_sizes (tree, tree, tree, bool, bool);
193 static tree compute_related_constant (tree, tree);
194 static tree split_plus (tree, tree *);
195 static void gnat_gimplify_function (tree);
196 static tree float_type_for_precision (int, enum machine_mode);
197 static tree convert_to_fat_pointer (tree, tree);
198 static tree convert_to_thin_pointer (tree, tree);
199 static tree make_descriptor_field (const char *,tree, tree, tree);
200 static bool potential_alignment_gap (tree, tree, tree);
202 /* Initialize the association of GNAT nodes to GCC trees. */
205 init_gnat_to_gnu (void)
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
211 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
213 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
215 If GNU_DECL is zero, a previous association is to be reset. */
218 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
228 SET_GNU_TREE (gnat_entity, gnu_decl);
231 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
239 get_gnu_tree (Entity_Id gnat_entity)
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
245 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
248 present_gnu_tree (Entity_Id gnat_entity)
250 return PRESENT_GNU_TREE (gnat_entity);
253 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
256 init_dummy_type (void)
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
262 /* Make a dummy type corresponding to GNAT_TYPE. */
265 make_dummy_type (Entity_Id gnat_type)
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
279 /* If it there already a dummy type, use that one. Else make one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 TYPE_STUB_DECL (gnu_type)
291 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
292 if (AGGREGATE_TYPE_P (gnu_type))
293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
295 SET_DUMMY_NODE (gnat_underlying, gnu_type);
300 /* Return nonzero if we are currently in the global binding level. */
303 global_bindings_p (void)
305 return ((force_global || !current_function_decl) ? -1 : 0);
308 /* Enter a new binding level. */
313 struct gnat_binding_level *newlevel = NULL;
315 /* Reuse a struct for this binding level, if there is one. */
316 if (free_binding_level)
318 newlevel = free_binding_level;
319 free_binding_level = free_binding_level->chain;
323 = (struct gnat_binding_level *)
324 ggc_alloc (sizeof (struct gnat_binding_level));
326 /* Use a free BLOCK, if any; otherwise, allocate one. */
327 if (free_block_chain)
329 newlevel->block = free_block_chain;
330 free_block_chain = BLOCK_CHAIN (free_block_chain);
331 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
334 newlevel->block = make_node (BLOCK);
336 /* Point the BLOCK we just made to its parent. */
337 if (current_binding_level)
338 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
340 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
341 TREE_USED (newlevel->block) = 1;
343 /* Add this level to the front of the chain (stack) of levels that are
345 newlevel->chain = current_binding_level;
346 newlevel->jmpbuf_decl = NULL_TREE;
347 current_binding_level = newlevel;
350 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
351 and point FNDECL to this BLOCK. */
354 set_current_block_context (tree fndecl)
356 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
357 DECL_INITIAL (fndecl) = current_binding_level->block;
360 /* Set the jmpbuf_decl for the current binding level to DECL. */
363 set_block_jmpbuf_decl (tree decl)
365 current_binding_level->jmpbuf_decl = decl;
368 /* Get the jmpbuf_decl, if any, for the current binding level. */
371 get_block_jmpbuf_decl ()
373 return current_binding_level->jmpbuf_decl;
376 /* Exit a binding level. Set any BLOCK into the current code group. */
381 struct gnat_binding_level *level = current_binding_level;
382 tree block = level->block;
384 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
385 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
387 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
388 are no variables free the block and merge its subblocks into those of its
389 parent block. Otherwise, add it to the list of its parent. */
390 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
392 else if (BLOCK_VARS (block) == NULL_TREE)
394 BLOCK_SUBBLOCKS (level->chain->block)
395 = chainon (BLOCK_SUBBLOCKS (block),
396 BLOCK_SUBBLOCKS (level->chain->block));
397 BLOCK_CHAIN (block) = free_block_chain;
398 free_block_chain = block;
402 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
403 BLOCK_SUBBLOCKS (level->chain->block) = block;
404 TREE_USED (block) = 1;
405 set_block_for_group (block);
408 /* Free this binding structure. */
409 current_binding_level = level->chain;
410 level->chain = free_binding_level;
411 free_binding_level = level;
415 /* Records a ..._DECL node DECL as belonging to the current lexical scope
416 and uses GNAT_NODE for location information and propagating flags. */
/* NOTE(review): numbered listing with elided lines (function header, braces,
   else keywords); kept verbatim, comments only added.  Returns void; takes
   (tree decl, Node_Id gnat_node).  */
419 gnat_pushdecl (tree decl, Node_Id gnat_node)
421 /* If this decl is public external or at toplevel, there is no context.
422 But PARM_DECLs always go in the level of its function. */
423 if (TREE_CODE (decl) != PARM_DECL
424 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
425 || global_bindings_p ()))
426 DECL_CONTEXT (decl) = 0;
/* Otherwise the decl belongs to the enclosing function (elided else).  */
429 DECL_CONTEXT (decl) = current_function_decl;
431 /* Functions imported in another function are not really nested. */
432 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
433 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings when the GNAT node is absent or has Warnings_Off.  */
436 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
438 /* Set the location of DECL and emit a declaration for it. */
439 if (Present (gnat_node))
440 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
441 add_decl_expr (decl, gnat_node);
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later. Put global variables in the
445 globals list and builtin functions in a dedicated list to speed up
446 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
447 the list, as they will cause trouble with the debugger and aren't needed
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
452 if (global_bindings_p ())
454 VEC_safe_push (tree, gc, global_decls, decl)
456 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
457 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Non-global decls are chained onto the current binding level's BLOCK
   (elided else branch).  */
461 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
462 BLOCK_VARS (current_binding_level->block) = decl;
466 /* For the declaration of a type, set its name if it either is not already
467 set or if the previous type name was not derived from a source name.
468 We'd rather have the type named with a real name and all the pointer
469 types to the same object have the same POINTER_TYPE node. Code in the
470 equivalent function of c-decl.c makes a copy of the type node here, but
471 that may cause us trouble with incomplete types. We make an exception
472 for fat pointer types because the compiler automatically builds them
473 for unconstrained array types and the debugger uses them to represent
474 both these and pointers to these. */
475 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
477 tree t = TREE_TYPE (decl);
479 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
/* Fat pointers get a named variant copy instead of in-place naming.  */
481 else if (TYPE_FAT_POINTER_P (t))
483 tree tt = build_variant_type_copy (t);
484 TYPE_NAME (tt) = decl;
485 TREE_USED (tt) = TREE_USED (t);
486 TREE_TYPE (decl) = tt;
487 DECL_ORIGINAL_TYPE (decl) = t;
490 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
495 /* Propagate the name to all the variants. This is needed for
496 the type qualifiers machinery to work properly. */
498 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
499 TYPE_NAME (t) = decl;
503 /* Do little here. Set up the standard declarations later after the
504 front end has been run. */
507 gnat_init_decl_processing (void)
509 /* Make the binding_level structure for global names. */
510 current_function_decl = 0;
511 current_binding_level = 0;
512 free_binding_level = 0;
515 build_common_tree_nodes (true, true);
517 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
518 corresponding to the size of Pmode. In most cases when ptr_mode and
519 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
520 far better code using the width of Pmode. Make this here since we need
521 this before we can expand the GNAT types. */
522 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
523 set_sizetype (size_type_node);
525 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
526 boolean_type_node = make_node (BOOLEAN_TYPE);
527 TYPE_PRECISION (boolean_type_node) = 1;
528 fixup_unsigned_type (boolean_type_node);
529 TYPE_RM_SIZE_NUM (boolean_type_node) = bitsize_int (1);
531 build_common_tree_nodes_2 (0);
533 ptr_void_type_node = build_pointer_type (void_type_node);
536 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
539 record_builtin_type (const char *name, tree type)
541 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
543 gnat_pushdecl (type_decl, Empty);
545 if (debug_hooks->type_decl)
546 debug_hooks->type_decl (type_decl, false);
549 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
550 finish constructing the record or union type. If REP_LEVEL is zero, this
551 record has no representation clause and so will be entirely laid out here.
552 If REP_LEVEL is one, this record has a representation clause and has been
553 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
554 this record is derived from a parent record and thus inherits its layout;
555 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
556 true, the record type is expected to be modified afterwards so it will
557 not be sent to the back-end for finalization. */
/* NOTE(review): numbered listing with many elided lines (function header,
   braces, else branches, switch skeleton); kept verbatim below, only
   comments added.  */
560 finish_record_type (tree record_type, tree fieldlist, int rep_level,
561 bool do_not_finalize)
563 enum tree_code code = TREE_CODE (record_type);
564 tree name = TYPE_NAME (record_type);
565 tree ada_size = bitsize_zero_node;
566 tree size = bitsize_zero_node;
567 bool had_size = TYPE_SIZE (record_type) != 0;
568 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
569 bool had_align = TYPE_ALIGN (record_type) != 0;
572 TYPE_FIELDS (record_type) = fieldlist;
574 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
575 generate debug info and have a parallel type. */
576 if (name && TREE_CODE (name) == TYPE_DECL)
577 name = DECL_NAME (name);
578 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
580 /* Globally initialize the record first. If this is a rep'ed record,
581 that just means some initializations; otherwise, layout the record. */
584 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
585 SET_TYPE_MODE (record_type, BLKmode);
588 TYPE_SIZE_UNIT (record_type) = size_zero_node;
590 TYPE_SIZE (record_type) = bitsize_zero_node;
592 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
593 out just like a UNION_TYPE, since the size will be fixed. */
594 else if (code == QUAL_UNION_TYPE)
599 /* Ensure there isn't a size already set. There can be in an error
600 case where there is a rep clause but all fields have errors and
601 no longer have a position. */
602 TYPE_SIZE (record_type) = 0;
603 layout_type (record_type);
606 /* At this point, the position and size of each field is known. It was
607 either set before entry by a rep clause, or by laying out the type above.
609 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
610 to compute the Ada size; the GCC size and alignment (for rep'ed records
611 that are not padding types); and the mode (for rep'ed records). We also
612 clear the DECL_BIT_FIELD indication for the cases we know have not been
613 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
615 if (code == QUAL_UNION_TYPE)
616 fieldlist = nreverse (fieldlist);
618 for (field = fieldlist; field; field = TREE_CHAIN (field))
620 tree type = TREE_TYPE (field);
621 tree pos = bit_position (field);
622 tree this_size = DECL_SIZE (field);
/* Prefer the record subtype's Ada size when one is recorded.  */
625 if ((TREE_CODE (type) == RECORD_TYPE
626 || TREE_CODE (type) == UNION_TYPE
627 || TREE_CODE (type) == QUAL_UNION_TYPE)
628 && !TYPE_IS_FAT_POINTER_P (type)
629 && !TYPE_CONTAINS_TEMPLATE_P (type)
630 && TYPE_ADA_SIZE (type))
631 this_ada_size = TYPE_ADA_SIZE (type);
633 this_ada_size = this_size;
635 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
636 if (DECL_BIT_FIELD (field)
637 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
639 unsigned int align = TYPE_ALIGN (type);
641 /* In the general case, type alignment is required. */
642 if (value_factor_p (pos, align))
644 /* The enclosing record type must be sufficiently aligned.
645 Otherwise, if no alignment was specified for it and it
646 has been laid out already, bump its alignment to the
647 desired one if this is compatible with its size. */
648 if (TYPE_ALIGN (record_type) >= align)
650 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
651 DECL_BIT_FIELD (field) = 0;
655 && value_factor_p (TYPE_SIZE (record_type), align))
657 TYPE_ALIGN (record_type) = align;
658 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
659 DECL_BIT_FIELD (field) = 0;
663 /* In the non-strict alignment case, only byte alignment is. */
664 if (!STRICT_ALIGNMENT
665 && DECL_BIT_FIELD (field)
666 && value_factor_p (pos, BITS_PER_UNIT))
667 DECL_BIT_FIELD (field) = 0;
670 /* If we still have DECL_BIT_FIELD set at this point, we know the field
671 is technically not addressable. Except that it can actually be
672 addressed if the field is BLKmode and happens to be properly
674 DECL_NONADDRESSABLE_P (field)
675 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
677 /* A type must be as aligned as its most aligned field that is not
678 a bit-field. But this is already enforced by layout_type. */
679 if (rep_level > 0 && !DECL_BIT_FIELD (field))
680 TYPE_ALIGN (record_type)
681 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Size computation dispatches on the record code (elided switch): for
   UNION_TYPE the size is the max over fields...  */
686 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
687 size = size_binop (MAX_EXPR, size, this_size);
690 case QUAL_UNION_TYPE:
692 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
693 this_ada_size, ada_size);
694 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
699 /* Since we know here that all fields are sorted in order of
700 increasing bit position, the size of the record is one
701 higher than the ending bit of the last field processed
702 unless we have a rep clause, since in that case we might
703 have a field outside a QUAL_UNION_TYPE that has a higher ending
704 position. So use a MAX in that case. Also, if this field is a
705 QUAL_UNION_TYPE, we need to take into account the previous size in
706 the case of empty variants. */
708 = merge_sizes (ada_size, pos, this_ada_size,
709 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
711 = merge_sizes (size, pos, this_size,
712 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Restore the original field order for QUAL_UNION_TYPEs.  */
720 if (code == QUAL_UNION_TYPE)
721 nreverse (fieldlist);
723 /* If the type is discriminated, it can be used to access all its
724 constrained subtypes, so force structural equality checks. */
725 if (CONTAINS_PLACEHOLDER_P (size))
726 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
730 /* If this is a padding record, we never want to make the size smaller
731 than what was specified in it, if any. */
732 if (TREE_CODE (record_type) == RECORD_TYPE
733 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
734 size = TYPE_SIZE (record_type);
736 /* Now set any of the values we've just computed that apply. */
737 if (!TYPE_IS_FAT_POINTER_P (record_type)
738 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
739 SET_TYPE_ADA_SIZE (record_type, ada_size);
743 tree size_unit = had_size_unit
744 ? TYPE_SIZE_UNIT (record_type)
746 size_binop (CEIL_DIV_EXPR, size,
748 unsigned int align = TYPE_ALIGN (record_type);
750 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
751 TYPE_SIZE_UNIT (record_type)
752 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
754 compute_record_mode (record_type);
/* Finally hand the type to the back-end unless the caller will still
   modify it.  */
758 if (!do_not_finalize)
759 rest_of_record_type_compilation (record_type);
762 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
763 the debug information associated with it. It need not be invoked
764 directly in most cases since finish_record_type takes care of doing
765 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
/* NOTE(review): numbered listing with many elided lines (function header,
   braces, else branches, local declarations); kept verbatim below, only
   comments added.  */
768 rest_of_record_type_compilation (tree record_type)
770 tree fieldlist = TYPE_FIELDS (record_type);
772 enum tree_code code = TREE_CODE (record_type);
773 bool var_size = false;
775 for (field = fieldlist; field; field = TREE_CHAIN (field))
777 /* We need to make an XVE/XVU record if any field has variable size,
778 whether or not the record does. For example, if we have a union,
779 it may be that all fields, rounded up to the alignment, have the
780 same size, in which case we'll use that size. But the debug
781 output routines (except Dwarf2) won't be able to output the fields,
782 so we need to make the special record. */
783 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
784 /* If a field has a non-constant qualifier, the record will have
785 variable size too. */
786 || (code == QUAL_UNION_TYPE
787 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
794 /* If this record is of variable size, rename it so that the
795 debugger knows it is and make a new, parallel, record
796 that tells the debugger how the record is laid out. See
797 exp_dbug.ads. But don't do this for records that are padding
798 since they confuse GDB. */
800 && !(TREE_CODE (record_type) == RECORD_TYPE
801 && TYPE_IS_PADDING_P (record_type)))
/* Build the parallel XV record: QUAL_UNION becomes a plain UNION.  */
804 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
805 ? UNION_TYPE : TREE_CODE (record_type));
806 tree orig_name = TYPE_NAME (record_type);
808 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
811 = concat_id_with_name (orig_id,
812 TREE_CODE (record_type) == QUAL_UNION_TYPE
814 tree last_pos = bitsize_zero_node;
816 tree prev_old_field = 0;
818 TYPE_NAME (new_record_type) = new_id;
819 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
820 TYPE_STUB_DECL (new_record_type)
821 = create_type_stub_decl (new_id, new_record_type);
822 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
823 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
824 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
825 TYPE_SIZE_UNIT (new_record_type)
826 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
828 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
830 /* Now scan all the fields, replacing each field with a new
831 field corresponding to the new encoding. */
832 for (old_field = TYPE_FIELDS (record_type); old_field;
833 old_field = TREE_CHAIN (old_field))
835 tree field_type = TREE_TYPE (old_field);
836 tree field_name = DECL_NAME (old_field);
838 tree curpos = bit_position (old_field);
840 unsigned int align = 0;
843 /* See how the position was modified from the last position.
845 There are two basic cases we support: a value was added
846 to the last position or the last position was rounded to
847 a boundary and then something was added. Check for the
848 first case first. If not, see if there is any evidence
849 of rounding. If so, round the last position and try
852 If this is a union, the position can be taken as zero. */
854 /* Some computations depend on the shape of the position expression,
855 so strip conversions to make sure it's exposed. */
856 curpos = remove_conversions (curpos, true);
858 if (TREE_CODE (new_record_type) == UNION_TYPE)
859 pos = bitsize_zero_node, align = 0;
861 pos = compute_related_constant (curpos, last_pos);
863 if (!pos && TREE_CODE (curpos) == MULT_EXPR
864 && host_integerp (TREE_OPERAND (curpos, 1), 1))
866 tree offset = TREE_OPERAND (curpos, 0);
867 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
869 /* An offset which is a bitwise AND with a negative power of 2
870 means an alignment corresponding to this power of 2. */
871 offset = remove_conversions (offset, true);
872 if (TREE_CODE (offset) == BIT_AND_EXPR
873 && host_integerp (TREE_OPERAND (offset, 1), 0)
874 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
877 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
878 if (exact_log2 (pow) > 0)
882 pos = compute_related_constant (curpos,
883 round_up (last_pos, align));
885 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
886 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
887 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
888 && host_integerp (TREE_OPERAND
889 (TREE_OPERAND (curpos, 0), 1),
894 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
895 pos = compute_related_constant (curpos,
896 round_up (last_pos, align));
898 else if (potential_alignment_gap (prev_old_field, old_field,
901 align = TYPE_ALIGN (field_type);
902 pos = compute_related_constant (curpos,
903 round_up (last_pos, align));
906 /* If we can't compute a position, set it to zero.
908 ??? We really should abort here, but it's too much work
909 to get this correct for all cases. */
912 pos = bitsize_zero_node;
914 /* See if this type is variable-sized and make a pointer type
915 and indicate the indirection if so. Beware that the debug
916 back-end may adjust the position computed above according
917 to the alignment of the field type, i.e. the pointer type
918 in this case, if we don't preventively counter that. */
919 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
921 field_type = build_pointer_type (field_type);
922 if (align != 0 && TYPE_ALIGN (field_type) > align)
924 field_type = copy_node (field_type);
925 TYPE_ALIGN (field_type) = align;
930 /* Make a new field name, if necessary. */
931 if (var || align != 0)
/* Encoded suffix per exp_dbug.ads: XVL = variable size via pointer,
   XVAnn = alignment of nn bytes.  */
936 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
937 align / BITS_PER_UNIT);
939 strcpy (suffix, "XVL");
941 field_name = concat_id_with_name (field_name, suffix);
944 new_field = create_field_decl (field_name, field_type,
946 DECL_SIZE (old_field), pos, 0);
947 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
948 TYPE_FIELDS (new_record_type) = new_field;
950 /* If old_field is a QUAL_UNION_TYPE, take its size as being
951 zero. The only time it's not the last field of the record
952 is when there are other components at fixed positions after
953 it (meaning there was a rep clause for every field) and we
954 want to be able to encode them. */
955 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
956 (TREE_CODE (TREE_TYPE (old_field))
959 : DECL_SIZE (old_field));
960 prev_old_field = old_field;
/* Fields were pushed in reverse; restore source order.  */
963 TYPE_FIELDS (new_record_type)
964 = nreverse (TYPE_FIELDS (new_record_type));
966 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
969 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
972 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
975 add_parallel_type (tree decl, tree parallel_type)
979 while (DECL_PARALLEL_TYPE (d))
980 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
982 SET_DECL_PARALLEL_TYPE (d, parallel_type);
985 /* Return the parallel type associated to a type, if any. */
988 get_parallel_type (tree type)
990 if (TYPE_STUB_DECL (type))
991 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
996 /* Utility function of above to merge LAST_SIZE, the previous size of a record
997 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
998 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
999 replace a value of zero with the old size. If HAS_REP is true, we take the
1000 MAX of the end position of this field with LAST_SIZE. In all other cases,
1001 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1004 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1007   tree type = TREE_TYPE (last_size);
/* Ordinary field: the record now ends at FIRST_BIT + SIZE; with a rep
   clause, fields may overlap, so keep the MAX with the running size.  */
1010   if (!special || TREE_CODE (size) != COND_EXPR)
1012       new = size_binop (PLUS_EXPR, first_bit, size);
1014 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION variant: rebuild the COND_EXPR, recursing into each arm and
   substituting LAST_SIZE where an arm's size is zero, per the comment
   above.  (NOTE(review): the HAS_REP recursion arguments are on lines
   elided from this listing.)  */
1018     new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1019 integer_zerop (TREE_OPERAND (size, 1))
1020 ? last_size : merge_sizes (last_size, first_bit,
1021 TREE_OPERAND (size, 1),
1023 integer_zerop (TREE_OPERAND (size, 2))
1024 ? last_size : merge_sizes (last_size, first_bit,
1025 TREE_OPERAND (size, 2),
1028   /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1029 when fed through substitute_in_expr) into thinking that a constant
1030 size is not constant. */
1031   while (TREE_CODE (new) == NON_LVALUE_EXPR)
1032     new = TREE_OPERAND (new, 0);
1037 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1038 related by the addition of a constant. Return that constant if so. */
1041 compute_related_constant (tree op0, tree op1)
1043   tree op0_var, op1_var;
/* Split each operand into constant + variable parts, then the candidate
   constant is the difference of the constant parts.  */
1044   tree op0_con = split_plus (op0, &op0_var);
1045   tree op1_con = split_plus (op1, &op1_var);
1046   tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1048   if (operand_equal_p (op0_var, op1_var, 0))
1050   else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1056 /* Utility function of above to split a tree OP which may be a sum, into a
1057 constant part, which is returned, and a variable part, which is stored
1058 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1062 split_plus (tree in, tree *pvar)
1064   /* Strip NOPS in order to ease the tree traversal and maximize the
1065 potential for constant or plus/minus discovery. We need to be careful
1066 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part, constant is zero
   (the zero return is the final fallback below).  */
1070   *pvar = convert (bitsizetype, in);
1072   if (TREE_CODE (in) == INTEGER_CST)
1074       *pvar = bitsize_zero_node;
1075       return convert (bitsizetype, in);
1077   else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1079       tree lhs_var, rhs_var;
1080       tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1081       tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side yielded a constant part: leave *pvar as IN itself.  */
1083       if (lhs_var == TREE_OPERAND (in, 0)
1084 && rhs_var == TREE_OPERAND (in, 1))
1085 return bitsize_zero_node;
/* Recombine the split halves with the same PLUS/MINUS operation.  */
1087       *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1088       return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1091   return bitsize_zero_node;
1094 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1095 subprogram. If it is void_type_node, then we are dealing with a procedure,
1096 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1097 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1098 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1099 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1100 object. RETURNS_BY_REF is true if the function returns by reference.
1101 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1102 first parameter) the address of the place to copy its result. */
1105 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1106 bool returns_unconstrained, bool returns_by_ref,
1107 bool returns_by_target_ptr)
1109   /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1110 the subprogram formal parameters. This list is generated by traversing the
1111 input list of PARM_DECL nodes. */
1112   tree param_type_list = NULL;
1116   for (param_decl = param_decl_list; param_decl;
1117        param_decl = TREE_CHAIN (param_decl))
1118     param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1121   /* The list of the function parameter types has to be terminated by the void
1122 type to signal to the back-end that we are not dealing with a variable
1123 parameter subprogram, but that the subprogram has a fixed number of
1125   param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1127   /* The list of argument types has been created in reverse
1129   param_type_list = nreverse (param_type_list);
1131   type = build_function_type (return_type, param_type_list);
1133   /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1134 or the new type should, make a copy of TYPE. Likewise for
1135 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1136   if (TYPE_CI_CO_LIST (type) || cico_list
1137       || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1138       || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1139       || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1140     type = copy_type (type);
/* Stamp the Ada-specific calling-convention flags onto the (possibly
   freshly copied) FUNCTION_TYPE.  */
1142   TYPE_CI_CO_LIST (type) = cico_list;
1143   TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1144   TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1145   TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1149 /* Return a copy of TYPE but safe to modify in any way. */
1152 copy_type (tree type)
1154   tree new = copy_node (type);
1156   /* copy_node clears this field instead of copying it, because it is
1157 aliased with TREE_CHAIN. */
1158   TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/reference caches and make
   it its own main variant, so later modifications can't leak back.  */
1160   TYPE_POINTER_TO (new) = 0;
1161   TYPE_REFERENCE_TO (new) = 0;
1162   TYPE_MAIN_VARIANT (new) = new;
1163   TYPE_NEXT_VARIANT (new) = 0;
1168 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1169 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1173 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1175   /* First build a type for the desired range. */
1176   tree type = build_index_2_type (min, max);
1178   /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1179 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1180 is set, but not to INDEX, make a copy of this type with the requested
1181 index type. Note that we have no way of sharing these types, but that's
1182 only a small hole. */
1183   if (TYPE_INDEX_TYPE (type) == index)
1185   else if (TYPE_INDEX_TYPE (type))
1186     type = copy_type (type);
1188   SET_TYPE_INDEX_TYPE (type, index);
/* Give the new index type an (anonymous) TYPE_DECL so debug info is
   emitted for it at the position of GNAT_NODE.  */
1189   create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1193 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1194 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1198 create_type_stub_decl (tree type_name, tree type)
1200   /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1201 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1202 emitted in DWARF. */
1203   tree type_decl = build_decl (TYPE_DECL, type_name, type);
1204   DECL_ARTIFICIAL (type_decl) = 1;
1208 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1209 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1210 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1211 true if we need to write debug information about this type. GNAT_NODE
1212 is used for the position of the decl. */
1215 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1216 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1218   enum tree_code code = TREE_CODE (type);
/* NAMED means the type already carries a proper TYPE_DECL name.  */
1219   bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1222   /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1223   gcc_assert (!TYPE_IS_DUMMY_P (type));
1225   /* If the type hasn't been named yet, we're naming it; preserve an existing
1226 TYPE_STUB_DECL that has been attached to it for some purpose. */
1227   if (!named && TYPE_STUB_DECL (type))
1229       type_decl = TYPE_STUB_DECL (type);
1230       DECL_NAME (type_decl) = type_name;
1233     type_decl = build_decl (TYPE_DECL, type_name, type);
1235   DECL_ARTIFICIAL (type_decl) = artificial_p;
1236   gnat_pushdecl (type_decl, gnat_node);
1237   process_attributes (type_decl, attr_list);
1239   /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1240 This causes the name to be also viewed as a "tag" by the debug
1241 back-end, with the advantage that no DW_TAG_typedef is emitted
1242 for artificial "tagged" types in DWARF. */
1244     TYPE_STUB_DECL (type) = type_decl;
1246   /* Pass the type declaration to the debug back-end unless this is an
1247 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1248 type for which debugging information was not requested, or else an
1249 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1250 handled separately. And do not pass dummy types either. */
1251   if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1252     DECL_IGNORED_P (type_decl) = 1;
1253   else if (code != ENUMERAL_TYPE
1254 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1255 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1256 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1257 && !(code == RECORD_TYPE
1259 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1260     rest_of_type_decl_compilation (type_decl);
1265 /* Return a VAR_DECL or CONST_DECL node.
1267 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1268 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1269 the GCC tree for an optional initial expression; NULL_TREE if none.
1271 CONST_FLAG is true if this variable is constant, in which case we might
1272 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1274 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1275 definition to be made visible outside of the current compilation unit, for
1276 instance variable definitions in a package specification.
1278 EXTERN_FLAG is true when processing an external variable declaration (as
1279 opposed to a definition: no storage is to be allocated for the variable).
1281 STATIC_FLAG is only relevant when not at top level. In that case
1282 it indicates whether to always allocate storage to the variable.
1284 GNAT_NODE is used for the position of the decl. */
1287 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1288 bool const_flag, bool public_flag, bool extern_flag,
1289 bool static_flag, bool const_decl_allowed_p,
1290 struct attrib *attr_list, Node_Id gnat_node)
/* INIT_CONST (declaration elided from this listing): whether VAR_INIT is a
   compile-time/static-initializer-valid constant in this context.  */
1294       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1295       && (global_bindings_p () || static_flag
1296 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1297 : TREE_CONSTANT (var_init)));
1299   /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1300 case the initializer may be used in-lieu of the DECL node (as done in
1301 Identifier_to_gnu). This is useful to prevent the need of elaboration
1302 code when an identifier for which such a decl is made is in turn used as
1303 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1304 but extra constraints apply to this choice (see below) and are not
1305 relevant to the distinction we wish to make. */
1306   bool constant_p = const_flag && init_const;
1308   /* The actual DECL node. CONST_DECL was initially intended for enumerals
1309 and may be used for scalars in general but not for aggregates. */
1311     = build_decl ((constant_p && const_decl_allowed_p
1312 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1315   /* If this is external, throw away any initializations (they will be done
1316 elsewhere) unless this is a constant for which we would like to remain
1317 able to get the initializer. If we are defining a global here, leave a
1318 constant initialization and save any variable elaborations for the
1319 elaboration routine. If we are just annotating types, throw away the
1320 initialization if it isn't a constant. */
1321   if ((extern_flag && !constant_p)
1322       || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1323     var_init = NULL_TREE;
1325   /* At the global level, an initializer requiring code to be generated
1326 produces elaboration statements. Check that such statements are allowed,
1327 that is, not violating a No_Elaboration_Code restriction. */
1328   if (global_bindings_p () && var_init != 0 && ! init_const)
1329     Check_Elaboration_Code_Allowed (gnat_node);
1331   /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1332 try to fiddle with DECL_COMMON. However, on platforms that don't
1333 support global BSS sections, uninitialized global variables would
1334 go in DATA instead, thus increasing the size of the executable. */
1336       && TREE_CODE (var_decl) == VAR_DECL
1337       && !have_global_bss_p ())
1338     DECL_COMMON (var_decl) = 1;
1339   DECL_INITIAL (var_decl) = var_init;
1340   TREE_READONLY (var_decl) = const_flag;
1341   DECL_EXTERNAL (var_decl) = extern_flag;
1342   TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1343   TREE_CONSTANT (var_decl) = constant_p;
/* A volatile type makes the decl itself volatile and side-effecting.  */
1344   TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1345     = TYPE_VOLATILE (type);
1347   /* If it's public and not external, always allocate storage for it.
1348 At the global binding level we need to allocate static storage for the
1349 variable if and only if it's not external. If we are not at the top level
1350 we allocate automatic storage unless requested not to. */
1351   TREE_STATIC (var_decl)
1352     = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1354   if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1355     SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1357   process_attributes (var_decl, attr_list);
1359   /* Add this decl to the current binding level. */
1360   gnat_pushdecl (var_decl, gnat_node);
1362   if (TREE_SIDE_EFFECTS (var_decl))
1363     TREE_ADDRESSABLE (var_decl) = 1;
/* Emit code/RTL for real variables: globals via the middle-end driver,
   locals via expand_decl.  CONST_DECLs need no storage.  */
1365   if (TREE_CODE (var_decl) != CONST_DECL)
1367       if (global_bindings_p ())
1368 rest_of_decl_compilation (var_decl, true, 0);
1371 expand_decl (var_decl);
1376 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1379 aggregate_type_contains_array_p (tree type)
1381   switch (TREE_CODE (type))
/* Record-like aggregates: recurse into each aggregate-typed field.
   (Other cases of the switch, e.g. ARRAY_TYPE, are on lines elided from
   this listing.)  */
1385     case QUAL_UNION_TYPE:
1388 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1389 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1390 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1403 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1404 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1405 this field is in a record type with a "pragma pack". If SIZE is nonzero
1406 it is the specified size for this field. If POS is nonzero, it is the bit
1407 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1408 the address of this field for aliasing purposes. If it is negative, we
1409 should not make a bitfield, which is used by make_aligning_type. */
1412 create_field_decl (tree field_name, tree field_type, tree record_type,
1413 int packed, tree size, tree pos, int addressable)
1415   tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1417   DECL_CONTEXT (field_decl) = record_type;
1418   TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1420   /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1421 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1422 Likewise for an aggregate without specified position that contains an
1423 array, because in this case slices of variable length of this array
1424 must be handled by GCC and variable-sized objects need to be aligned
1425 to at least a byte boundary. */
1426   if (packed && (TYPE_MODE (field_type) == BLKmode
1428 && AGGREGATE_TYPE_P (field_type)
1429 && aggregate_type_contains_array_p (field_type))))
1430     DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1432   /* If a size is specified, use it. Otherwise, if the record type is packed
1433 compute a size to use, which may differ from the object's natural size.
1434 We always set a size in this case to trigger the checks for bitfield
1435 creation below, which is typically required when no position has been
1438     size = convert (bitsizetype, size);
1439   else if (packed == 1)
1441       size = rm_size (field_type);
1443       /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1445       if (TREE_CODE (size) == INTEGER_CST
1446 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1447 size = round_up (size, BITS_PER_UNIT);
1450   /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1451 specified for two reasons: first if the size differs from the natural
1452 size. Second, if the alignment is insufficient. There are a number of
1453 ways the latter can be true.
1455 We never make a bitfield if the type of the field has a nonconstant size,
1456 because no such entity requiring bitfield operations should reach here.
1458 We do *preventively* make a bitfield when there might be the need for it
1459 but we don't have all the necessary information to decide, as is the case
1460 of a field with no specified position in a packed record.
1462 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1463 in layout_decl or finish_record_type to clear the bit_field indication if
1464 it is in fact not needed. */
1465   if (addressable >= 0
1467       && TREE_CODE (size) == INTEGER_CST
1468       && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1469       && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1470 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1472 || (TYPE_ALIGN (record_type) != 0
1473 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1475       DECL_BIT_FIELD (field_decl) = 1;
1476       DECL_SIZE (field_decl) = size;
/* For an unpositioned bitfield in a non-packed record, cap the field's
   alignment to the record's if the latter is known.  */
1477       if (!packed && !pos)
1478 DECL_ALIGN (field_decl)
1479 = (TYPE_ALIGN (record_type) != 0
1480 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1481 : TYPE_ALIGN (field_type));
1484   DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1486   /* Bump the alignment if need be, either for bitfield/packing purposes or
1487 to satisfy the type requirements if no such consideration applies. When
1488 we get the alignment from the type, indicate if this is from an explicit
1489 user request, which prevents stor-layout from lowering it later on. */
1491     unsigned int bit_align
1492       = (DECL_BIT_FIELD (field_decl) ? 1
1493 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1495     if (bit_align > DECL_ALIGN (field_decl))
1496       DECL_ALIGN (field_decl) = bit_align;
1497     else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1499 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1500 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1506       /* We need to pass in the alignment the DECL is known to have.
1507 This is the lowest-order bit set in POS, but no more than
1508 the alignment of the record, if one is specified. Note
1509 that an alignment of 0 is taken as infinite. */
1510       unsigned int known_align;
1512       if (host_integerp (pos, 1))
1513 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1515 known_align = BITS_PER_UNIT;
1517       if (TYPE_ALIGN (record_type)
1518 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1519 known_align = TYPE_ALIGN (record_type);
1521       layout_decl (field_decl, known_align);
1522       SET_DECL_OFFSET_ALIGN (field_decl,
1523 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1525       pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1526 &DECL_FIELD_BIT_OFFSET (field_decl),
1527 DECL_OFFSET_ALIGN (field_decl), pos);
1530   /* In addition to what our caller says, claim the field is addressable if we
1531 know that its type is not suitable.
1533 The field may also be "technically" nonaddressable, meaning that even if
1534 we attempt to take the field's address we will actually get the address
1535 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1536 value we have at this point is not accurate enough, so we don't account
1537 for this here and let finish_record_type decide. */
1538   if (!addressable && !type_for_nonaliased_component_p (field_type))
1541   DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1546 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1547 PARAM_TYPE is its type. READONLY is true if the parameter is
1548 readonly (either an In parameter or an address of a pass-by-ref
1552 create_param_decl (tree param_name, tree param_type, bool readonly)
1554   tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1556   /* Honor targetm.calls.promote_prototypes(), as not doing so can
1557 lead to various ABI violations. */
1558   if (targetm.calls.promote_prototypes (param_type)
1559       && (TREE_CODE (param_type) == INTEGER_TYPE
1560 || TREE_CODE (param_type) == ENUMERAL_TYPE
1561 || TREE_CODE (param_type) == BOOLEAN_TYPE)
1562       && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1564       /* We have to be careful about biased types here. Make a subtype
1565 of integer_type_node with the proper biasing. */
1566       if (TREE_CODE (param_type) == INTEGER_TYPE
1567 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* copy_type gives us a fresh subtype we can safely mark biased.  */
1570     = copy_type (build_range_type (integer_type_node,
1571 TYPE_MIN_VALUE (param_type),
1572 TYPE_MAX_VALUE (param_type)));
1574 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1577 param_type = integer_type_node;
/* DECL_ARG_TYPE is the type as actually passed, after promotion.  */
1580   DECL_ARG_TYPE (param_decl) = param_type;
1581   TREE_READONLY (param_decl) = readonly;
1585 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1588 process_attributes (tree decl, struct attrib *attr_list)
1590   for (; attr_list; attr_list = attr_list->next)
1591     switch (attr_list->type)
1593       case ATTR_MACHINE_ATTRIBUTE:
1594 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1596 ATTR_FLAG_TYPE_IN_PLACE);
/* Link_Alias: only meaningful on a defined (non-external) entity.  */
1599       case ATTR_LINK_ALIAS:
1600 if (! DECL_EXTERNAL (decl))
1602     TREE_STATIC (decl) = 1;
1603     assemble_alias (decl, attr_list->name);
1607       case ATTR_WEAK_EXTERNAL:
1609   declare_weak (decl);
1611   post_error ("?weak declarations not supported on this target",
1612 attr_list->error_point);
/* Link_Section: requires named-section support from the target.  */
1615       case ATTR_LINK_SECTION:
1616 if (targetm.have_named_sections)
1618     DECL_SECTION_NAME (decl)
1619       = build_string (IDENTIFIER_LENGTH (attr_list->name),
1620 IDENTIFIER_POINTER (attr_list->name));
1621     DECL_COMMON (decl) = 0;
1624   post_error ("?section attributes are not supported for this target",
1625 attr_list->error_point);
1628       case ATTR_LINK_CONSTRUCTOR:
1629 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1630 TREE_USED (decl) = 1;
1633       case ATTR_LINK_DESTRUCTOR:
1634 DECL_STATIC_DESTRUCTOR (decl) = 1;
1635 TREE_USED (decl) = 1;
/* TLS variables cannot be common; clear DECL_COMMON explicitly.  */
1638       case ATTR_THREAD_LOCAL_STORAGE:
1639 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1640 DECL_COMMON (decl) = 0;
1645 /* Record a global renaming pointer. */
1648 record_global_renaming_pointer (tree decl)
/* Only decls that actually rename an object belong in this table.  */
1650   gcc_assert (DECL_RENAMED_OBJECT (decl));
1651   VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1654 /* Invalidate the global renaming pointers. */
1657 invalidate_global_renaming_pointers (void)
/* Clear each recorded renaming, then release the vector itself.  */
1662   for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1663     SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1665   VEC_free (tree, gc, global_renaming_pointers);
1668 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1672 value_factor_p (tree value, HOST_WIDE_INT factor)
1674   if (host_integerp (value, 1))
1675     return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple of FACTOR if either operand is.  */
1677   if (TREE_CODE (value) == MULT_EXPR)
1678     return (value_factor_p (TREE_OPERAND (value, 0), factor)
1679     || value_factor_p (TREE_OPERAND (value, 1), factor));
1684 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1685 unless we can prove these 2 fields are laid out in such a way that no gap
1686 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1687 is the distance in bits between the end of PREV_FIELD and the starting
1688 position of CURR_FIELD. It is ignored if null. */
1691 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1693   /* If this is the first field of the record, there cannot be any gap */
1697   /* If the previous field is a union type, then return False: The only
1698 time when such a field is not the last field of the record is when
1699 there are other components at fixed positions after it (meaning there
1700 was a rep clause for every field), in which case we don't want the
1701 alignment constraint to override them. */
1702   if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1705   /* If the distance between the end of prev_field and the beginning of
1706 curr_field is constant, then there is a gap if the value of this
1707 constant is not null. */
1708   if (offset && host_integerp (offset, 1))
1709     return !integer_zerop (offset);
1711   /* If the size and position of the previous field are constant,
1712 then check the sum of this size and position. There will be a gap
1713 iff it is not multiple of the current field alignment. */
1714   if (host_integerp (DECL_SIZE (prev_field), 1)
1715       && host_integerp (bit_position (prev_field), 1))
1716     return ((tree_low_cst (bit_position (prev_field), 1)
1717      + tree_low_cst (DECL_SIZE (prev_field), 1))
1718     % DECL_ALIGN (curr_field) != 0);
1720   /* If both the position and size of the previous field are multiples
1721 of the current field alignment, there cannot be any gap. */
1722   if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1723       && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1726   /* Fallback, return that there may be a potential gap */
1730 /* Returns a LABEL_DECL node for LABEL_NAME. */
1733 create_label_decl (tree label_name)
1735   tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Labels live in the current function and carry no value (VOIDmode).  */
1737   DECL_CONTEXT (label_decl) = current_function_decl;
1738   DECL_MODE (label_decl) = VOIDmode;
1739   DECL_SOURCE_LOCATION (label_decl) = input_location;
1744 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1745 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1746 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1747 PARM_DECL nodes chained through the TREE_CHAIN field).
1749 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1750 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1753 create_subprog_decl (tree subprog_name, tree asm_name,
1754 tree subprog_type, tree param_decl_list, bool inline_flag,
1755 bool public_flag, bool extern_flag,
1756 struct attrib *attr_list, Node_Id gnat_node)
1758   tree return_type = TREE_TYPE (subprog_type);
1759   tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1761   /* If this is a non-inline function nested inside an inlined external
1762 function, we cannot honor both requests without cloning the nested
1763 function in the current unit since it is private to the other unit.
1764 We could inline the nested function as well but it's probably better
1765 to err on the side of too little inlining. */
1767       && current_function_decl
1768       && DECL_DECLARED_INLINE_P (current_function_decl)
1769       && DECL_EXTERNAL (current_function_decl))
1770     DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1772   DECL_EXTERNAL (subprog_decl) = extern_flag;
1773   TREE_PUBLIC (subprog_decl) = public_flag;
1774   TREE_STATIC (subprog_decl) = 1;
1775   TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1776   TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1777   TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1778   DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1779   DECL_ARGUMENTS (subprog_decl) = param_decl_list;
/* The RESULT_DECL is anonymous, artificial and hidden from debug info.  */
1780   DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1781   DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1782   DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1784   /* TREE_ADDRESSABLE is set on the result type to request the use of the
1785 target by-reference return mechanism. This is not supported all the
1786 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1787 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1788 the RESULT_DECL instead - see gnat_genericize for more details. */
1789   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1791       tree result_decl = DECL_RESULT (subprog_decl);
1793       TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1794       DECL_BY_REFERENCE (result_decl) = 1;
1799     SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1801   /* The expand_main_function circuitry expects "main_identifier_node" to
1802 designate the DECL_NAME of the 'main' entry point, in turn expected
1803 to be declared as the "main" function literally by default. Ada
1804 program entry points are typically declared with a different name
1805 within the binder generated file, exported as 'main' to satisfy the
1806 system expectations. Redirect main_identifier_node in this case. */
1807   if (asm_name == main_identifier_node)
1808     main_identifier_node = DECL_NAME (subprog_decl);
1811   process_attributes (subprog_decl, attr_list);
1813   /* Add this decl to the current binding level. */
1814   gnat_pushdecl (subprog_decl, gnat_node);
1816   /* Output the assembler code and/or RTL for the declaration. */
1817   rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1819   return subprog_decl;
1822 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1823 body. This routine needs to be invoked before processing the declarations
1824 appearing in the subprogram. */
1827 begin_subprog_body (tree subprog_decl)
1831   current_function_decl = subprog_decl;
1832   announce_function (subprog_decl);
1834   /* Enter a new binding level and show that all the parameters belong to
1837   for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1838        param_decl = TREE_CHAIN (param_decl))
1839     DECL_CONTEXT (param_decl) = subprog_decl;
1841   make_decl_rtl (subprog_decl);
1843   /* We handle pending sizes via the elaboration of types, so we don't need to
1844 save them. This causes them to be marked as part of the outer function
1845 and then discarded. */
1846   get_pending_sizes ();
1850 /* Helper for the genericization callback. Return a dereference of VAL
1851 if it is of a reference type. */
1854 convert_from_reference (tree val)
1856   tree value_type, ref;
/* Non-references pass through unchanged (return on an elided line).  */
1858   if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1861   value_type = TREE_TYPE (TREE_TYPE (val));
1862   ref = build1 (INDIRECT_REF, value_type, val);
1864   /* See if what we reference is CONST or VOLATILE, which requires
1865 looking into array types to get to the component type. */
1867   while (TREE_CODE (value_type) == ARRAY_TYPE)
1868     value_type = TREE_TYPE (value_type);
1871     = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1872   TREE_THIS_VOLATILE (ref)
1873     = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
/* A volatile dereference, or a side-effecting operand, makes the whole
   INDIRECT_REF side-effecting.  */
1875   TREE_SIDE_EFFECTS (ref)
1876     = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1881 /* Helper for the genericization callback. Returns true if T denotes
1882 a RESULT_DECL with DECL_BY_REFERENCE set. */
1885 is_byref_result (tree t)
1887   return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1891 /* Tree walking callback for gnat_genericize. Currently ...
1893 o Adjust references to the function's DECL_RESULT if it is marked
1894 DECL_BY_REFERENCE and so has had its type turned into a reference
1895 type at the end of the function compilation. */
1898 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1900   /* This implementation is modeled after what the C++ front-end is
1901 doing, basis of the downstream passes behavior. */
1903   tree stmt = *stmt_p;
1904   struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1906   /* If we have a direct mention of the result decl, dereference. */
1907   if (is_byref_result (stmt))
1909       *stmt_p = convert_from_reference (stmt);
1914   /* Otherwise, no need to walk the same tree twice. */
1915   if (pointer_set_contains (p_set, stmt))
1921   /* If we are taking the address of what now is a reference, just get the
1923   if (TREE_CODE (stmt) == ADDR_EXPR
1924       && is_byref_result (TREE_OPERAND (stmt, 0)))
1926       *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1930   /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
1931   else if (TREE_CODE (stmt) == RETURN_EXPR
1932    && TREE_OPERAND (stmt, 0)
1933    && is_byref_result (TREE_OPERAND (stmt, 0)))
1936   /* Don't look inside trees that cannot embed references of interest. */
1937   else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember this node so repeated walks skip it (see test above).  */
1940   pointer_set_insert (p_set, *stmt_p);
1945 /* Perform lowering of Ada trees to GENERIC. In particular:
1947    o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
1948      and adjust all the references to this decl accordingly.  */
1951 gnat_genericize (tree fndecl)
1953   /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
1954      was handled by simply setting TREE_ADDRESSABLE on the result type.
1955      Everything required to actually pass by invisible ref using the target
1956      mechanism (e.g. extra parameter) was handled at RTL expansion time.
1958      This doesn't work with GCC 4 any more for several reasons.  First, the
1959      gimplification process might need the creation of temporaries of this
1960      type, and the gimplifier ICEs on such attempts.  Second, the middle-end
1961      now relies on a different attribute for such cases (DECL_BY_REFERENCE on
1962      RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
1963      be explicitly accounted for by the front-end in the function body.
1965      We achieve the complete transformation in two steps:
1967      1/ create_subprog_decl performs early attribute tweaks: it clears
1968         TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
1969         the result decl.  The former ensures that the bit isn't set in the GCC
1970         tree saved for the function, so prevents ICEs on temporary creation.
1971         The latter we use here to trigger the rest of the processing.
1973      2/ This function performs the type transformation on the result decl
1974         and adjusts all the references to this decl from the function body
1977      Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
1978      strategy, which escapes the gimplifier temporary creation issues by
1979      creating it's own temporaries using TARGET_EXPR nodes.  Our way relies
1980      on simple specific support code in aggregate_value_p to look at the
1981      target function result decl explicitly.  */
1983   struct pointer_set_t *p_set;
1984   tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless create_subprog_decl flagged the result as passed
   by invisible reference (elided early return follows).  */
1986   if (!DECL_BY_REFERENCE (decl_result))
1989   /* Make the DECL_RESULT explicitly by-reference and adjust all the
1990      occurrences in the function body using the common tree-walking facility.
1991      We want to see every occurrence of the result decl to adjust the
1992      referencing tree, so need to use our own pointer set to control which
1993      trees should be visited again or not.  */
1995   p_set = pointer_set_create ();
/* Turn the result type into a reference type and re-lay it out.  */
1997   TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
1998   TREE_ADDRESSABLE (decl_result) = 0;
1999   relayout_decl (decl_result);
2001   walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2003   pointer_set_destroy (p_set);
2006 /* Finish the definition of the current subprogram BODY and compile it all the
2007    way to assembler language output.  ELAB_P tells if this is called for an
2008    elaboration routine, to be entirely discarded if empty.  */
2011 end_subprog_body (tree body, bool elab_p)
2013   tree fndecl = current_function_decl;
2015   /* Mark the BLOCK for this level as being for this function and pop the
2016      level.  Since the vars in it are the parameters, clear them.  */
2017   BLOCK_VARS (current_binding_level->block) = 0;
2018   BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2019   DECL_INITIAL (fndecl) = current_binding_level->block;
2022   /* We handle pending sizes via the elaboration of types, so we don't
2023      need to save them.  */
/* Drain and discard the pending-size list (same rationale as at the
   start of the subprogram).  */
2024   get_pending_sizes ();
2026   /* Mark the RESULT_DECL as being in this subprogram.  */
2027   DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2029   DECL_SAVED_TREE (fndecl) = body;
/* Pop back to the enclosing function (Empty context for toplevel).  */
2031   current_function_decl = DECL_CONTEXT (fndecl);
2034   /* We cannot track the location of errors past this point.  */
2035   error_gnat_node = Empty;
2037   /* If we're only annotating types, don't actually compile this function.  */
2038   if (type_annotate_only)
2041   /* Perform the required pre-gimplification transformations on the tree.  */
2042   gnat_genericize (fndecl);
2044   /* We do different things for nested and non-nested functions.
2045      ??? This should be in cgraph.  */
2046   if (!DECL_CONTEXT (fndecl))
/* Non-nested: gimplify now and hand the function to cgraph.  */
2048       gnat_gimplify_function (fndecl);
2050       /* If this is an empty elaboration proc, just discard the node.
2051          Otherwise, compile further.  */
2052       if (elab_p && empty_body_p (gimple_body (fndecl)))
2053         cgraph_remove_node (cgraph_node (fndecl));
2055         cgraph_finalize_function (fndecl, false);
2058     /* Register this function with cgraph just far enough to get it
2059        added to our parent's nested function list.  */
2060     (void) cgraph_node (fndecl);
2063 /* Convert FNDECL's code to GIMPLE and handle any nested functions.  */
2066 gnat_gimplify_function (tree fndecl)
2068   struct cgraph_node *cgn;
/* Dump the GENERIC form, gimplify, then dump the resulting GIMPLE.  */
2070   dump_function (TDI_original, fndecl);
2071   gimplify_function_tree (fndecl);
2072   dump_function (TDI_generic, fndecl);
2074   /* Convert all nested functions to GIMPLE now.  We do things in this order
2075      so that items like VLA sizes are expanded properly in the context of the
2076      correct function.  */
/* Recurse over the cgraph's nested-function list.  */
2077   cgn = cgraph_node (fndecl);
2078   for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2079     gnat_gimplify_function (cgn->decl);
/* Langhook: record builtin DECL in the current scope via gnat_pushdecl.
   NOTE(review): the return statement (presumably returning DECL) is on an
   elided line -- confirm against the full source.  */
2083 gnat_builtin_function (tree decl)
2085   gnat_pushdecl (decl, Empty);
2089 /* Return an integer type with the number of bits of precision given by
2090    PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
2091    it is a signed type.  */
2094 gnat_type_for_size (unsigned precision, int unsignedp)
/* Serve from the cache when the precision is small enough to be
   memoized (up to twice the word size).  */
2099   if (precision <= 2 * MAX_BITS_PER_WORD
2100       && signed_and_unsigned_types[precision][unsignedp])
2101     return signed_and_unsigned_types[precision][unsignedp];
/* Build a fresh integer type of the requested signedness.  */
2104     t = make_unsigned_type (precision);
2106     t = make_signed_type (precision);
/* Memoize it for subsequent calls.  */
2108   if (precision <= 2 * MAX_BITS_PER_WORD)
2109     signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the type a name like SIGNED_32 / UNSIGNED_32 (guarded by an
   elided condition -- presumably "no name yet").  */
2113       sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2114       TYPE_NAME (t) = get_identifier (type_name);
2120 /* Likewise for floating-point types.  */
2123 float_type_for_precision (int precision, enum machine_mode mode)
/* One cached REAL_TYPE per machine mode.  */
2128   if (float_types[(int) mode])
2129     return float_types[(int) mode];
2131   float_types[(int) mode] = t = make_node (REAL_TYPE);
2132   TYPE_PRECISION (t) = precision;
/* Laying out the type (elided) must have selected exactly MODE.  */
2135   gcc_assert (TYPE_MODE (t) == mode);
/* Name it FLOAT_<precision> (guarded by an elided condition).  */
2138       sprintf (type_name, "FLOAT_%d", precision);
2139       TYPE_NAME (t) = get_identifier (type_name);
2145 /* Return a data type that has machine mode MODE.  UNSIGNEDP selects
2146    an unsigned type; otherwise a signed type is returned.  */
2149 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
/* BLKmode and complex modes have no suitable Ada type here (the
   corresponding return values are on elided lines).  */
2151   if (mode == BLKmode)
2153   else if (mode == VOIDmode)
2154     return void_type_node;
2155   else if (COMPLEX_MODE_P (mode))
2157   else if (SCALAR_FLOAT_MODE_P (mode))
2158     return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2159   else if (SCALAR_INT_MODE_P (mode))
2160     return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2165 /* Return the unsigned version of a TYPE_NODE, a scalar type.  */
2168 gnat_unsigned_type (tree type_node)
2170   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For modular types, preserve the modularity information by linking the
   result back to the original type through TREE_TYPE.  */
2172   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2174       type = copy_node (type);
2175       TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE is itself derived from a modular type.  */
2177   else if (TREE_TYPE (type_node)
2178            && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2179            && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2181       type = copy_node (type);
2182       TREE_TYPE (type) = TREE_TYPE (type_node);
2188 /* Return the signed version of a TYPE_NODE, a scalar type.  */
/* Mirror image of gnat_unsigned_type: same modular-type propagation,
   but requesting a signed type (unsignedp == 0).  */
2191 gnat_signed_type (tree type_node)
2193   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2195   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2197       type = copy_node (type);
2198       TREE_TYPE (type) = type_node;
2200   else if (TREE_TYPE (type_node)
2201            && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2202            && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2204       type = copy_node (type);
2205       TREE_TYPE (type) = TREE_TYPE (type_node);
2211 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2212    transparently converted to each other.  */
2215 gnat_types_compatible_p (tree t1, tree t2)
2217   enum tree_code code;
2219   /* This is the default criterion.  */
2220   if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2223   /* We only check structural equivalence here.  */
2224   if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2227   /* Array types are also compatible if they are constrained and have
2228      the same component type and the same domain.  */
2229   if (code == ARRAY_TYPE
2230       && TREE_TYPE (t1) == TREE_TYPE (t2)
2231       && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2232           || (TYPE_DOMAIN (t1)
/* Compare both bounds as integer constants (the TYPE_DOMAIN (t2)
   non-null check sits on an elided line).  */
2234               && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2235                                      TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2236               && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2237                                      TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2240   /* Padding record types are also compatible if they pad the same
2241      type and have the same constant size.  */
2242   if (code == RECORD_TYPE
2243       && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2244       && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2245       && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2251 /* EXP is an expression for the size of an object.  If this size contains
2252    discriminant references, replace them with the maximum (if MAX_P) or
2253    minimum (if !MAX_P) possible value of the discriminant.  */
/* NOTE(review): elided listing -- several case labels, returns and braces
   of this large switch are on lines not shown.  */
2256 max_size (tree exp, bool max_p)
2258   enum tree_code code = TREE_CODE (exp);
2259   tree type = TREE_TYPE (exp);
2261   switch (TREE_CODE_CLASS (code))
2263     case tcc_declaration:
/* CALL_EXPR: recurse into every argument, then rebuild the call.  */
2268       if (code == CALL_EXPR)
2271           int i, n = call_expr_nargs (exp);
2274           argarray = (tree *) alloca (n * sizeof (tree));
2275           for (i = 0; i < n; i++)
2276             argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2277           return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2282       /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2283          modify.  Otherwise, we treat it like a variable.  */
2284       if (!CONTAINS_PLACEHOLDER_P (exp))
/* A placeholder reference: substitute the extreme value of the
   discriminant's type (the field type of operand 1).  */
2287         type = TREE_TYPE (TREE_OPERAND (exp, 1));
2289           max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* A comparison used inside a size computation yields 0 or 1.  */
2291     case tcc_comparison:
2292       return max_p ? size_one_node : size_zero_node;
2296     case tcc_expression:
2297       switch (TREE_CODE_LENGTH (code))
2300           if (code == NON_LVALUE_EXPR)
2301             return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary: NEGATE_EXPR flips the direction of the bound.  */
2304               fold_build1 (code, type,
2305                            max_size (TREE_OPERAND (exp, 0),
2306                                      code == NEGATE_EXPR ? !max_p : max_p));
2309           if (code == COMPOUND_EXPR)
2310             return max_size (TREE_OPERAND (exp, 1), max_p);
2312           /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2313              may provide a tighter bound on max_size.  */
2314           if (code == MINUS_EXPR
2315               && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2317               tree lhs = fold_build2 (MINUS_EXPR, type,
2318                                       TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2319                                       TREE_OPERAND (exp, 1));
2320               tree rhs = fold_build2 (MINUS_EXPR, type,
2321                                       TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2322                                       TREE_OPERAND (exp, 1));
2323               return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2324                                   max_size (lhs, max_p),
2325                                   max_size (rhs, max_p));
/* Generic binary case: MINUS_EXPR flips the direction for the RHS.  */
2329             tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2330             tree rhs = max_size (TREE_OPERAND (exp, 1),
2331                                  code == MINUS_EXPR ? !max_p : max_p);
2333             /* Special-case wanting the maximum value of a MIN_EXPR.
2334                In that case, if one side overflows, return the other.
2335                sizetype is signed, but we know sizes are non-negative.
2336                Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2337                overflowing or the maximum possible value and the RHS
2341                 && TREE_CODE (rhs) == INTEGER_CST
2342                 && TREE_OVERFLOW (rhs))
2346                      && TREE_CODE (lhs) == INTEGER_CST
2347                      && TREE_OVERFLOW (lhs))
2349             else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2350                      && ((TREE_CODE (lhs) == INTEGER_CST
2351                           && TREE_OVERFLOW (lhs))
2352                          || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2353                      && !TREE_CONSTANT (rhs))
2356               return fold_build2 (code, type, lhs, rhs);
/* SAVE_EXPR (handled on elided lines) and COND_EXPR.  */
2360           if (code == SAVE_EXPR)
2362           else if (code == COND_EXPR)
2363             return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2364                                 max_size (TREE_OPERAND (exp, 1), max_p),
2365                                 max_size (TREE_OPERAND (exp, 2), max_p));
2368   /* Other tree classes cannot happen.  */
2376 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2377    EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2378    Return a constructor for the template.  */
2381 build_template (tree template_type, tree array_type, tree expr)
2383   tree template_elts = NULL_TREE;
2384   tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to reach the array.  */
2387   while (TREE_CODE (array_type) == RECORD_TYPE
2388          && (TYPE_IS_PADDING_P (array_type)
2389              || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2390     array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2392   if (TREE_CODE (array_type) == ARRAY_TYPE
2393       || (TREE_CODE (array_type) == INTEGER_TYPE
2394           && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2395     bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2397   /* First make the list for a CONSTRUCTOR for the template.  Go down the
2398      field list of the template instead of the type chain because this
2399      array might be an Ada array of arrays and we can't tell where the
2400      nested arrays stop being the underlying object.  */
/* The template fields come in (min, max) pairs, hence the double
   TREE_CHAIN in the loop step.  */
2402   for (field = TYPE_FIELDS (template_type); field;
2404         ? (bound_list = TREE_CHAIN (bound_list))
2405         : (array_type = TREE_TYPE (array_type))),
2406        field = TREE_CHAIN (TREE_CHAIN (field)))
2408       tree bounds, min, max;
2410       /* If we have a bound list, get the bounds from there.  Likewise
2411          for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
2412          DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2413          This will give us a maximum range.  */
2415         bounds = TREE_VALUE (bound_list);
2416       else if (TREE_CODE (array_type) == ARRAY_TYPE)
2417         bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2418       else if (expr && TREE_CODE (expr) == PARM_DECL
2419                && DECL_BY_COMPONENT_PTR_P (expr))
2420         bounds = TREE_TYPE (field);
2424       min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2425       max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2427       /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2428          substitute it from OBJECT.  */
2429       min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2430       max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Cons the pair up in reverse; the list is nreverse'd below.  */
2432       template_elts = tree_cons (TREE_CHAIN (field), max,
2433                                  tree_cons (field, min, template_elts));
2436   return gnat_build_constructor (template_type, nreverse (template_elts));
2439 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2440    a descriptor type, and the GCC type of an object.  Each FIELD_DECL
2441    in the type contains in its DECL_INITIAL the expression to use when
2442    a constructor is made for the type.  GNAT_ENTITY is an entity used
2443    to print out an error message if the mechanism cannot be applied to
2444    an object of that type and also for the name.  */
/* NOTE(review): elided listing -- local declarations, switch/case labels,
   break statements and the final return are on lines not shown.  The DTYPE
   and CLASS numeric codes below follow the OpenVMS descriptor encoding --
   verify against the OpenVMS Calling Standard.  */
2447 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2449   tree record_type = make_node (RECORD_TYPE);
2450   tree pointer32_type;
2451   tree field_list = 0;
2460   /* If TYPE is an unconstrained array, use the underlying array type.  */
2461   if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2462     type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2464   /* If this is an array, compute the number of dimensions in the array,
2465      get the index types, and point to the inner type.  */
2466   if (TREE_CODE (type) != ARRAY_TYPE)
2469     for (ndim = 1, inner_type = type;
2470          TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2471          && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2472          ndim++, inner_type = TREE_TYPE (inner_type))
2475   idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in reverse order, except for
   NCA descriptors.  */
2477   if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2478       && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2479     for (i = ndim - 1, inner_type = type;
2481          i--, inner_type = TREE_TYPE (inner_type))
2482       idx_arr[i] = TYPE_DOMAIN (inner_type);
2484     for (i = 0, inner_type = type;
2486          i++, inner_type = TREE_TYPE (inner_type))
2487       idx_arr[i] = TYPE_DOMAIN (inner_type);
2489   /* Now get the DTYPE value.  */
2490   switch (TREE_CODE (type))
2495       if (TYPE_VAX_FLOATING_POINT_P (type))
2496         switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Integer DTYPEs keyed on the mode's bit size.  */
2509         switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2512             dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2515             dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2518             dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2521             dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2524             dtype = TYPE_UNSIGNED (type) ? 25 : 26;
/* IEEE float: single (52) vs. double (53).  */
2530       dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2534       if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2535           && TYPE_VAX_FLOATING_POINT_P (type))
2536         switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2548         dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2559   /* Get the CLASS value.  */
2562     case By_Descriptor_A:
2563     case By_Short_Descriptor_A:
2566     case By_Descriptor_NCA:
2567     case By_Short_Descriptor_NCA:
2570     case By_Descriptor_SB:
2571     case By_Short_Descriptor_SB:
2575     case By_Short_Descriptor:
2576     case By_Descriptor_S:
2577     case By_Short_Descriptor_S:
2583   /* Make the type for a descriptor for VMS.  The first four fields
2584      are the same for all types.  */
2587     = chainon (field_list,
2588                make_descriptor_field
2589                ("LENGTH", gnat_type_for_size (16, 1), record_type,
2590                 size_in_bytes ((mech == By_Descriptor_A ||
2591                                 mech == By_Short_Descriptor_A)
2592                                ? inner_type : type)));
2594   field_list = chainon (field_list,
2595                         make_descriptor_field ("DTYPE",
2596                                                gnat_type_for_size (8, 1),
2597                                                record_type, size_int (dtype)));
2598   field_list = chainon (field_list,
2599                         make_descriptor_field ("CLASS",
2600                                                gnat_type_for_size (8, 1),
2601                                                record_type, size_int (class)));
2603   /* Of course this will crash at run-time if the address space is not
2604      within the low 32 bits, but there is nothing else we can do.  */
2605   pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2608     = chainon (field_list,
2609                make_descriptor_field
2610                ("POINTER", pointer32_type, record_type,
2611                 build_unary_op (ADDR_EXPR,
2613                                 build0 (PLACEHOLDER_EXPR, type))));
/* Mechanism-specific trailing fields.  */
2618     case By_Short_Descriptor:
2619     case By_Descriptor_S:
2620     case By_Short_Descriptor_S:
2623     case By_Descriptor_SB:
2624     case By_Short_Descriptor_SB:
/* String-with-bounds descriptors carry the lower/upper bound.  */
2626         = chainon (field_list,
2627                    make_descriptor_field
2628                    ("SB_L1", gnat_type_for_size (32, 1), record_type,
2629                     TREE_CODE (type) == ARRAY_TYPE
2630                     ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2632         = chainon (field_list,
2633                    make_descriptor_field
2634                    ("SB_U1", gnat_type_for_size (32, 1), record_type,
2635                     TREE_CODE (type) == ARRAY_TYPE
2636                     ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2639     case By_Descriptor_A:
2640     case By_Short_Descriptor_A:
2641     case By_Descriptor_NCA:
2642     case By_Short_Descriptor_NCA:
2643       field_list = chainon (field_list,
2644                             make_descriptor_field ("SCALE",
2645                                                    gnat_type_for_size (8, 1),
2649       field_list = chainon (field_list,
2650                             make_descriptor_field ("DIGITS",
2651                                                    gnat_type_for_size (8, 1),
2656         = chainon (field_list,
2657                    make_descriptor_field
2658                    ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2659                     size_int ((mech == By_Descriptor_NCA ||
2660                               mech == By_Short_Descriptor_NCA)
2662                               /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
2663                               : (TREE_CODE (type) == ARRAY_TYPE
2664                                  && TYPE_CONVENTION_FORTRAN_P (type)
2667       field_list = chainon (field_list,
2668                             make_descriptor_field ("DIMCT",
2669                                                    gnat_type_for_size (8, 1),
2673       field_list = chainon (field_list,
2674                             make_descriptor_field ("ARSIZE",
2675                                                    gnat_type_for_size (32, 1),
2677                                                    size_in_bytes (type)));
2679       /* Now build a pointer to the 0,0,0... element.  */
2680       tem = build0 (PLACEHOLDER_EXPR, type);
2681       for (i = 0, inner_type = type; i < ndim;
2682            i++, inner_type = TREE_TYPE (inner_type))
2683         tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2684                       convert (TYPE_DOMAIN (inner_type), size_zero_node),
2685                       NULL_TREE, NULL_TREE);
2688         = chainon (field_list,
2689                    make_descriptor_field
2691                     build_pointer_type_for_mode (inner_type, SImode, false),
2694                                     build_pointer_type_for_mode (inner_type, SImode,
2698       /* Next come the addressing coefficients.  */
2699       tem = size_one_node;
2700       for (i = 0; i < ndim; i++)
/* Extent of dimension i: max - min + 1 (the "+ 1" addend is on an
   elided line), accumulated multiplicatively.  */
2704             = size_binop (MULT_EXPR, tem,
2705                           size_binop (PLUS_EXPR,
2706                                       size_binop (MINUS_EXPR,
2707                                                   TYPE_MAX_VALUE (idx_arr[i]),
2708                                                   TYPE_MIN_VALUE (idx_arr[i])),
2711           fname[0] = ((mech == By_Descriptor_NCA ||
2712                        mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2713           fname[1] = '0' + i, fname[2] = 0;
2715             = chainon (field_list,
2716                        make_descriptor_field (fname,
2717                                               gnat_type_for_size (32, 1),
2718                                               record_type, idx_length));
2720           if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2724       /* Finally here are the bounds.  */
2725       for (i = 0; i < ndim; i++)
2729           fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2731             = chainon (field_list,
2732                        make_descriptor_field
2733                        (fname, gnat_type_for_size (32, 1), record_type,
2734                         TYPE_MIN_VALUE (idx_arr[i])));
2738             = chainon (field_list,
2739                        make_descriptor_field
2740                        (fname, gnat_type_for_size (32, 1), record_type,
2741                         TYPE_MAX_VALUE (idx_arr[i])));
2746       post_error ("unsupported descriptor type for &", gnat_entity);
2749   TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2750   finish_record_type (record_type, field_list, 0, true);
2754 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2755    a descriptor type, and the GCC type of an object.  Each FIELD_DECL
2756    in the type contains in its DECL_INITIAL the expression to use when
2757    a constructor is made for the type.  GNAT_ENTITY is an entity used
2758    to print out an error message if the mechanism cannot be applied to
2759    an object of that type and also for the name.  */
/* NOTE(review): 64-bit counterpart of build_vms_descriptor32; same elided
   listing caveats apply.  The 64-bit layout additionally carries the MBO
   (must-be-one) and MBMO (must-be-minus-one) sanity fields.  */
2762 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2764   tree record64_type = make_node (RECORD_TYPE);
2765   tree pointer64_type;
2766   tree field_list64 = 0;
2775   /* If TYPE is an unconstrained array, use the underlying array type.  */
2776   if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2777     type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2779   /* If this is an array, compute the number of dimensions in the array,
2780      get the index types, and point to the inner type.  */
2781   if (TREE_CODE (type) != ARRAY_TYPE)
2784     for (ndim = 1, inner_type = type;
2785          TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2786          && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2787          ndim++, inner_type = TREE_TYPE (inner_type))
2790   idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in reverse order, except NCA.  */
2792   if (mech != By_Descriptor_NCA
2793       && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2794     for (i = ndim - 1, inner_type = type;
2796          i--, inner_type = TREE_TYPE (inner_type))
2797       idx_arr[i] = TYPE_DOMAIN (inner_type);
2799     for (i = 0, inner_type = type;
2801          i++, inner_type = TREE_TYPE (inner_type))
2802       idx_arr[i] = TYPE_DOMAIN (inner_type);
2804   /* Now get the DTYPE value.  */
2805   switch (TREE_CODE (type))
2810       if (TYPE_VAX_FLOATING_POINT_P (type))
2811         switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2824         switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2827             dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2830             dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2833             dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2836             dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2839             dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2845       dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2849       if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2850           && TYPE_VAX_FLOATING_POINT_P (type))
2851         switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2863         dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2874   /* Get the CLASS value.  */
2877     case By_Descriptor_A:
2880     case By_Descriptor_NCA:
2883     case By_Descriptor_SB:
2887     case By_Descriptor_S:
2893   /* Make the type for a 64bit descriptor for VMS.  The first six fields
2894      are the same for all types.  */
2896   field_list64 = chainon (field_list64,
2897                           make_descriptor_field ("MBO",
2898                                                  gnat_type_for_size (16, 1),
2899                                                  record64_type, size_int (1)));
2901   field_list64 = chainon (field_list64,
2902                           make_descriptor_field ("DTYPE",
2903                                                  gnat_type_for_size (8, 1),
2904                                                  record64_type, size_int (dtype)));
2905   field_list64 = chainon (field_list64,
2906                           make_descriptor_field ("CLASS",
2907                                                  gnat_type_for_size (8, 1),
2908                                                  record64_type, size_int (class)));
2910   field_list64 = chainon (field_list64,
2911                           make_descriptor_field ("MBMO",
2912                                                  gnat_type_for_size (32, 1),
2913                                                  record64_type, ssize_int (-1)));
2916     = chainon (field_list64,
2917                make_descriptor_field
2918                ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2919                 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2921   pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2924     = chainon (field_list64,
2925                make_descriptor_field
2926                ("POINTER", pointer64_type, record64_type,
2927                 build_unary_op (ADDR_EXPR,
2929                                 build0 (PLACEHOLDER_EXPR, type))));
/* Mechanism-specific trailing fields.  */
2934     case By_Descriptor_S:
2937     case By_Descriptor_SB:
2939         = chainon (field_list64,
2940                    make_descriptor_field
2941                    ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2942                     TREE_CODE (type) == ARRAY_TYPE
2943                     ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2945         = chainon (field_list64,
2946                    make_descriptor_field
2947                    ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2948                     TREE_CODE (type) == ARRAY_TYPE
2949                     ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2952     case By_Descriptor_A:
2953     case By_Descriptor_NCA:
2954       field_list64 = chainon (field_list64,
2955                               make_descriptor_field ("SCALE",
2956                                                      gnat_type_for_size (8, 1),
2960       field_list64 = chainon (field_list64,
2961                               make_descriptor_field ("DIGITS",
2962                                                      gnat_type_for_size (8, 1),
2967         = chainon (field_list64,
2968                    make_descriptor_field
2969                    ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2970                     size_int (mech == By_Descriptor_NCA
2972                               /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
2973                               : (TREE_CODE (type) == ARRAY_TYPE
2974                                  && TYPE_CONVENTION_FORTRAN_P (type)
2977       field_list64 = chainon (field_list64,
2978                               make_descriptor_field ("DIMCT",
2979                                                      gnat_type_for_size (8, 1),
2983       field_list64 = chainon (field_list64,
2984                               make_descriptor_field ("MBZ",
2985                                                      gnat_type_for_size (32, 1),
2988       field_list64 = chainon (field_list64,
2989                               make_descriptor_field ("ARSIZE",
2990                                                      gnat_type_for_size (64, 1),
2992                                                      size_in_bytes (type)));
2994       /* Now build a pointer to the 0,0,0... element.  */
2995       tem = build0 (PLACEHOLDER_EXPR, type);
2996       for (i = 0, inner_type = type; i < ndim;
2997            i++, inner_type = TREE_TYPE (inner_type))
2998         tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2999                       convert (TYPE_DOMAIN (inner_type), size_zero_node),
3000                       NULL_TREE, NULL_TREE);
3003         = chainon (field_list64,
3004                    make_descriptor_field
3006                     build_pointer_type_for_mode (inner_type, DImode, false),
3009                                     build_pointer_type_for_mode (inner_type, DImode,
3013       /* Next come the addressing coefficients.  */
3014       tem = size_one_node;
3015       for (i = 0; i < ndim; i++)
/* Extent of dimension i: max - min + 1 (the "+ 1" addend is on an
   elided line), accumulated multiplicatively.  */
3019             = size_binop (MULT_EXPR, tem,
3020                           size_binop (PLUS_EXPR,
3021                                       size_binop (MINUS_EXPR,
3022                                                   TYPE_MAX_VALUE (idx_arr[i]),
3023                                                   TYPE_MIN_VALUE (idx_arr[i])),
3026           fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3027           fname[1] = '0' + i, fname[2] = 0;
3029             = chainon (field_list64,
3030                        make_descriptor_field (fname,
3031                                               gnat_type_for_size (64, 1),
3032                                               record64_type, idx_length));
3034           if (mech == By_Descriptor_NCA)
3038       /* Finally here are the bounds.  */
3039       for (i = 0; i < ndim; i++)
3043           fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3045             = chainon (field_list64,
3046                        make_descriptor_field
3047                        (fname, gnat_type_for_size (64, 1), record64_type,
3048                         TYPE_MIN_VALUE (idx_arr[i])));
3052             = chainon (field_list64,
3053                        make_descriptor_field
3054                        (fname, gnat_type_for_size (64, 1), record64_type,
3055                         TYPE_MAX_VALUE (idx_arr[i])));
3060       post_error ("unsupported descriptor type for &", gnat_entity);
3063   TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3064   finish_record_type (record64_type, field_list64, 0, true);
3065   return record64_type;
3068 /* Utility routine for above code to make a field.  */
/* Creates a FIELD_DECL named NAME of TYPE in REC_TYPE whose DECL_INITIAL
   holds INITIAL, the expression used when a descriptor constructor is
   built (see the head comments of the build_vms_descriptor routines).  */
3071 make_descriptor_field (const char *name, tree type,
3072                        tree rec_type, tree initial)
3075     = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3077   DECL_INITIAL (field) = initial;
3081 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3082 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3083 which the VMS descriptor is passed. */
3086 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3088 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3089 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3090 /* The CLASS field is the 3rd field in the descriptor. */
3091 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3092 /* The POINTER field is the 6th field in the descriptor. */
3093 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3095 /* Retrieve the value of the POINTER field. */
3097 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3099 if (POINTER_TYPE_P (gnu_type))
3100 return convert (gnu_type, gnu_expr64);
3102 else if (TYPE_FAT_POINTER_P (gnu_type))
3104 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3105 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3106 tree template_type = TREE_TYPE (p_bounds_type);
3107 tree min_field = TYPE_FIELDS (template_type);
3108 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3109 tree template, template_addr, aflags, dimct, t, u;
3110 /* See the head comment of build_vms_descriptor. */
3111 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3112 tree lfield, ufield;
3114 /* Convert POINTER to the type of the P_ARRAY field. */
3115 gnu_expr64 = convert (p_array_type, gnu_expr64);
3119 case 1: /* Class S */
3120 case 15: /* Class SB */
3121 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3122 t = TREE_CHAIN (TREE_CHAIN (class));
3123 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3124 t = tree_cons (min_field,
3125 convert (TREE_TYPE (min_field), integer_one_node),
3126 tree_cons (max_field,
3127 convert (TREE_TYPE (max_field), t),
3129 template = gnat_build_constructor (template_type, t);
3130 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3132 /* For class S, we are done. */
3136 /* Test that we really have a SB descriptor, like DEC Ada. */
3137 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3138 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3139 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3140 /* If so, there is already a template in the descriptor and
3141 it is located right after the POINTER field. The fields are
3142 64bits so they must be repacked. */
3143 t = TREE_CHAIN (pointer64);
3144 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3145 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3148 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3150 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3152 /* Build the template in the form of a constructor. */
3153 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3154 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3155 ufield, NULL_TREE));
3156 template = gnat_build_constructor (template_type, t);
3158 /* Otherwise use the {1, LENGTH} template we build above. */
3159 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3160 build_unary_op (ADDR_EXPR, p_bounds_type,
3165 case 4: /* Class A */
3166 /* The AFLAGS field is the 3rd field after the pointer in the
3168 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3169 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3170 /* The DIMCT field is the next field in the descriptor after
3173 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3174 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3175 or FL_COEFF or FL_BOUNDS not set. */
3176 u = build_int_cst (TREE_TYPE (aflags), 192);
3177 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3178 build_binary_op (NE_EXPR, integer_type_node,
3180 convert (TREE_TYPE (dimct),
3182 build_binary_op (NE_EXPR, integer_type_node,
3183 build2 (BIT_AND_EXPR,
3187 /* There is already a template in the descriptor and it is located
3188 in block 3. The fields are 64bits so they must be repacked. */
3189 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3191 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3192 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3195 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3197 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3199 /* Build the template in the form of a constructor. */
3200 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3201 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3202 ufield, NULL_TREE));
3203 template = gnat_build_constructor (template_type, t);
3204 template = build3 (COND_EXPR, p_bounds_type, u,
3205 build_call_raise (CE_Length_Check_Failed, Empty,
3206 N_Raise_Constraint_Error),
3208 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3211 case 10: /* Class NCA */
3213 post_error ("unsupported descriptor type for &", gnat_subprog);
3214 template_addr = integer_zero_node;
3218 /* Build the fat pointer in the form of a constructor. */
3219 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3220 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3221 template_addr, NULL_TREE));
3222 return gnat_build_constructor (gnu_type, t);
3229 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3230 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3231 which the VMS descriptor is passed. */
/* NOTE(review): this routine does not convert a value itself; it builds and
   returns a GCC tree expression that performs the conversion at run time.  */
3234 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3236 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3237 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3238 /* The CLASS field is the 3rd field in the descriptor. */
3239 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3240 /* The POINTER field is the 4th field in the descriptor. */
3241 tree pointer = TREE_CHAIN (class);
3243 /* Retrieve the value of the POINTER field. */
3245 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* For a regular access type, the data address held in POINTER is all we
   need; a plain pointer conversion finishes the job.  */
3247 if (POINTER_TYPE_P (gnu_type))
3248 return convert (gnu_type, gnu_expr32);
3250 else if (TYPE_FAT_POINTER_P (gnu_type))
3252 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3253 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3254 tree template_type = TREE_TYPE (p_bounds_type);
3255 tree min_field = TYPE_FIELDS (template_type);
3256 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3257 tree template, template_addr, aflags, dimct, t, u;
3258 /* See the head comment of build_vms_descriptor. */
3259 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3261 /* Convert POINTER to the type of the P_ARRAY field. */
3262 gnu_expr32 = convert (p_array_type, gnu_expr32);
/* Dispatch on the descriptor class code recorded in the type; the bounds
   template for the fat pointer is built differently for each class.  */
3266 case 1: /* Class S */
3267 case 15: /* Class SB */
3268 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3269 t = TYPE_FIELDS (desc_type);
3270 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3271 t = tree_cons (min_field,
3272 convert (TREE_TYPE (min_field), integer_one_node),
3273 tree_cons (max_field,
3274 convert (TREE_TYPE (max_field), t),
3276 template = gnat_build_constructor (template_type, t);
3277 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3279 /* For class S, we are done. */
3283 /* Test that we really have a SB descriptor, like DEC Ada. */
3284 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3285 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
/* U is a run-time test: descriptor's CLASS field == the class code
   recorded in the type at build time.  */
3286 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3287 /* If so, there is already a template in the descriptor and
3288 it is located right after the POINTER field. */
3289 t = TREE_CHAIN (pointer);
3290 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3291 /* Otherwise use the {1, LENGTH} template we build above. */
3292 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3293 build_unary_op (ADDR_EXPR, p_bounds_type,
3298 case 4: /* Class A */
3299 /* The AFLAGS field is the 7th field in the descriptor. */
3300 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3301 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3302 /* The DIMCT field is the 8th field in the descriptor. */
3304 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3305 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3306 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == 0xC0 is the mask of the FL_COEFF and FL_BOUNDS flag bits
   mentioned in the comment above.  */
3307 u = build_int_cst (TREE_TYPE (aflags), 192);
3308 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3309 build_binary_op (NE_EXPR, integer_type_node,
3311 convert (TREE_TYPE (dimct),
3313 build_binary_op (NE_EXPR, integer_type_node,
3314 build2 (BIT_AND_EXPR,
3318 /* There is already a template in the descriptor and it is
3319 located at the start of block 3 (12th field). */
3320 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3321 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* Guard the template with the constraint check built in U: raise
   Constraint_Error at run time if the check fails.  */
3322 template = build3 (COND_EXPR, p_bounds_type, u,
3323 build_call_raise (CE_Length_Check_Failed, Empty,
3324 N_Raise_Constraint_Error),
3326 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3329 case 10: /* Class NCA */
/* Unsupported classes: report a compile-time error and substitute a
   null template address so processing can continue.  */
3331 post_error ("unsupported descriptor type for &", gnat_subprog);
3332 template_addr = integer_zero_node;
3336 /* Build the fat pointer in the form of a constructor. */
3337 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3338 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3339 template_addr, NULL_TREE));
3341 return gnat_build_constructor (gnu_type, t);
3348 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3349 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3350 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3351 VMS descriptor is passed. */
/* NOTE(review): returns a tree expression that chooses, at run time, between
   the 64-bit and the 32-bit interpretation of the incoming descriptor.  */
3354 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3355 Entity_Id gnat_subprog)
3357 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3358 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
/* MBO is the first field of the (64-bit capable) descriptor type; its
   name is how we recognize that layout below.  */
3359 tree mbo = TYPE_FIELDS (desc_type);
3360 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3361 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3362 tree is64bit, gnu_expr32, gnu_expr64;
3364 /* If the field name is not MBO, it must be 32-bit and no alternate.
3365 Otherwise primary must be 64-bit and alternate 32-bit. */
3366 if (strcmp (mbostr, "MBO") != 0)
3367 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3369 /* Build the test for 64-bit descriptor. */
/* MBMO is compared against -1 below; MBO presumably against 1 (the
   continuation line is not visible here) -- the VMS "must be one" /
   "must be minus one" marker fields of a 64-bit descriptor.  */
3370 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3371 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3373 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3374 build_binary_op (EQ_EXPR, integer_type_node,
3375 convert (integer_type_node, mbo),
3377 build_binary_op (EQ_EXPR, integer_type_node,
3378 convert (integer_type_node, mbmo),
3379 integer_minus_one_node));
3381 /* Build the 2 possible end results. */
3382 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
/* Re-type the incoming pointer with the alternate 32-bit layout before
   building the 32-bit variant of the result.  */
3383 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3384 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
/* Select the proper result at run time based on the marker test.  */
3386 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3389 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3390 and the GNAT node GNAT_SUBPROG. */
/* NOTE(review): the stub converts each parameter passed by VMS descriptor
   into the by-reference form expected by the internal subprogram, calls
   that subprogram, and propagates its return value (if any).  */
3393 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3395 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3396 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3397 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3400 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3401 gnu_param_list = NULL_TREE;
3403 begin_subprog_body (gnu_stub_decl);
3406 start_stmt_group ();
3408 /* Loop over the parameters of the stub and translate any of them
3409 passed by descriptor into a by reference one. */
3410 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3411 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3413 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3414 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3416 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3418 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3420 DECL_PARM_ALT_TYPE (gnu_stub_param),
/* Parameters not passed by descriptor are forwarded unchanged.  */
3423 gnu_param = gnu_stub_param;
3425 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3428 gnu_body = end_stmt_group ();
3430 /* Invoke the internal subprogram. */
3431 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
/* tree_cons built the list in reverse; nreverse restores call order.  */
3433 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3435 nreverse (gnu_param_list));
3437 /* Propagate the return value, if any. */
3438 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3439 append_to_statement_list (gnu_subprog_call, &gnu_body)
3441 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3447 allocate_struct_function (gnu_stub_decl, false);
3448 end_subprog_body (gnu_body, false);
3451 /* Build a type to be used to represent an aliased object whose nominal
3452 type is an unconstrained array. This consists of a RECORD_TYPE containing
3453 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3454 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3455 is used to represent an arbitrary unconstrained object. Use NAME
3456 as the name of the record. */
3459 build_unc_object_type (tree template_type, tree object_type, tree name)
3461 tree type = make_node (RECORD_TYPE);
/* Layout is BOUNDS first, ARRAY second; code elsewhere (e.g.
   shift_unc_components_for_thin_pointers) relies on this field order.  */
3462 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3463 template_type, type, 0, 0, 0, 1);
3464 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3467 TYPE_NAME (type) = name;
/* Mark the record so consumers know it carries a bounds template.  */
3468 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3469 finish_record_type (type,
3470 chainon (chainon (NULL_TREE, template_field),
3477 /* Same, taking a thin or fat pointer type instead of a template type. */
3480 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3485 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Extract the template type: for a fat pointer it hangs off the second
   field (the pointer to bounds); for a thin pointer, off the first field
   of the designated record.  */
3488 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3489 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3490 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3491 return build_unc_object_type (template_type, object_type, name);
3494 /* Shift the component offsets within an unconstrained object TYPE to make it
3495 suitable for use as a designated type for thin pointers. */
3498 shift_unc_components_for_thin_pointers (tree type)
3500 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3501 allocated past the BOUNDS template. The designated type is adjusted to
3502 have ARRAY at position zero and the template at a negative offset, so
3503 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3505 tree bounds_field = TYPE_FIELDS (type);
3506 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* BOUNDS moves to minus its original distance from ARRAY ...  */
3508 DECL_FIELD_OFFSET (bounds_field)
3509 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* ... and ARRAY itself lands at offset zero.  */
3511 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3512 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3515 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3516 In the normal case this is just two adjustments, but we have more to
3517 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3520 update_pointer_to (tree old_type, tree new_type)
3522 tree ptr = TYPE_POINTER_TO (old_type);
3523 tree ref = TYPE_REFERENCE_TO (old_type);
3527 /* If this is the main variant, process all the other variants first. */
3528 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3529 for (type = TYPE_NEXT_VARIANT (old_type); type;
3530 type = TYPE_NEXT_VARIANT (type))
3531 update_pointer_to (type, new_type);
3533 /* If no pointers and no references, we are done. */
3537 /* Merge the old type qualifiers in the new type.
3539 Each old variant has qualifiers for specific reasons, and the new
3540 designated type as well. Each set of qualifiers represents useful
3541 information grabbed at some point, and merging the two simply unifies
3542 these inputs into the final type description.
3544 Consider for instance a volatile type frozen after an access to constant
3545 type designating it; after the designated type's freeze, we get here with
3546 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3547 when the access type was processed. We will make a volatile and readonly
3548 designated type, because that's what it really is.
3550 We might also get here for a non-dummy OLD_TYPE variant with different
3551 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3552 to private record type elaboration (see the comments around the call to
3553 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3554 the qualifiers in those cases too, to avoid accidentally discarding the
3555 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3557 = build_qualified_type (new_type,
3558 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3560 /* If old type and new type are identical, there is nothing to do. */
3561 if (old_type == new_type)
3564 /* Otherwise, first handle the simple case. */
3565 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
/* Re-home the pointer/reference chains on NEW_TYPE, then retarget
   every pointer and reference variant to designate NEW_TYPE.  */
3567 TYPE_POINTER_TO (new_type) = ptr;
3568 TYPE_REFERENCE_TO (new_type) = ref;
3570 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3571 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3572 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3573 TREE_TYPE (ptr1) = new_type;
3575 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3576 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3577 ref1 = TYPE_NEXT_VARIANT (ref1))
3578 TREE_TYPE (ref1) = new_type;
3581 /* Now deal with the unconstrained array case. In this case the "pointer"
3582 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3583 Turn them into pointers to the correct types using update_pointer_to. */
3584 else if (!TYPE_FAT_POINTER_P (ptr))
3589 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3590 tree array_field = TYPE_FIELDS (ptr);
3591 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3592 tree new_ptr = TYPE_POINTER_TO (new_type);
3596 /* Make pointers to the dummy template point to the real template. */
3598 (TREE_TYPE (TREE_TYPE (bounds_field)),
3599 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3601 /* The references to the template bounds present in the array type
3602 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3603 are updating PTR to make it a full replacement for NEW_PTR as
3604 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3605 to make it of type PTR. */
3606 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3607 build0 (PLACEHOLDER_EXPR, ptr),
3608 bounds_field, NULL_TREE);
3610 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3611 to the dummy array point to it. */
3613 (TREE_TYPE (TREE_TYPE (array_field)),
3614 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3615 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3617 /* Make PTR the pointer to NEW_TYPE. */
3618 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3619 = TREE_TYPE (new_type) = ptr;
/* Record the unconstrained array on every variant of the fat pointer.  */
3621 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3622 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3624 /* Now handle updating the allocation record, what the thin pointer
3625 points to. Update all pointers from the old record into the new
3626 one, update the type of the array field, and recompute the size. */
3627 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3629 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3630 = TREE_TYPE (TREE_TYPE (array_field));
3632 /* The size recomputation needs to account for alignment constraints, so
3633 we let layout_type work it out. This will reset the field offsets to
3634 what they would be in a regular record, so we shift them back to what
3635 we want them to be for a thin pointer designated type afterwards. */
/* Clearing the cached sizes forces layout_type to recompute them.  */
3636 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3637 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3638 TYPE_SIZE (new_obj_rec) = 0;
3639 layout_type (new_obj_rec);
3641 shift_unc_components_for_thin_pointers (new_obj_rec);
3643 /* We are done, at last. */
3644 rest_of_record_type_compilation (ptr);
3648 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3649 unconstrained one. This involves making or finding a template. */
/* NOTE(review): three cases below: null input, thin-pointer input (template
   already lives in the designated record), and everything else (a fresh
   template is built from the array type's bounds).  */
3652 convert_to_fat_pointer (tree type, tree expr)
3654 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3655 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3656 tree etype = TREE_TYPE (expr);
3659 /* If EXPR is null, make a fat pointer that contains null pointers to the
3660 template and array. */
3661 if (integer_zerop (expr))
3663 gnat_build_constructor
3665 tree_cons (TYPE_FIELDS (type),
3666 convert (p_array_type, expr),
3667 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3668 convert (build_pointer_type (template_type),
3672 /* If EXPR is a thin pointer, make template and data from the record. */
3673 else if (TYPE_THIN_POINTER_P (etype))
3675 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3677 expr = save_expr (expr);
/* If the thin pointer is the address of some object, operate on the
   object directly rather than through an indirection.  */
3678 if (TREE_CODE (expr) == ADDR_EXPR)
3679 expr = TREE_OPERAND (expr, 0);
3681 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First record field is the template, second is the array data.  */
3683 template = build_component_ref (expr, NULL_TREE, fields, false);
3684 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3685 build_component_ref (expr, NULL_TREE,
3686 TREE_CHAIN (fields), false));
3689 /* Otherwise, build the constructor for the template. */
3691 template = build_template (template_type, TREE_TYPE (etype), expr);
3693 /* The final result is a constructor for the fat pointer.
3695 If EXPR is an argument of a foreign convention subprogram, the type it
3696 points to is directly the component type. In this case, the expression
3697 type may not match the corresponding FIELD_DECL type at this point, so we
3698 call "convert" here to fix that up if necessary. This type consistency is
3699 required, for instance because it ensures that possible later folding of
3700 COMPONENT_REFs against this constructor always yields something of the
3701 same type as the initial reference.
3703 Note that the call to "build_template" above is still fine because it
3704 will only refer to the provided TEMPLATE_TYPE in this case. */
3706 gnat_build_constructor
3708 tree_cons (TYPE_FIELDS (type),
3709 convert (p_array_type, expr),
3710 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3711 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3715 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3716 is something that is a fat pointer, so convert to it first if EXPR
3717 is not already a fat pointer. */
3720 convert_to_thin_pointer (tree type, tree expr)
3722 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
/* Normalize to the fat pointer type of the unconstrained array that
   TYPE designates, so the field accesses below are well-defined.  */
3724 = convert_to_fat_pointer
3725 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3727 /* We get the pointer to the data and use a NOP_EXPR to make it the
3729 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3731 expr = build1 (NOP_EXPR, type, expr);
3736 /* Create an expression whose value is that of EXPR,
3737 converted to type TYPE. The TREE_TYPE of the value
3738 is always TYPE. This function implements all reasonable
3739 conversions; callers should filter out those that are
3740 not permitted by the language being compiled. */
3743 convert (tree type, tree expr)
3745 enum tree_code code = TREE_CODE (type);
3746 tree etype = TREE_TYPE (expr);
3747 enum tree_code ecode = TREE_CODE (etype);
3749 /* If EXPR is already the right type, we are done. */
3753 /* If both input and output have padding and are of variable size, do this
3754 as an unchecked conversion. Likewise if one is a mere variant of the
3755 other, so we avoid a pointless unpad/repad sequence. */
3756 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3757 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3758 && (!TREE_CONSTANT (TYPE_SIZE (type))
3759 || !TREE_CONSTANT (TYPE_SIZE (etype))
3760 || gnat_types_compatible_p (type, etype)
3761 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3762 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3765 /* If the output type has padding, convert to the inner type and
3766 make a constructor to build the record. */
3767 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3769 /* If we previously converted from another type and our type is
3770 of variable size, remove the conversion to avoid the need for
3771 variable-size temporaries. Likewise for a conversion between
3772 original and packable version. */
3773 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3774 && (!TREE_CONSTANT (TYPE_SIZE (type))
3775 || (ecode == RECORD_TYPE
3776 && TYPE_NAME (etype)
3777 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3778 expr = TREE_OPERAND (expr, 0);
3780 /* If we are just removing the padding from expr, convert the original
3781 object if we have variable size in order to avoid the need for some
3782 variable-size temporaries. Likewise if the padding is a mere variant
3783 of the other, so we avoid a pointless unpad/repad sequence. */
3784 if (TREE_CODE (expr) == COMPONENT_REF
3785 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3786 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3787 && (!TREE_CONSTANT (TYPE_SIZE (type))
3788 || gnat_types_compatible_p (type,
3789 TREE_TYPE (TREE_OPERAND (expr, 0)))
3790 || (ecode == RECORD_TYPE
3791 && TYPE_NAME (etype)
3792 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3793 return convert (type, TREE_OPERAND (expr, 0));
3795 /* If the result type is a padded type with a self-referentially-sized
3796 field and the expression type is a record, do this as an
3797 unchecked conversion. */
3798 else if (TREE_CODE (etype) == RECORD_TYPE
3799 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3800 return unchecked_convert (type, expr, false);
3804 gnat_build_constructor (type,
3805 tree_cons (TYPE_FIELDS (type),
3807 (TYPE_FIELDS (type)),
3812 /* If the input type has padding, remove it and convert to the output type.
3813 The conditions ordering is arranged to ensure that the output type is not
3814 a padding type here, as it is not clear whether the conversion would
3815 always be correct if this was to happen. */
3816 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3820 /* If we have just converted to this padded type, just get the
3821 inner expression. */
3822 if (TREE_CODE (expr) == CONSTRUCTOR
3823 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3824 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3825 == TYPE_FIELDS (etype))
3827 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3829 /* Otherwise, build an explicit component reference. */
3832 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3834 return convert (type, unpadded);
3837 /* If the input is a biased type, adjust first. */
3838 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3839 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3840 fold_convert (TREE_TYPE (etype),
3842 TYPE_MIN_VALUE (etype)));
3844 /* If the input is a justified modular type, we need to extract the actual
3845 object before converting it to any other type with the exceptions of an
3846 unconstrained array or of a mere type variant. It is useful to avoid the
3847 extraction and conversion in the type variant case because it could end
3848 up replacing a VAR_DECL expr by a constructor and we might be about the
3849 take the address of the result. */
3850 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3851 && code != UNCONSTRAINED_ARRAY_TYPE
3852 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3853 return convert (type, build_component_ref (expr, NULL_TREE,
3854 TYPE_FIELDS (etype), false));
3856 /* If converting to a type that contains a template, convert to the data
3857 type and then build the template. */
3858 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3860 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3862 /* If the source already has a template, get a reference to the
3863 associated array only, as we are going to rebuild a template
3864 for the target type anyway. */
3865 expr = maybe_unconstrained_array (expr);
3868 gnat_build_constructor
3870 tree_cons (TYPE_FIELDS (type),
3871 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3872 obj_type, NULL_TREE),
3873 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3874 convert (obj_type, expr), NULL_TREE)));
3877 /* There are some special cases of expressions that we process
3879 switch (TREE_CODE (expr))
3885 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3886 conversion in gnat_expand_expr. NULL_EXPR does not represent
3887 and actual value, so no conversion is needed. */
3888 expr = copy_node (expr);
3889 TREE_TYPE (expr) = type;
3893 /* If we are converting a STRING_CST to another constrained array type,
3894 just make a new one in the proper type. */
3895 if (code == ecode && AGGREGATE_TYPE_P (etype)
3896 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3897 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3899 expr = copy_node (expr);
3900 TREE_TYPE (expr) = type;
3906 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3907 a new one in the proper type. */
3908 if (code == ecode && gnat_types_compatible_p (type, etype))
3910 expr = copy_node (expr);
3911 TREE_TYPE (expr) = type;
3915 /* Likewise for a conversion between original and packable version, but
3916 we have to work harder in order to preserve type consistency. */
3918 && code == RECORD_TYPE
3919 && TYPE_NAME (type) == TYPE_NAME (etype))
3921 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3922 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3923 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3924 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3925 unsigned HOST_WIDE_INT idx;
3928 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3930 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3931 /* We expect only simple constructors. Otherwise, punt. */
3932 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3935 elt->value = convert (TREE_TYPE (field), value);
3936 efield = TREE_CHAIN (efield);
3937 field = TREE_CHAIN (field);
3942 expr = copy_node (expr);
3943 TREE_TYPE (expr) = type;
3944 CONSTRUCTOR_ELTS (expr) = v;
3950 case UNCONSTRAINED_ARRAY_REF:
3951 /* Convert this to the type of the inner array by getting the address of
3952 the array from the template. */
3953 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3954 build_component_ref (TREE_OPERAND (expr, 0),
3955 get_identifier ("P_ARRAY"),
3957 etype = TREE_TYPE (expr);
3958 ecode = TREE_CODE (etype);
3961 case VIEW_CONVERT_EXPR:
3963 /* GCC 4.x is very sensitive to type consistency overall, and view
3964 conversions thus are very frequent. Even though just "convert"ing
3965 the inner operand to the output type is fine in most cases, it
3966 might expose unexpected input/output type mismatches in special
3967 circumstances so we avoid such recursive calls when we can. */
3968 tree op0 = TREE_OPERAND (expr, 0);
3970 /* If we are converting back to the original type, we can just
3971 lift the input conversion. This is a common occurrence with
3972 switches back-and-forth amongst type variants. */
3973 if (type == TREE_TYPE (op0))
3976 /* Otherwise, if we're converting between two aggregate types, we
3977 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3978 in place or to just convert the inner expression. */
3979 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3981 /* If we are converting between mere variants, we can just
3982 substitute the VIEW_CONVERT_EXPR in place. */
3983 if (gnat_types_compatible_p (type, etype))
3984 return build1 (VIEW_CONVERT_EXPR, type, op0);
3986 /* Otherwise, we may just bypass the input view conversion unless
3987 one of the types is a fat pointer, which is handled by
3988 specialized code below which relies on exact type matching. */
3989 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3990 return convert (type, op0);
3996 /* If both types are record types, just convert the pointer and
3997 make a new INDIRECT_REF.
3999 ??? Disable this for now since it causes problems with the
4000 code in build_binary_op for MODIFY_EXPR which wants to
4001 strip off conversions. But that code really is a mess and
4002 we need to do this a much better way some time. */
4004 && (TREE_CODE (type) == RECORD_TYPE
4005 || TREE_CODE (type) == UNION_TYPE)
4006 && (TREE_CODE (etype) == RECORD_TYPE
4007 || TREE_CODE (etype) == UNION_TYPE)
4008 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4009 return build_unary_op (INDIRECT_REF, NULL_TREE,
4010 convert (build_pointer_type (type),
4011 TREE_OPERAND (expr, 0)));
4018 /* Check for converting to a pointer to an unconstrained array. */
4019 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4020 return convert_to_fat_pointer (type, expr);
4022 /* If we are converting between two aggregate types that are mere
4023 variants, just make a VIEW_CONVERT_EXPR. */
4024 else if (code == ecode
4025 && AGGREGATE_TYPE_P (type)
4026 && gnat_types_compatible_p (type, etype))
4027 return build1 (VIEW_CONVERT_EXPR, type, expr);
4029 /* In all other cases of related types, make a NOP_EXPR. */
4030 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4031 || (code == INTEGER_CST && ecode == INTEGER_CST
4032 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4033 return fold_convert (type, expr);
4038 return fold_build1 (CONVERT_EXPR, type, expr);
4041 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4042 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4043 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4044 return unchecked_convert (type, expr, false);
4045 else if (TYPE_BIASED_REPRESENTATION_P (type))
4046 return fold_convert (type,
4047 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4048 convert (TREE_TYPE (type), expr),
4049 TYPE_MIN_VALUE (type)));
4051 /* ... fall through ... */
4055 /* If we are converting an additive expression to an integer type
4056 with lower precision, be wary of the optimization that can be
4057 applied by convert_to_integer. There are 2 problematic cases:
4058 - if the first operand was originally of a biased type,
4059 because we could be recursively called to convert it
4060 to an intermediate type and thus rematerialize the
4061 additive operator endlessly,
4062 - if the expression contains a placeholder, because an
4063 intermediate conversion that changes the sign could
4064 be inserted and thus introduce an artificial overflow
4065 at compile time when the placeholder is substituted. */
4066 if (code == INTEGER_TYPE
4067 && ecode == INTEGER_TYPE
4068 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4069 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4071 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4073 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4074 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4075 || CONTAINS_PLACEHOLDER_P (expr))
4076 return build1 (NOP_EXPR, type, expr);
4079 return fold (convert_to_integer (type, expr));
4082 case REFERENCE_TYPE:
4083 /* If converting between two pointers to records denoting
4084 both a template and type, adjust if needed to account
4085 for any differing offsets, since one might be negative. */
4086 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4089 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4090 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4091 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4092 sbitsize_int (BITS_PER_UNIT));
4094 expr = build1 (NOP_EXPR, type, expr);
4095 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4096 if (integer_zerop (byte_diff))
4099 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4100 fold (convert (sizetype, byte_diff)));
4103 /* If converting to a thin pointer, handle specially. */
4104 if (TYPE_THIN_POINTER_P (type)
4105 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4106 return convert_to_thin_pointer (type, expr);
4108 /* If converting fat pointer to normal pointer, get the pointer to the
4109 array and then convert it. */
4110 else if (TYPE_FAT_POINTER_P (etype))
4111 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4114 return fold (convert_to_pointer (type, expr));
4117 return fold (convert_to_real (type, expr));
4120 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4122 gnat_build_constructor
4123 (type, tree_cons (TYPE_FIELDS (type),
4124 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4127 /* ... fall through ... */
4130 /* In these cases, assume the front-end has validated the conversion.
4131 If the conversion is valid, it will be a bit-wise conversion, so
4132 it can be viewed as an unchecked conversion. */
4133 return unchecked_convert (type, expr, false);
4136 /* This is either a conversion between a tagged type and some
4137 subtype, which we have to mark as a UNION_TYPE because of
4138 overlapping fields, or a conversion of an Unchecked_Union. */
4139 return unchecked_convert (type, expr, false);
4141 case UNCONSTRAINED_ARRAY_TYPE:
4142 /* If EXPR is a constrained array, take its address, convert it to a
4143 fat pointer, and then dereference it. Likewise if EXPR is a
4144 record containing both a template and a constrained array.
4145 Note that a record representing a justified modular type
4146 always represents a packed constrained array. */
4147 if (ecode == ARRAY_TYPE
4148 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4149 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4150 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4153 (INDIRECT_REF, NULL_TREE,
4154 convert_to_fat_pointer (TREE_TYPE (type),
4155 build_unary_op (ADDR_EXPR,
4158 /* Do something very similar for converting one unconstrained
4159 array to another. */
4160 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4162 build_unary_op (INDIRECT_REF, NULL_TREE,
4163 convert (TREE_TYPE (type),
4164 build_unary_op (ADDR_EXPR,
4170 return fold (convert_to_complex (type, expr));
4177 /* Remove all conversions that are done in EXP. This includes converting
4178 from a padded type or to a justified modular type. If TRUE_ADDRESS
4179 is true, always return the address of the containing object even if
4180 the address is not bit-aligned. */
4183 remove_conversions (tree exp, bool true_address)
4185 switch (TREE_CODE (exp))
/* A CONSTRUCTOR for a justified modular record wraps the real value as
   its single element; recurse into that element to strip it.  */
4189 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4190 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4192 remove_conversions (VEC_index (constructor_elt,
4193 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A reference into a record used purely for padding is effectively a
   conversion from the padded type; strip it likewise.  */
4198 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4199 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4200 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
/* View conversions and no-op wrappers carry no computation of their
   own, so look straight through them.  */
4203 case VIEW_CONVERT_EXPR:	case NON_LVALUE_EXPR:
4205 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4214 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4215 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4216 likewise return an expression pointing to the underlying array. */
4219 maybe_unconstrained_array (tree exp)
4221 enum tree_code code = TREE_CODE (exp);
4224 switch (TREE_CODE (TREE_TYPE (exp)))
4226 case UNCONSTRAINED_ARRAY_TYPE:
/* Dereference the P_ARRAY field of the fat pointer, which points at
   the actual array data.  (`new' is an ordinary identifier here; this
   file is compiled as C.)  */
4227 if (code == UNCONSTRAINED_ARRAY_REF)
4230 = build_unary_op (INDIRECT_REF, NULL_TREE,
4231 build_component_ref (TREE_OPERAND (exp, 0),
4232 get_identifier ("P_ARRAY"),
/* Copy the readonly flag of the reference to the new dereference.  */
4234 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
/* For a NULL_EXPR, rebuild it with the array type found behind the
   fields of the record underlying the unconstrained array type.  */
4238 else if (code == NULL_EXPR)
4239 return build1 (NULL_EXPR,
4240 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4241 (TREE_TYPE (TREE_TYPE (exp))))),
4242 TREE_OPERAND (exp, 0));
4245 /* If this is a padded type, convert to the unpadded type and see if
4246 it contains a template. */
4247 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4249 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4250 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4251 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
/* Select the second field, the one after the template.  */
4253 build_component_ref (new, NULL_TREE,
4254 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4257 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4259 build_component_ref (exp, NULL_TREE,
4260 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4270 /* Return true if EXPR is an expression that can be folded as an operand
4271 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4275 can_fold_for_view_convert_p (tree expr)
4279 /* The folder will fold NOP_EXPRs between integral types with the same
4280 precision (in the middle-end's sense). We cannot allow it if the
4281 types don't have the same precision in the Ada sense as well. */
4282 if (TREE_CODE (expr) != NOP_EXPR)
/* Outer type versus the type of the converted operand.  */
4285 t1 = TREE_TYPE (expr);
4286 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4288 /* Defer to the folder for non-integral conversions. */
4289 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4292 /* Only fold conversions that preserve both precisions. */
/* I.e. both the middle-end precision (TYPE_PRECISION) and the Ada RM
   size (rm_size) must agree between the two types.  */
4293 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4294 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4300 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4301 If NOTRUNC_P is true, truncation operations should be suppressed.
4303 Special care is required with (source or target) integral types whose
4304 precision is not equal to their size, to make sure we fetch or assign
4305 the value bits whose location might depend on the endianness, e.g.
4307 Rmsize : constant := 8;
4308 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4310 type Bit_Array is array (1 .. Rmsize) of Boolean;
4311 pragma Pack (Bit_Array);
4313 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4315 Value : Int := 2#1000_0001#;
4316 Vbits : Bit_Array := To_Bit_Array (Value);
4318 we expect the 8 bits at Vbits'Address to always contain Value, while
4319 their original location depends on the endianness, at Value'Address
4320 on a little-endian architecture but not on a big-endian one.
4322 ??? There is a problematic discrepancy between what is called precision
4323 here (and more generally throughout gigi) for integral types and what is
4324 called precision in the middle-end. In the former case it's the RM size
4325 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4326 latter case, the hitch being that they are not equal when they matter,
4327 that is when the number of value bits is not equal to the type's size:
4328 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4329 to the size. The sole exception is BOOLEAN_TYPEs for which both are 1.
4331 The consequence is that gigi must duplicate code bridging the gap between
4332 the type's size and its precision that exists for TYPE_PRECISION in the
4333 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4334 wary of transformations applied in the middle-end based on TYPE_PRECISION
4335 because this value doesn't reflect the actual precision for Ada. */
4338 unchecked_convert (tree type, tree expr, bool notrunc_p)
4340 tree etype = TREE_TYPE (expr);
4342 /* If the expression is already the right type, we are done. */
4346 /* If both types are integral just do a normal conversion.
4347 Likewise for a conversion to an unconstrained array. */
4348 if ((((INTEGRAL_TYPE_P (type)
4349 && !(TREE_CODE (type) == INTEGER_TYPE
4350 && TYPE_VAX_FLOATING_POINT_P (type)))
4351 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4352 || (TREE_CODE (type) == RECORD_TYPE
4353 && TYPE_JUSTIFIED_MODULAR_P (type)))
4354 && ((INTEGRAL_TYPE_P (etype)
4355 && !(TREE_CODE (etype) == INTEGER_TYPE
4356 && TYPE_VAX_FLOATING_POINT_P (etype)))
4357 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4358 || (TREE_CODE (etype) == RECORD_TYPE
4359 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4360 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
/* First strip a biased representation from the source: copy the type
   with the bias flag cleared and reinterpret the bits under it.  */
4362 if (TREE_CODE (etype) == INTEGER_TYPE
4363 && TYPE_BIASED_REPRESENTATION_P (etype))
4365 tree ntype = copy_type (etype);
4366 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4367 TYPE_MAIN_VARIANT (ntype) = ntype;
4368 expr = build1 (NOP_EXPR, ntype, expr);
/* Symmetrically, convert to an unbiased copy of a biased target and
   then reinterpret the result as the biased type.  */
4371 if (TREE_CODE (type) == INTEGER_TYPE
4372 && TYPE_BIASED_REPRESENTATION_P (type))
4374 tree rtype = copy_type (type);
4375 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4376 TYPE_MAIN_VARIANT (rtype) = rtype;
4377 expr = convert (rtype, expr);
4378 expr = build1 (NOP_EXPR, type, expr);
4381 /* We have another special case: if we are unchecked converting either
4382 a subtype or a type with limited range into a base type, we need to
4383 ensure that VRP doesn't propagate range information because this
4384 conversion may be done precisely to validate that the object is
4385 within the range it is supposed to have. */
4386 else if (TREE_CODE (expr) != INTEGER_CST
4387 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4388 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4389 || TREE_CODE (etype) == ENUMERAL_TYPE
4390 || TREE_CODE (etype) == BOOLEAN_TYPE))
4392 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4393 in order not to be deemed a useless type conversion, it must
4394 be from subtype to base type.
4396 Therefore we first do the bulk of the conversion to a subtype of
4397 the final type. And this conversion must itself not be deemed
4398 useless if the source type is not a subtype because, otherwise,
4399 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4400 why we toggle the unsigned flag in this conversion, which is
4401 harmless since the final conversion is only a reinterpretation
4404 ??? This may raise addressability and/or aliasing issues because
4405 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4406 address of its operand to be taken if it is deemed addressable
4407 and not already in GIMPLE form. */
4409 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
4410 rtype = copy_type (rtype);
4411 TYPE_MAIN_VARIANT (rtype) = rtype;
4412 TREE_TYPE (rtype) = type;
4413 expr = convert (rtype, expr);
4414 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4418 expr = convert (type, expr);
4421 /* If we are converting to an integral type whose precision is not equal
4422 to its size, first unchecked convert to a record that contains an
4423 object of the output type. Then extract the field. */
4424 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4425 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4426 GET_MODE_BITSIZE (TYPE_MODE (type))))
4428 tree rec_type = make_node (RECORD_TYPE);
4429 tree field = create_field_decl (get_identifier ("OBJ"), type,
4430 rec_type, 1, 0, 0, 0);
4432 TYPE_FIELDS (rec_type) = field;
4433 layout_type (rec_type);
4435 expr = unchecked_convert (rec_type, expr, notrunc_p);
4436 expr = build_component_ref (expr, NULL_TREE, field, 0);
4439 /* Similarly if we are converting from an integral type whose precision
4440 is not equal to its size. */
4441 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4442 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4443 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4445 tree rec_type = make_node (RECORD_TYPE);
4447 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4450 TYPE_FIELDS (rec_type) = field;
4451 layout_type (rec_type);
4453 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4454 expr = unchecked_convert (type, expr, notrunc_p);
4457 /* We have a special case when we are converting between two
4458 unconstrained array types. In that case, take the address,
4459 convert the fat pointer types, and dereference. */
4460 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4461 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4462 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4463 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4464 build_unary_op (ADDR_EXPR, NULL_TREE,
/* Otherwise reinterpret the bits via a VIEW_CONVERT_EXPR, folding it
   only when can_fold_for_view_convert_p says the folder is safe.  */
4468 expr = maybe_unconstrained_array (expr);
4469 etype = TREE_TYPE (expr);
4470 if (can_fold_for_view_convert_p (expr))
4471 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4473 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4476 /* If the result is an integral type whose precision is not equal to its
4477 size, sign- or zero-extend the result. We need not do this if the input
4478 is an integral type of the same precision and signedness or if the output
4479 is a biased type or if both the input and output are unsigned. */
4481 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4482 && !(TREE_CODE (type) == INTEGER_TYPE
4483 && TYPE_BIASED_REPRESENTATION_P (type))
4484 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4485 GET_MODE_BITSIZE (TYPE_MODE (type)))
4486 && !(INTEGRAL_TYPE_P (etype)
4487 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4488 && operand_equal_p (TYPE_RM_SIZE (type),
4489 (TYPE_RM_SIZE (etype) != 0
4490 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4492 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
/* Extend by shifting the value bits up to the top of the mode and
   back down; the right shift performs the sign or zero extension.  */
4494 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4495 TYPE_UNSIGNED (type));
4497 = convert (base_type,
4498 size_binop (MINUS_EXPR,
4500 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4501 TYPE_RM_SIZE (type)));
4504 build_binary_op (RSHIFT_EXPR, base_type,
4505 build_binary_op (LSHIFT_EXPR, base_type,
4506 convert (base_type, expr),
4511 /* An unchecked conversion should never raise Constraint_Error. The code
4512 below assumes that GCC's conversion routines overflow the same way that
4513 the underlying hardware does. This is probably true. In the rare case
4514 when it is false, we can rely on the fact that such conversions are
4515 erroneous anyway. */
4516 if (TREE_CODE (expr) == INTEGER_CST)
4517 TREE_OVERFLOW (expr) = 0;
4519 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4520 show no longer constant. */
4521 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4522 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4524 TREE_CONSTANT (expr) = 0;
4529 /* Return the appropriate GCC tree code for the specified GNAT type,
4530 the latter being a record type as predicated by Is_Record_Type. */
4533 tree_code_for_record_type (Entity_Id gnat_type)
/* Look at the component list of the base type's definition, since that
   is where the variant part, if any, lives.  */
4535 Node_Id component_list
4536 = Component_List (Type_Definition
4538 (Implementation_Base_Type (gnat_type))));
4541 /* Make this a UNION_TYPE unless it's not an Unchecked_Union or unless
4542 we have a non-discriminant field outside a variant; in either of
4543 those cases it's a RECORD_TYPE. */
4545 if (!Is_Unchecked_Union (gnat_type))
/* Any plain component (E_Component) in the component list sits outside
   the variant part and therefore forces RECORD_TYPE.  */
4548 for (component = First_Non_Pragma (Component_Items (component_list));
4549 Present (component);
4550 component = Next_Non_Pragma (component))
4551 if (Ekind (Defining_Entity (component)) == E_Component)
4557 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4558 component of an aggregate type. */
4561 type_for_nonaliased_component_p (tree gnu_type)
4563 /* If the type is passed by reference, we may have pointers to the
4564 component so it cannot be made non-aliased. */
4565 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4568 /* We used to say that any component of aggregate type is aliased
4569 because the front-end may take 'Reference of it. The front-end
4570 has been enhanced in the meantime so as to use a renaming instead
4571 in most cases, but the back-end can probably take the address of
4572 such a component too so we go for the conservative stance.
4574 For instance, we might need the address of any array type, even
4575 if normally passed by copy, to construct a fat pointer if the
4576 component is used as an actual for an unconstrained formal.
4578 Likewise for record types: even if a specific record subtype is
4579 passed by copy, the parent type might be passed by ref (e.g. if
4580 it's of variable size) and we might take the address of a child
4581 component to pass to a parent formal. We have no way to check
4582 for such conditions here. */
/* Per the rationale above, conservatively treat every aggregate
   component as aliased.  */
4583 if (AGGREGATE_TYPE_P (gnu_type))
4589 /* Perform final processing on global variables. */
/* NOTE(review): presumably installed as the front end's write-globals
   langhook; confirm where it is registered (not visible here).  */
4592 gnat_write_global_declarations (void)
4594 /* Proceed to optimize and emit assembly.
4595 FIXME: shouldn't be the front end's responsibility to call this. */
4598 /* Emit debug info for all global declarations. */
4599 emit_debug_global_declarations (VEC_address (tree, global_decls),
4600 VEC_length (tree, global_decls));
4603 /* ************************************************************************
4604 * * GCC builtins support *
4605 * ************************************************************************ */
4607 /* The general scheme is fairly simple:
4609 For each builtin function/type to be declared, gnat_install_builtins calls
4610 internal facilities which eventually get to gnat_push_decl, which in turn
4611 tracks the so declared builtin function decls in the 'builtin_decls' global
4612 datastructure. When an Intrinsic subprogram declaration is processed, we
4613 search this global datastructure to retrieve the associated BUILT_IN DECL
4616 /* Search the chain of currently available builtin declarations for a node
4617 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4618 found, if any, or NULL_TREE otherwise. */
4620 builtin_decl_for (tree name)
/* Identifier nodes are interned, so pointer comparison suffices.  */
4625 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4626 if (DECL_NAME (decl) == name)
4632 /* The code below eventually exposes gnat_install_builtins, which declares
4633 the builtin types and functions we might need, either internally or as
4634 user accessible facilities.
4636 ??? This is a first implementation shot, still in rough shape. It is
4637 heavily inspired from the "C" family implementation, with chunks copied
4638 verbatim from there.
4640 Two obvious TODO candidates are
4641 o Use a more efficient name/decl mapping scheme
4642 o Devise a middle-end infrastructure to avoid having to copy
4643 pieces between front-ends. */
4645 /* ----------------------------------------------------------------------- *
4646 * BUILTIN ELEMENTARY TYPES *
4647 * ----------------------------------------------------------------------- */
4649 /* Standard data types to be used in builtin argument declarations. */
4653 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4655 CTI_CONST_STRING_TYPE,
4660 static tree c_global_trees[CTI_MAX];
4662 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4663 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4664 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4666 /* ??? In addition to some attribute handlers, we currently don't support a
4667 (small) number of builtin-types, which in turn inhibits support for a
4668 number of builtin functions. */
4669 #define wint_type_node void_type_node
4670 #define intmax_type_node void_type_node
4671 #define uintmax_type_node void_type_node
4673 /* Build the void_list_node (void_type_node having been created). */
4676 build_void_list_node (void)
/* A single TREE_LIST entry holding void_type_node; used to terminate
   fixed-arity argument lists (see def_fn_type below).  */
4678 tree t = build_tree_list (NULL_TREE, void_type_node);
4682 /* Used to help initialize the builtin-types.def table. When a type of
4683 the correct size doesn't exist, use error_mark_node instead of NULL.
4684 The latter results in segfaults even when a decl using the type doesn't
4688 builtin_type_for_size (int size, bool unsignedp)
4690 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4691 return type ? type : error_mark_node;
4694 /* Build/push the elementary type decls that builtin functions/types
4698 install_builtin_elementary_types (void)
/* NOTE(review): signed_size_type_node aliases the (unsigned)
   size_type_node; per the CTI enum comment it is used for format
   checking only -- confirm that no exact signed type is required.  */
4700 signed_size_type_node = size_type_node;
4701 pid_type_node = integer_type_node;
4702 void_list_node = build_void_list_node ();
4704 string_type_node = build_pointer_type (char_type_node);
4705 const_string_type_node
4706 = build_pointer_type (build_qualified_type
4707 (char_type_node, TYPE_QUAL_CONST));
4710 /* ----------------------------------------------------------------------- *
4711 * BUILTIN FUNCTION TYPES *
4712 * ----------------------------------------------------------------------- */
4714 /* Now, builtin function types per se. */
/* Expand every entry of builtin-types.def into an enumerator naming
   that builtin type; BT_LAST terminates the enumeration (see the
   builtin_types array below, sized BT_LAST + 1).  */
4718 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4719 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4720 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4721 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4722 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4723 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4724 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4725 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4726 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4727 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4728 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4729 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4730 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4731 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4732 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4734 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4735 #include "builtin-types.def"
4736 #undef DEF_PRIMITIVE_TYPE
4737 #undef DEF_FUNCTION_TYPE_0
4738 #undef DEF_FUNCTION_TYPE_1
4739 #undef DEF_FUNCTION_TYPE_2
4740 #undef DEF_FUNCTION_TYPE_3
4741 #undef DEF_FUNCTION_TYPE_4
4742 #undef DEF_FUNCTION_TYPE_5
4743 #undef DEF_FUNCTION_TYPE_6
4744 #undef DEF_FUNCTION_TYPE_7
4745 #undef DEF_FUNCTION_TYPE_VAR_0
4746 #undef DEF_FUNCTION_TYPE_VAR_1
4747 #undef DEF_FUNCTION_TYPE_VAR_2
4748 #undef DEF_FUNCTION_TYPE_VAR_3
4749 #undef DEF_FUNCTION_TYPE_VAR_4
4750 #undef DEF_FUNCTION_TYPE_VAR_5
4751 #undef DEF_POINTER_TYPE
4755 typedef enum c_builtin_type builtin_type;
4757 /* A temporary array used in communication with def_fn_type. */
4758 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4760 /* A helper function for install_builtin_function_types. Build function type
4761 for DEF with return type RET and N arguments. If VAR is true, then the
4762 function should be variadic after those N arguments.
4764 Takes special care not to ICE if any of the types involved are
4765 error_mark_node, which indicates that said type is not in fact available
4766 (see builtin_type_for_size). In which case the function type as a whole
4767 should be error_mark_node. */
4770 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4772 tree args = NULL, t;
4777 for (i = 0; i < n; ++i)
4779 builtin_type a = va_arg (list, builtin_type);
4780 t = builtin_types[a];
4781 if (t == error_mark_node)
4783 args = tree_cons (NULL_TREE, t, args);
/* Arguments were consed in reverse; restore source order.  */
4787 args = nreverse (args);
/* A void_list_node terminator marks the type as non-variadic.  */
4789 args = chainon (args, void_list_node);
4791 t = builtin_types[ret];
4792 if (t == error_mark_node)
4794 t = build_function_type (t, args);
4797 builtin_types[def] = t;
4800 /* Build the builtin function types and install them in the builtin_types
4801 array for later use in builtin function decls. */
4804 install_builtin_function_types (void)
4806 tree va_list_ref_type_node;
4807 tree va_list_arg_type_node;
/* An array va_list decays to a pointer to its element type when passed
   as an argument, so both forms are that pointer type.  */
4809 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4811 va_list_arg_type_node = va_list_ref_type_node =
4812 build_pointer_type (TREE_TYPE (va_list_type_node));
4816 va_list_arg_type_node = va_list_type_node;
4817 va_list_ref_type_node = build_reference_type (va_list_type_node);
/* Re-expand builtin-types.def, this time generating a def_fn_type call
   (or a direct store) for each entry.  */
4820 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4821 builtin_types[ENUM] = VALUE;
4822 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4823 def_fn_type (ENUM, RETURN, 0, 0);
4824 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4825 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4826 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4827 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4828 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4829 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4830 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4831 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4832 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4833 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4834 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4836 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4837 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4839 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4840 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4841 def_fn_type (ENUM, RETURN, 1, 0);
4842 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4843 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4844 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4845 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4846 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4847 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4848 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4849 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4850 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4851 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4852 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4853 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4855 #include "builtin-types.def"
/* NOTE(review): DEF_FUNCTION_TYPE_0 and DEF_FUNCTION_TYPE_7 are defined
   above but absent from the #undef list below; harmless unless
   builtin-types.def is expanded again later -- consider adding them.  */
4857 #undef DEF_PRIMITIVE_TYPE
4858 #undef DEF_FUNCTION_TYPE_1
4859 #undef DEF_FUNCTION_TYPE_2
4860 #undef DEF_FUNCTION_TYPE_3
4861 #undef DEF_FUNCTION_TYPE_4
4862 #undef DEF_FUNCTION_TYPE_5
4863 #undef DEF_FUNCTION_TYPE_6
4864 #undef DEF_FUNCTION_TYPE_VAR_0
4865 #undef DEF_FUNCTION_TYPE_VAR_1
4866 #undef DEF_FUNCTION_TYPE_VAR_2
4867 #undef DEF_FUNCTION_TYPE_VAR_3
4868 #undef DEF_FUNCTION_TYPE_VAR_4
4869 #undef DEF_FUNCTION_TYPE_VAR_5
4870 #undef DEF_POINTER_TYPE
4871 builtin_types[(int) BT_LAST] = NULL_TREE;
4874 /* ----------------------------------------------------------------------- *
4875 * BUILTIN ATTRIBUTES *
4876 * ----------------------------------------------------------------------- */
4878 enum built_in_attribute
/* Each DEF_ATTR_* entry of builtin-attrs.def becomes one enumerator;
   the corresponding attribute trees are built by
   install_builtin_attributes and stored in built_in_attributes.  */
4880 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4881 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4882 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4883 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4884 #include "builtin-attrs.def"
4885 #undef DEF_ATTR_NULL_TREE
4887 #undef DEF_ATTR_IDENT
4888 #undef DEF_ATTR_TREE_LIST
4892 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Re-expand builtin-attrs.def, now generating code that constructs each
   attribute tree and stores it at its enumerator's slot.  */
4895 install_builtin_attributes (void)
4897 /* Fill in the built_in_attributes array. */
4898 #define DEF_ATTR_NULL_TREE(ENUM) \
4899 built_in_attributes[(int) ENUM] = NULL_TREE;
4900 #define DEF_ATTR_INT(ENUM, VALUE) \
4901 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4902 #define DEF_ATTR_IDENT(ENUM, STRING) \
4903 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4904 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4905 built_in_attributes[(int) ENUM] \
4906 = tree_cons (built_in_attributes[(int) PURPOSE], \
4907 built_in_attributes[(int) VALUE], \
4908 built_in_attributes[(int) CHAIN]);
4909 #include "builtin-attrs.def"
4910 #undef DEF_ATTR_NULL_TREE
4912 #undef DEF_ATTR_IDENT
4913 #undef DEF_ATTR_TREE_LIST
4916 /* Handle a "const" attribute; arguments as in
4917 struct attribute_spec.handler. */
4920 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4921 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* TREE_READONLY on a FUNCTION_DECL is how the middle-end records the
   "const" (no side effects, reads no global memory) property.  */
4924 if (TREE_CODE (*node) == FUNCTION_DECL)
4925 TREE_READONLY (*node) = 1;
4927 *no_add_attrs = true;
4932 /* Handle a "nothrow" attribute; arguments as in
4933 struct attribute_spec.handler. */
4936 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4937 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* TREE_NOTHROW records that calls to this function cannot propagate
   exceptions.  */
4940 if (TREE_CODE (*node) == FUNCTION_DECL)
4941 TREE_NOTHROW (*node) = 1;
4943 *no_add_attrs = true;
4948 /* Handle a "pure" attribute; arguments as in
4949 struct attribute_spec.handler. */
4952 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4953 int ARG_UNUSED (flags), bool *no_add_attrs)
/* DECL_PURE_P: no side effects, though the function may still read
   global memory (a weaker guarantee than "const").  */
4955 if (TREE_CODE (*node) == FUNCTION_DECL)
4956 DECL_PURE_P (*node) = 1;
4957 /* ??? TODO: Support types. */
4960 warning (OPT_Wattributes, "%qE attribute ignored", name);
4961 *no_add_attrs = true;
4967 /* Handle a "no vops" attribute; arguments as in
4968 struct attribute_spec.handler. */
4971 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4972 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4973 bool *ARG_UNUSED (no_add_attrs))
/* This attribute is only applied to builtins we declare ourselves,
   hence an assert rather than a user diagnostic.  */
4975 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4976 DECL_IS_NOVOPS (*node) = 1;
4980 /* Helper for nonnull attribute handling; fetch the operand number
4981 from the attribute argument list. */
4984 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4986 /* Verify the arg number is a constant. */
/* Rejecting a nonzero high part also rejects values that do not fit in
   a HOST_WIDE_INT, so the low part below is the complete value.  */
4987 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4988 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4991 *valp = TREE_INT_CST_LOW (arg_num_expr);
4995 /* Handle the "nonnull" attribute. */
4997 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4998 tree args, int ARG_UNUSED (flags),
5002 unsigned HOST_WIDE_INT attr_arg_num;
5004 /* If no arguments are specified, all pointer arguments should be
5005 non-null. Verify a full prototype is given so that the arguments
5006 will have the correct types when we actually check them later. */
5009 if (!TYPE_ARG_TYPES (type))
5011 error ("nonnull attribute without arguments on a non-prototype");
5012 *no_add_attrs = true;
5017 /* Argument list specified. Verify that each argument number references
5018 a pointer argument. */
/* NOTE(review): attr_arg_num is never advanced in this loop header, so
   the diagnostics below always report "argument 1" -- confirm against
   the upstream handler, which increments it per iteration.  */
5019 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5022 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5024 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5026 error ("nonnull argument has invalid operand number (argument %lu)",
5027 (unsigned long) attr_arg_num);
5028 *no_add_attrs = true;
/* Walk the prototype list to the ARG_NUM'th entry.  */
5032 argument = TYPE_ARG_TYPES (type);
5035 for (ck_num = 1; ; ck_num++)
5037 if (!argument || ck_num == arg_num)
5039 argument = TREE_CHAIN (argument);
/* Running off the list, or hitting the terminating void entry, means
   the operand number exceeds the declared parameter count.  */
5043 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5045 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5046 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5047 *no_add_attrs = true;
5051 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5053 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5054 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5055 *no_add_attrs = true;
5064 /* Handle a "sentinel" attribute. */
5067 handle_sentinel_attribute (tree *node, tree name, tree args,
5068 int ARG_UNUSED (flags), bool *no_add_attrs)
5070 tree params = TYPE_ARG_TYPES (*node);
5074 warning (OPT_Wattributes,
5075 "%qE attribute requires prototypes with named arguments", name);
5076 *no_add_attrs = true;
5080 while (TREE_CHAIN (params))
5081 params = TREE_CHAIN (params);
5083 if (VOID_TYPE_P (TREE_VALUE (params)))
5085 warning (OPT_Wattributes,
5086 "%qE attribute only applies to variadic functions", name);
5087 *no_add_attrs = true;
5093 tree position = TREE_VALUE (args);
5095 if (TREE_CODE (position) != INTEGER_CST)
5097 warning (0, "requested position is not an integer constant");
5098 *no_add_attrs = true;
5102 if (tree_int_cst_lt (position, integer_zero_node))
5104 warning (0, "requested position is less than zero");
5105 *no_add_attrs = true;
5113 /* Handle a "noreturn" attribute; arguments as in
5114 struct attribute_spec.handler. */
5117 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5118 int ARG_UNUSED (flags), bool *no_add_attrs)
5120 tree type = TREE_TYPE (*node);
5122 /* See FIXME comment in c_common_attribute_table. */
5123 if (TREE_CODE (*node) == FUNCTION_DECL)
5124 TREE_THIS_VOLATILE (*node) = 1;
5125 else if (TREE_CODE (type) == POINTER_TYPE
5126 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5128 = build_pointer_type
5129 (build_type_variant (TREE_TYPE (type),
5130 TYPE_READONLY (TREE_TYPE (type)), 1));
5133 warning (OPT_Wattributes, "%qE attribute ignored", name);
5134 *no_add_attrs = true;
5140 /* Handle a "malloc" attribute; arguments as in
5141 struct attribute_spec.handler. */
5144 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5145 int ARG_UNUSED (flags), bool *no_add_attrs)
5147 if (TREE_CODE (*node) == FUNCTION_DECL
5148 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5149 DECL_IS_MALLOC (*node) = 1;
5152 warning (OPT_Wattributes, "%qE attribute ignored", name);
5153 *no_add_attrs = true;
5159 /* Fake handler for attributes we don't properly support. */
5162 fake_attribute_handler (tree * ARG_UNUSED (node),
5163 tree ARG_UNUSED (name),
5164 tree ARG_UNUSED (args),
5165 int ARG_UNUSED (flags),
5166 bool * ARG_UNUSED (no_add_attrs))
5171 /* Handle a "type_generic" attribute. */
5174 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5175 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5176 bool * ARG_UNUSED (no_add_attrs))
5180 /* Ensure we have a function type. */
5181 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5183 params = TYPE_ARG_TYPES (*node);
5184 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5185 params = TREE_CHAIN (params);
5187 /* Ensure we have a variadic function. */
5188 gcc_assert (!params);
/* ----------------------------------------------------------------------- *
 *                            BUILTIN FUNCTIONS                            *
 * ----------------------------------------------------------------------- */
5197 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5198 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5199 if nonansi_p and flag_no_nonansi_builtin. */
5202 def_builtin_1 (enum built_in_function fncode,
5204 enum built_in_class fnclass,
5205 tree fntype, tree libtype,
5206 bool both_p, bool fallback_p,
5207 bool nonansi_p ATTRIBUTE_UNUSED,
5208 tree fnattrs, bool implicit_p)
5211 const char *libname;
5213 /* Preserve an already installed decl. It most likely was setup in advance
5214 (e.g. as part of the internal builtins) for specific reasons. */
5215 if (built_in_decls[(int) fncode] != NULL_TREE)
5218 gcc_assert ((!both_p && !fallback_p)
5219 || !strncmp (name, "__builtin_",
5220 strlen ("__builtin_")));
5222 libname = name + strlen ("__builtin_");
5223 decl = add_builtin_function (name, fntype, fncode, fnclass,
5224 (fallback_p ? libname : NULL),
5227 /* ??? This is normally further controlled by command-line options
5228 like -fno-builtin, but we don't have them for Ada. */
5229 add_builtin_function (libname, libtype, fncode, fnclass,
5232 built_in_decls[(int) fncode] = decl;
5234 implicit_built_in_decls[(int) fncode] = decl;
/* Flags tested by the COND expressions in builtins.def; the Ada
   front-end never enables the C94 or C99 builtin sets, so these
   remain zero.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5240 /* Install what the common builtins.def offers. */
5243 install_builtin_functions (void)
5245 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5246 NONANSI_P, ATTRS, IMPLICIT, COND) \
5248 def_builtin_1 (ENUM, NAME, CLASS, \
5249 builtin_types[(int) TYPE], \
5250 builtin_types[(int) LIBTYPE], \
5251 BOTH_P, FALLBACK_P, NONANSI_P, \
5252 built_in_attributes[(int) ATTRS], IMPLICIT);
5253 #include "builtins.def"
/* ----------------------------------------------------------------------- *
 *                            BUILTIN FUNCTIONS                            *
 * ----------------------------------------------------------------------- */
5261 /* Install the builtin functions we might need. */
5264 gnat_install_builtins (void)
5266 install_builtin_elementary_types ();
5267 install_builtin_function_types ();
5268 install_builtin_attributes ();
5270 /* Install builtins used by generic middle-end pieces first. Some of these
5271 know about internal specificities and control attributes accordingly, for
5272 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5273 the generic definition from builtins.def. */
5274 build_common_builtin_nodes ();
5276 /* Now, install the target specific builtins, such as the AltiVec family on
5277 ppc, and the common set as exposed by builtins.def. */
5278 targetm.init_builtins ();
5279 install_builtin_functions ();
5282 #include "gt-ada-utils.h"
5283 #include "gtype-ada.h"