1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2005, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
/* Fallback definitions for targets that do not override these values.
   NOTE(review): the matching #endif lines appear to have been elided from
   this listing — verify against the original file.  */
/* Widest fixed (non-BLKmode) mode we handle, in bits; default is DImode.  */
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
/* Upper bound on the machine word size, in bits.  */
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
/* File-scope state shared by the binding-level and type-caching machinery
   below.  All GC-visible roots are marked with GTY so the precise garbage
   collector can scan or discard them as appropriate.  */
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Associates a GNAT tree node to a GCC tree node. It is used in
78 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
79 of `save_gnu_tree' for more info. */
80 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
82 /* This variable keeps a table for types for each precision so that we only
83 allocate each of them once. Signed and unsigned types are kept separate.
85 Note that these types are only used when fold-const requests something
86 special. Perhaps we should NOT share these types; we'll see how it
88 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
90 /* Likewise for float types, but record these by mode. */
91 static GTY(()) tree float_types[NUM_MACHINE_MODES];
93 /* For each binding contour we allocate a binding_level structure to indicate
96 struct gnat_binding_level GTY((chain_next ("%h.chain")))
98 /* The binding level containing this one (the enclosing binding level). */
99 struct gnat_binding_level *chain;
/* NOTE(review): the `block' and `jmpbuf_decl' member declarations that the
   two comments below describe appear to have been elided from this listing,
   as has the struct's closing brace — code later in the file dereferences
   ->block and ->jmpbuf_decl, so they must exist in the original.  */
100 /* The BLOCK node for this level. */
102 /* If nonzero, the setjmp buffer that needs to be updated for any
103 variable-sized definition within this context. */
107 /* The binding level currently in effect. */
108 static GTY(()) struct gnat_binding_level *current_binding_level;
110 /* A chain of gnat_binding_level structures awaiting reuse. */
/* `deletable' lets the collector reclaim this free list at GC time.  */
111 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
113 /* A chain of unused BLOCK nodes. */
114 static GTY((deletable)) tree free_block_chain;
/* Per-function language-specific data; body elided in this listing.  */
116 struct language_function GTY(())
116 struct language_function GTY(())
/* Forward declarations for the file-local helper functions defined below.
   NOTE(review): `make_descriptor_field' is missing a space after the first
   comma; purely cosmetic.  */
121 static void gnat_install_builtins (void);
122 static tree merge_sizes (tree, tree, tree, bool, bool);
123 static tree compute_related_constant (tree, tree);
124 static tree split_plus (tree, tree *);
125 static bool value_zerop (tree);
126 static void gnat_gimplify_function (tree);
127 static tree float_type_for_precision (int, enum machine_mode);
128 static tree convert_to_fat_pointer (tree, tree);
129 static tree convert_to_thin_pointer (tree, tree);
130 static tree make_descriptor_field (const char *,tree, tree, tree);
131 static bool value_factor_p (tree, HOST_WIDE_INT);
132 static bool potential_alignment_gap (tree, tree, tree);
134 /* Initialize the association of GNAT nodes to GCC trees. */
137 init_gnat_to_gnu (void)
/* Allocate a zero-initialized, GC-managed array with one slot per GNAT
   node; entries are filled in lazily by save_gnu_tree.  */
139 associate_gnat_to_gnu
140 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
143 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
144 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
145 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
147 If GNU_DECL is zero, a previous association is to be reset. */
150 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
152 /* Check that GNAT_ENTITY is not already defined and that it is being set
153 to something which is a decl. Raise gigi 401 if not. Usually, this
154 means GNAT_ENTITY is defined twice, but occasionally is due to some
/* The assertion only fires when setting a non-null decl: it requires that
   no association exists yet and (unless NO_CHECK) that GNU_DECL really is
   a ..._DECL node.  Resetting with a null GNU_DECL is always allowed.  */
156 gcc_assert (!gnu_decl
157 || (!associate_gnat_to_gnu[gnat_entity - First_Node_Id]
158 && (no_check || DECL_P (gnu_decl))));
159 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
162 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
163 Return the ..._DECL node that was associated with it. If there is no tree
164 node associated with GNAT_ENTITY, abort.
166 In some cases, such as delayed elaboration or expressions that need to
167 be elaborated only once, GNAT_ENTITY is really not an entity. */
170 get_gnu_tree (Entity_Id gnat_entity)
/* Aborts (via gcc_assert) if no association was recorded — callers must
   use present_gnu_tree first when absence is a legitimate possibility.  */
172 gcc_assert (associate_gnat_to_gnu[gnat_entity - First_Node_Id]);
173 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
176 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
179 present_gnu_tree (Entity_Id gnat_entity)
181 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id]) != 0;
185 /* Return non-zero if we are currently in the global binding level. */
188 global_bindings_p (void)
/* Global scope means either FORCE_GLOBAL is set or we are outside any
   function.  Returns -1 (not 1) for "true" — callers only test nonzero.  */
190 return ((force_global || !current_function_decl) ? -1 : 0);
193 /* Enter a new binding level. */
/* NOTE(review): the function's signature line appears to have been elided
   from this listing (the body below matches GNAT's gnat_pushlevel); verify
   against the original file.  */
198 struct gnat_binding_level *newlevel = NULL;
200 /* Reuse a struct for this binding level, if there is one. */
201 if (free_binding_level)
203 newlevel = free_binding_level;
204 free_binding_level = free_binding_level->chain;
/* Otherwise allocate a fresh, GC-managed structure.  */
208 = (struct gnat_binding_level *)
209 ggc_alloc (sizeof (struct gnat_binding_level))
211 /* Use a free BLOCK, if any; otherwise, allocate one. */
212 if (free_block_chain)
214 newlevel->block = free_block_chain;
215 free_block_chain = TREE_CHAIN (free_block_chain);
216 TREE_CHAIN (newlevel->block) = NULL_TREE;
219 newlevel->block = make_node (BLOCK);
221 /* Point the BLOCK we just made to its parent. */
222 if (current_binding_level)
223 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* A recycled BLOCK may carry stale contents; clear its variable and
   subblock lists and mark it used so it is never discarded as dead.  */
225 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
226 TREE_USED (newlevel->block) = 1;
228 /* Add this level to the front of the chain (stack) of levels that are
230 newlevel->chain = current_binding_level;
231 newlevel->jmpbuf_decl = NULL_TREE;
232 current_binding_level = newlevel;
235 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
236 and point FNDECL to this BLOCK. */
239 set_current_block_context (tree fndecl)
/* Links the function decl and its outermost scope BLOCK in both
   directions, as the middle end expects for DECL_INITIAL.  */
241 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
242 DECL_INITIAL (fndecl) = current_binding_level->block;
245 /* Set the jmpbuf_decl for the current binding level to DECL. */
248 set_block_jmpbuf_decl (tree decl)
250 current_binding_level->jmpbuf_decl = decl;
253 /* Get the jmpbuf_decl, if any, for the current binding level. */
/* NOTE(review): old-style empty parameter list; should be `(void)' for
   consistency with the other definitions in this file.  */
256 get_block_jmpbuf_decl ()
258 return current_binding_level->jmpbuf_decl;
261 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function's signature line appears to have been elided
   from this listing (the body below matches GNAT's gnat_poplevel).  */
266 struct gnat_binding_level *level = current_binding_level;
267 tree block = level->block;
/* Decls and subblocks were accumulated in reverse; restore source order.  */
269 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
270 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
272 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
273 are no variables free the block and merge its subblocks into those of its
274 parent block. Otherwise, add it to the list of its parent. */
275 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
277 else if (BLOCK_VARS (block) == NULL_TREE)
279 BLOCK_SUBBLOCKS (level->chain->block)
280 = chainon (BLOCK_SUBBLOCKS (block),
281 BLOCK_SUBBLOCKS (level->chain->block));
/* Recycle the empty BLOCK onto the free list for later reuse.  */
282 TREE_CHAIN (block) = free_block_chain;
283 free_block_chain = block;
/* Non-empty case: chain this BLOCK into the parent's subblocks.  */
287 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
288 BLOCK_SUBBLOCKS (level->chain->block) = block;
289 TREE_USED (block) = 1;
290 set_block_for_group (block);
293 /* Free this binding structure. */
294 current_binding_level = level->chain;
295 level->chain = free_binding_level;
296 free_binding_level = level;
299 /* Insert BLOCK at the end of the list of subblocks of the
300 current binding level. This is used when a BIND_EXPR is expanded,
301 to handle the BLOCK node inside the BIND_EXPR. */
304 insert_block (tree block)
/* Prepends to the subblock chain; the chain is reversed into source
   order when the level is popped (see the poplevel logic above).  */
306 TREE_USED (block) = 1;
307 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
308 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
311 /* Records a ..._DECL node DECL as belonging to the current lexical scope
312 and uses GNAT_NODE for location information and propagating flags. */
315 gnat_pushdecl (tree decl, Node_Id gnat_node)
317 /* If at top level, there is no context. But PARM_DECLs always go in the
318 level of its function. */
319 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
320 DECL_CONTEXT (decl) = 0;
/* Otherwise the decl belongs to the function being compiled.  */
322 DECL_CONTEXT (decl) = current_function_decl;
/* Suppress warnings for compiler-generated decls (Empty GNAT node) or
   when warnings were explicitly turned off on the GNAT side.  */
324 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
326 /* Set the location of DECL and emit a declaration for it. */
327 if (Present (gnat_node))
328 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
329 add_decl_expr (decl, gnat_node);
331 /* Put the declaration on the list. The list of declarations is in reverse
332 order. The list will be reversed later. We don't do this for global
333 variables. Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
334 the list. They will cause trouble with the debugger and aren't needed
336 if (!global_bindings_p ()
337 && (TREE_CODE (decl) != TYPE_DECL
338 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
340 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
341 BLOCK_VARS (current_binding_level->block) = decl;
344 /* For the declaration of a type, set its name if it either is not already
345 set, was set to an IDENTIFIER_NODE, indicating an internal name,
346 or if the previous type name was not derived from a source name.
347 We'd rather have the type named with a real name and all the pointer
348 types to the same object have the same POINTER_TYPE node. Code in this
349 function in c-decl.c makes a copy of the type node here, but that may
350 cause us trouble with incomplete types, so let's not try it (at least
353 if (TREE_CODE (decl) == TYPE_DECL
355 && (!TYPE_NAME (TREE_TYPE (decl))
356 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
357 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
358 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
359 && !DECL_ARTIFICIAL (decl))))
360 TYPE_NAME (TREE_TYPE (decl)) = decl;
/* CONST_DECLs are purely front-end artifacts; don't send them to the
   middle end.  */
362 if (TREE_CODE (decl) != CONST_DECL)
363 rest_of_decl_compilation (decl, global_bindings_p (), 0);
366 /* Do little here. Set up the standard declarations later after the
367 front end has been run. */
370 gnat_init_decl_processing (void)
374 /* Make the binding_level structure for global names. */
375 current_function_decl = 0;
376 current_binding_level = 0;
377 free_binding_level = 0;
/* Build the common GCC tree nodes (char/int types, etc.).  */
380 build_common_tree_nodes (true, true);
382 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
383 corresponding to the size of Pmode. In most cases when ptr_mode and
384 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
385 far better code using the width of Pmode. Make this here since we need
386 this before we can expand the GNAT types. */
387 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
388 set_sizetype (size_type_node);
389 build_common_tree_nodes_2 (0);
391 /* Give names and make TYPE_DECLs for common types. */
/* NOTE(review): the trailing Empty arguments and some type arguments of
   these gnat_pushdecl calls appear to have been elided from this listing.  */
392 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
394 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
397 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
400 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
401 long_integer_type_node),
404 ptr_void_type_node = build_pointer_type (void_type_node);
406 gnat_install_builtins ();
409 /* Install the builtin functions the middle-end needs. */
412 gnat_install_builtins ()
414 /* Builtins used by generic optimizers. */
415 build_common_builtin_nodes ();
417 /* Target specific builtins, such as the AltiVec family on ppc. */
418 targetm.init_builtins ();
421 /* Create the predefined scalar types such as `integer_type_node' needed
422 in the gcc back-end and initialize the global binding level. */
425 init_gigi_decls (tree long_long_float_type, tree exception_type)
430 /* Set the types that GCC and Gigi use from the front end. We would like
431 to do this for char_type_node, but it needs to correspond to the C
433 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
435 /* In this case, the builtin floating point types are VAX float,
436 so make up a type for use. */
437 longest_float_type_node = make_node (REAL_TYPE);
438 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
439 layout_type (longest_float_type_node);
440 create_type_decl (get_identifier ("longest float type"),
441 longest_float_type_node, NULL, false, true, Empty);
/* Normal case: reuse the element type of the front end's type.  */
444 longest_float_type_node = TREE_TYPE (long_long_float_type);
446 except_type_node = TREE_TYPE (exception_type);
448 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
449 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
450 NULL, false, true, Empty);
452 void_type_decl_node = create_type_decl (get_identifier ("void"),
453 void_type_node, NULL, false, true,
456 void_ftype = build_function_type (void_type_node, NULL_TREE);
457 ptr_void_ftype = build_pointer_type (void_ftype);
459 /* Now declare runtime functions. */
/* ENDLINK terminates every parameter-type list built below, marking the
   functions as non-varargs.  */
460 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
462 /* malloc is a function declaration tree for a function to allocate
464 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
466 build_function_type (ptr_void_type_node,
467 tree_cons (NULL_TREE,
470 NULL_TREE, false, true, true, NULL,
473 /* free is a function declaration tree for a function to free memory. */
475 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
476 build_function_type (void_type_node,
477 tree_cons (NULL_TREE,
480 NULL_TREE, false, true, true, NULL, Empty);
482 /* Make the types and functions used for exception processing. */
/* JMPBUF_T is modeled as an array of 6 pointer-sized words (index 0..5).
   NOTE(review): assumes this matches the layout __builtin_setjmp expects;
   verify against builtins documentation.  */
484 = build_array_type (gnat_type_for_mode (Pmode, 0),
485 build_index_type (build_int_cst (NULL_TREE, 5)));
486 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
488 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
490 /* Functions to get and set the jumpbuf pointer for the current thread. */
492 = create_subprog_decl
493 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
494 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
495 NULL_TREE, false, true, true, NULL, Empty);
498 = create_subprog_decl
499 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
501 build_function_type (void_type_node,
502 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
503 NULL_TREE, false, true, true, NULL, Empty);
505 /* Function to get the current exception. */
507 = create_subprog_decl
508 (get_identifier ("system__soft_links__get_gnat_exception"),
510 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
511 NULL_TREE, false, true, true, NULL, Empty);
513 /* Functions that raise exceptions. */
515 = create_subprog_decl
516 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
517 build_function_type (void_type_node,
518 tree_cons (NULL_TREE,
519 build_pointer_type (except_type_node),
521 NULL_TREE, false, true, true, NULL, Empty);
523 /* Dummy objects to materialize "others" and "all others" in the exception
524 tables. These are exported by a-exexpr.adb, so see this unit for the
528 = create_var_decl (get_identifier ("OTHERS"),
529 get_identifier ("__gnat_others_value"),
530 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
533 = create_var_decl (get_identifier ("ALL_OTHERS"),
534 get_identifier ("__gnat_all_others_value"),
535 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
537 /* Hooks to call when entering/leaving an exception handler. */
539 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
540 build_function_type (void_type_node,
541 tree_cons (NULL_TREE,
544 NULL_TREE, false, true, true, NULL, Empty);
547 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
548 build_function_type (void_type_node,
549 tree_cons (NULL_TREE,
552 NULL_TREE, false, true, true, NULL, Empty);
554 /* If in no exception handlers mode, all raise statements are redirected to
555 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
556 this procedure will never be called in this mode. */
557 if (No_Exception_Handlers_Set ())
560 = create_subprog_decl
561 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
562 build_function_type (void_type_node,
563 tree_cons (NULL_TREE,
564 build_pointer_type (char_type_node),
565 tree_cons (NULL_TREE,
568 NULL_TREE, false, true, true, NULL, Empty);
/* Every raise reason maps to the single last-chance handler.  */
570 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
571 gnat_raise_decls[i] = decl;
574 /* Otherwise, make one decl for each exception reason. */
575 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* Runtime check routines are named __gnat_rcheck_NN by reason code.  */
579 sprintf (name, "__gnat_rcheck_%.2d", i);
581 = create_subprog_decl
582 (get_identifier (name), NULL_TREE,
583 build_function_type (void_type_node,
584 tree_cons (NULL_TREE,
587 tree_cons (NULL_TREE,
590 NULL_TREE, false, true, true, NULL, Empty);
593 /* Indicate that these never return. */
/* Volatile + side-effects on a FUNCTION_DECL is how noreturn is encoded
   at the tree level; the qualified type below reinforces it.  */
594 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
595 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
596 TREE_TYPE (raise_nodefer_decl)
597 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
600 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
602 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
603 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
604 TREE_TYPE (gnat_raise_decls[i])
605 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
609 /* setjmp returns an integer and has one operand, which is a pointer to
612 = create_subprog_decl
613 (get_identifier ("__builtin_setjmp"), NULL_TREE,
614 build_function_type (integer_type_node,
615 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
616 NULL_TREE, false, true, true, NULL, Empty);
/* Mark it as the real builtin so the middle end expands it inline.  */
618 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
619 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
621 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
623 update_setjmp_buf_decl
624 = create_subprog_decl
625 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
626 build_function_type (void_type_node,
627 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
628 NULL_TREE, false, true, true, NULL, Empty);
630 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
631 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
633 main_identifier_node = get_identifier ("main");
636 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
637 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
638 true, this record has a rep clause; don't call layout_type but merely set
639 the size and alignment ourselves. If DEFER_DEBUG is true, do not call
640 the debugging routines on this type; it will be done later. */
643 finish_record_type (tree record_type, tree fieldlist, bool has_rep,
646 enum tree_code code = TREE_CODE (record_type);
647 tree ada_size = bitsize_zero_node;
648 tree size = bitsize_zero_node;
649 bool var_size = false;
650 bool had_size = TYPE_SIZE (record_type) != 0;
651 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
654 TYPE_FIELDS (record_type) = fieldlist;
655 TYPE_STUB_DECL (record_type)
656 = build_decl (TYPE_DECL, NULL_TREE, record_type);
658 /* We don't need both the typedef name and the record name output in
659 the debugging information, since they are the same. */
660 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
662 /* Globally initialize the record first. If this is a rep'ed record,
663 that just means some initializations; otherwise, layout the record. */
/* Rep'ed path: establish minimal alignment/mode/size by hand.  */
667 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
668 TYPE_MODE (record_type) = BLKmode;
671 TYPE_SIZE_UNIT (record_type) = size_zero_node;
674 TYPE_SIZE (record_type) = bitsize_zero_node;
675 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
676 out just like a UNION_TYPE, since the size will be fixed. */
677 else if (code == QUAL_UNION_TYPE)
682 /* Ensure there isn't a size already set. There can be in an error
683 case where there is a rep clause but all fields have errors and
684 no longer have a position. */
685 TYPE_SIZE (record_type) = 0;
686 layout_type (record_type);
689 /* At this point, the position and size of each field is known. It was
690 either set before entry by a rep clause, or by laying out the type above.
692 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
693 to compute the Ada size; the GCC size and alignment (for rep'ed records
694 that are not padding types); and the mode (for rep'ed records). We also
695 clear the DECL_BIT_FIELD indication for the cases we know have not been
696 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
698 if (code == QUAL_UNION_TYPE)
699 fieldlist = nreverse (fieldlist);
701 for (field = fieldlist; field; field = TREE_CHAIN (field))
703 tree pos = bit_position (field);
705 tree type = TREE_TYPE (field);
706 tree this_size = DECL_SIZE (field);
707 tree this_ada_size = DECL_SIZE (field);
709 /* We need to make an XVE/XVU record if any field has variable size,
710 whether or not the record does. For example, if we have an union,
711 it may be that all fields, rounded up to the alignment, have the
712 same size, in which case we'll use that size. But the debug
713 output routines (except Dwarf2) won't be able to output the fields,
714 so we need to make the special record. */
715 if (TREE_CODE (this_size) != INTEGER_CST)
/* Ada size ignores record/union padding: use TYPE_ADA_SIZE of the
   field's type when one is recorded.  */
718 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
719 || TREE_CODE (type) == QUAL_UNION_TYPE)
720 && !TYPE_IS_FAT_POINTER_P (type)
721 && !TYPE_CONTAINS_TEMPLATE_P (type)
722 && TYPE_ADA_SIZE (type))
723 this_ada_size = TYPE_ADA_SIZE (type);
725 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
726 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
727 && value_factor_p (pos, BITS_PER_UNIT)
728 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
729 DECL_BIT_FIELD (field) = 0;
731 /* If we still have DECL_BIT_FIELD set at this point, we know the field
732 is technically not addressable. Except that it can actually be
733 addressed if the field is BLKmode and happens to be properly
735 DECL_NONADDRESSABLE_P (field)
736 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
738 if (has_rep && !DECL_BIT_FIELD (field))
739 TYPE_ALIGN (record_type)
740 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Union: the size is the max over all members.  */
745 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
746 size = size_binop (MAX_EXPR, size, this_size);
749 case QUAL_UNION_TYPE:
/* Variant part: size depends on which discriminant qualifier holds.  */
751 = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
752 this_ada_size, ada_size));
753 size = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
758 /* Since we know here that all fields are sorted in order of
759 increasing bit position, the size of the record is one
760 higher than the ending bit of the last field processed
761 unless we have a rep clause, since in that case we might
762 have a field outside a QUAL_UNION_TYPE that has a higher ending
763 position. So use a MAX in that case. Also, if this field is a
764 QUAL_UNION_TYPE, we need to take into account the previous size in
765 the case of empty variants. */
767 = merge_sizes (ada_size, pos, this_ada_size,
768 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep)
769 size = merge_sizes (size, pos, this_size,
770 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep)
/* Undo the reversal done before the loop.  */
778 if (code == QUAL_UNION_TYPE)
779 nreverse (fieldlist);
781 /* If this is a padding record, we never want to make the size smaller than
782 what was specified in it, if any. */
783 if (TREE_CODE (record_type) == RECORD_TYPE
784 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
785 size = TYPE_SIZE (record_type);
787 /* Now set any of the values we've just computed that apply. */
788 if (!TYPE_IS_FAT_POINTER_P (record_type)
789 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
790 SET_TYPE_ADA_SIZE (record_type, ada_size);
/* Byte size: keep a caller-supplied value, else bits rounded up.  */
795 = (had_size_unit ? TYPE_SIZE_UNIT (record_type)
796 : convert (sizetype, size_binop (CEIL_DIV_EXPR, size,
797 bitsize_unit_node)));
799 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
800 TYPE_SIZE_UNIT (record_type)
801 = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
803 compute_record_mode (record_type);
808 /* If this record is of variable size, rename it so that the
809 debugger knows it is and make a new, parallel, record
810 that tells the debugger how the record is laid out. See
811 exp_dbug.ads. But don't do this for records that are padding
812 since they confuse GDB. */
814 && !(TREE_CODE (record_type) == RECORD_TYPE
815 && TYPE_IS_PADDING_P (record_type)))
/* Build the parallel XVE/XVU debug record described in exp_dbug.ads.  */
818 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
819 ? UNION_TYPE : TREE_CODE (record_type));
820 tree orig_name = TYPE_NAME (record_type);
822 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
825 = concat_id_with_name (orig_id,
826 TREE_CODE (record_type) == QUAL_UNION_TYPE
828 tree last_pos = bitsize_zero_node;
830 tree prev_old_field = 0;
832 TYPE_NAME (new_record_type) = new_id;
833 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
834 TYPE_STUB_DECL (new_record_type)
835 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
836 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
837 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
838 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
839 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
840 TYPE_SIZE_UNIT (new_record_type)
841 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
843 /* Now scan all the fields, replacing each field with a new
844 field corresponding to the new encoding. */
845 for (old_field = TYPE_FIELDS (record_type); old_field;
846 old_field = TREE_CHAIN (old_field))
848 tree field_type = TREE_TYPE (old_field);
849 tree field_name = DECL_NAME (old_field);
851 tree curpos = bit_position (old_field);
853 unsigned int align = 0;
856 /* See how the position was modified from the last position.
858 There are two basic cases we support: a value was added
859 to the last position or the last position was rounded to
860 a boundary and they something was added. Check for the
861 first case first. If not, see if there is any evidence
862 of rounding. If so, round the last position and try
865 If this is a union, the position can be taken as zero. */
867 if (TREE_CODE (new_record_type) == UNION_TYPE)
868 pos = bitsize_zero_node, align = 0;
870 pos = compute_related_constant (curpos, last_pos);
/* curpos = N * align: infer alignment from the multiplier.  */
872 if (!pos && TREE_CODE (curpos) == MULT_EXPR
873 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
875 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
876 pos = compute_related_constant (curpos,
877 round_up (last_pos, align));
/* curpos = N * align + offset: likewise via the MULT operand.  */
879 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
880 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
881 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
882 && host_integerp (TREE_OPERAND
883 (TREE_OPERAND (curpos, 0), 1),
888 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
889 pos = compute_related_constant (curpos,
890 round_up (last_pos, align));
892 else if (potential_alignment_gap (prev_old_field, old_field,
895 align = TYPE_ALIGN (field_type);
896 pos = compute_related_constant (curpos,
897 round_up (last_pos, align));
900 /* If we can't compute a position, set it to zero.
902 ??? We really should abort here, but it's too much work
903 to get this correct for all cases. */
906 pos = bitsize_zero_node;
908 /* See if this type is variable-size and make a new type
909 and indicate the indirection if so. */
910 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
912 field_type = build_pointer_type (field_type);
916 /* Make a new field name, if necessary. */
/* Suffix encodes the debug transformation: XVL = converted to
   pointer (variable size), XVAnn = alignment nn applied.  */
917 if (var || align != 0)
922 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
923 align / BITS_PER_UNIT)
925 strcpy (suffix, "XVL");
927 field_name = concat_id_with_name (field_name, suffix);
930 new_field = create_field_decl (field_name, field_type,
932 DECL_SIZE (old_field), pos, 0);
933 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
934 TYPE_FIELDS (new_record_type) = new_field;
936 /* If old_field is a QUAL_UNION_TYPE, take its size as being
937 zero. The only time it's not the last field of the record
938 is when there are other components at fixed positions after
939 it (meaning there was a rep clause for every field) and we
940 want to be able to encode them. */
941 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
942 (TREE_CODE (TREE_TYPE (old_field))
945 : DECL_SIZE (old_field));
946 prev_old_field = old_field;
/* Fields were prepended; restore declaration order.  */
949 TYPE_FIELDS (new_record_type)
950 = nreverse (TYPE_FIELDS (new_record_type));
952 rest_of_type_compilation (new_record_type, global_bindings_p ());
955 rest_of_type_compilation (record_type, global_bindings_p ());
959 /* Utility function of above to merge LAST_SIZE, the previous size of a record
960 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
961 if this represents a QUAL_UNION_TYPE in which case we must look for
962 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
963 is nonzero, we must take the MAX of the end position of this field
964 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
966 We return an expression for the size. */
969 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
972 tree type = TREE_TYPE (last_size);
/* Ordinary field: end position is FIRST_BIT + SIZE, max'ed against the
   running size when a rep clause allows out-of-order positions.  */
975 if (!special || TREE_CODE (size) != COND_EXPR)
977 new = size_binop (PLUS_EXPR, first_bit, size);
979 new = size_binop (MAX_EXPR, last_size, new);
/* Variant (COND_EXPR) size: recurse into each arm, keeping LAST_SIZE
   for empty (zero-sized) variants.  */
983 new = fold (build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
984 integer_zerop (TREE_OPERAND (size, 1))
985 ? last_size : merge_sizes (last_size, first_bit,
986 TREE_OPERAND (size, 1),
988 integer_zerop (TREE_OPERAND (size, 2))
989 ? last_size : merge_sizes (last_size, first_bit,
990 TREE_OPERAND (size, 2),
993 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
994 when fed through substitute_in_expr) into thinking that a constant
995 size is not constant. */
996 while (TREE_CODE (new) == NON_LVALUE_EXPR)
997 new = TREE_OPERAND (new, 0);
1002 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1003 related by the addition of a constant. Return that constant if so. */
1006 compute_related_constant (tree op0, tree op1)
/* Split each operand into constant + variable parts; the candidate
   difference is the difference of the constant parts.  */
1008 tree op0_var, op1_var;
1009 tree op0_con = split_plus (op0, &op0_var);
1010 tree op1_con = split_plus (op1, &op1_var);
1011 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* NOTE(review): the return statements for these two branches (and the
   final fallthrough return) appear to have been elided from this listing;
   the conditions validate that RESULT really relates OP0 and OP1.  */
1013 if (operand_equal_p (op0_var, op1_var, 0))
1015 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1021 /* Utility function of above to split a tree OP which may be a sum, into a
1022 constant part, which is returned, and a variable part, which is stored
1023 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1027 split_plus (tree in, tree *pvar)
1029 /* Strip NOPS in order to ease the tree traversal and maximize the
1030 potential for constant or plus/minus discovery. We need to be careful
1031 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part, constant is 0.  */
1035 *pvar = convert (bitsizetype, in);
1037 if (TREE_CODE (in) == INTEGER_CST)
1039 *pvar = bitsize_zero_node;
1040 return convert (bitsizetype, in);
1042 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
/* Recurse on both operands and recombine with the same +/- operator.  */
1044 tree lhs_var, rhs_var;
1045 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1046 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side simplified: keep *pvar as the whole expression (set
   above) and report a zero constant part.  */
1048 if (lhs_var == TREE_OPERAND (in, 0)
1049 && rhs_var == TREE_OPERAND (in, 1))
1050 return bitsize_zero_node;
1052 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1053 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1056 return bitsize_zero_node;
1059 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1060 subprogram. If it is void_type_node, then we are dealing with a procedure,
1061 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1062 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1063 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1064 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1065 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1066 RETURNS_WITH_DSP is nonzero if the function is to return with a
1067 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1068 is to be passed (as its first parameter) the address of the place to copy
/* Build a FUNCTION_TYPE for RETURN_TYPE / PARAM_DECL_LIST and stamp the
   GNAT-specific flags (CICO list, returns-unconstrained/by-ref/DSP/
   by-target-pointer) on it, copying the type first when it was shared.
   See the block comment above for the parameter semantics.  */
1072 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1073 bool returns_unconstrained, bool returns_by_ref,
1074 bool returns_with_dsp, bool returns_by_target_ptr)
1076 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1077 the subprogram formal parameters. This list is generated by traversing the
1078 input list of PARM_DECL nodes. */
1079 tree param_type_list = NULL;
/* Collect the type of each PARM_DECL (list is built in reverse).  */
1083 for (param_decl = param_decl_list; param_decl;
1084 param_decl = TREE_CHAIN (param_decl))
1085 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1088 /* The list of the function parameter types has to be terminated by the void
1089 type to signal to the back-end that we are not dealing with a variable
1090 parameter subprogram, but that the subprogram has a fixed number of
1092 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1094 /* The list of argument types has been created in reverse
1096 param_type_list = nreverse (param_type_list);
1098 type = build_function_type (return_type, param_type_list);
1100 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1101 or the new type should, make a copy of TYPE. Likewise for
1102 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1103 if (TYPE_CI_CO_LIST (type) || cico_list
1104 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1105 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1106 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1107 type = copy_type (type);
/* Now that TYPE is private if needed, record the Ada-specific flags.  */
1109 TYPE_CI_CO_LIST (type) = cico_list;
1110 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1111 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1112 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1113 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1117 /* Return a copy of TYPE but safe to modify in any way. */
/* Return a copy of TYPE that is safe to modify: pointer caches are
   cleared and the copy is made its own main variant so later edits
   cannot leak into the original or its variant chain.  */
1120 copy_type (tree type)
1122 tree new = copy_node (type);
1124 /* copy_node clears this field instead of copying it, because it is
1125 aliased with TREE_CHAIN. */
1126 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Drop cached pointer/reference types; they still refer to TYPE.  */
1128 TYPE_POINTER_TO (new) = 0;
1129 TYPE_REFERENCE_TO (new) = 0;
1130 TYPE_MAIN_VARIANT (new) = new;
1131 TYPE_NEXT_VARIANT (new) = 0;
1136 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1137 TYPE_INDEX_TYPE is INDEX. */
/* Build an index type with range MIN..MAX whose TYPE_INDEX_TYPE is INDEX,
   copying the (possibly hash-shared) range type when its index slot is
   already taken by a different INDEX.  */
1140 create_index_type (tree min, tree max, tree index)
1142 /* First build a type for the desired range. */
1143 tree type = build_index_2_type (min, max);
1145 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1146 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1147 is set, but not to INDEX, make a copy of this type with the requested
1148 index type. Note that we have no way of sharing these types, but that's
1149 only a small hole. */
1150 if (TYPE_INDEX_TYPE (type) == index)
1152 else if (TYPE_INDEX_TYPE (type))
1153 type = copy_type (type);
/* Record INDEX and emit a compiler-generated TYPE_DECL for the type.  */
1155 SET_TYPE_INDEX_TYPE (type, index);
1156 create_type_decl (NULL_TREE, type, NULL, true, false, Empty);
1160 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1161 string) and TYPE is a ..._TYPE node giving its data type.
1162 ARTIFICIAL_P is true if this is a declaration that was generated
1163 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1164 information about this type. GNAT_NODE is used for the position of
/* Build a TYPE_DECL for TYPE named TYPE_NAME, apply ATTR_LIST, decide
   whether the debugger should see it, and push it on the current binding
   level (unless TYPE is a dummy awaiting completion).  See the comment
   above for parameter details.  */
1168 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1169 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1171 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1172 enum tree_code code = TREE_CODE (type);
1174 DECL_ARTIFICIAL (type_decl) = artificial_p;
1176 process_attributes (type_decl, attr_list);
1178 /* Pass type declaration information to the debugger unless this is an
1179 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1180 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1181 a dummy type, which will be completed later, or a type for which
1182 debugging information was not requested. */
1183 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1185 DECL_IGNORED_P (type_decl) = 1;
1186 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1187 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1188 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1189 rest_of_decl_compilation (type_decl, global_bindings_p (), 0);
/* A dummy type will be pushed when it is completed; skip it for now.  */
1191 if (!TYPE_IS_DUMMY_P (type))
1192 gnat_pushdecl (type_decl, gnat_node);
1197 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1198 ASM_NAME is its assembler name (if provided). TYPE is its data type
1199 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1200 expression; NULL_TREE if none.
1202 CONST_FLAG is true if this variable is constant.
1204 PUBLIC_FLAG is true if this definition is to be made visible outside of
1205 the current compilation unit. This flag should be set when processing the
1206 variable definitions in a package specification. EXTERN_FLAG is nonzero
1207 when processing an external variable declaration (as opposed to a
1208 definition: no storage is to be allocated for the variable here).
1210 STATIC_FLAG is only relevant when not at top level. In that case
1211 it indicates whether to always allocate storage to the variable.
1213 GNAT_NODE is used for the position of the decl. */
/* Build a VAR_DECL (or CONST_DECL for small constant objects) named
   VAR_NAME of TYPE, optionally initialized with VAR_INIT, set its
   storage/visibility flags, and push it on the current binding level.
   See the comment above for the parameter semantics.  NOTE(review):
   the declaration line of the init_const predicate is elided in this
   view; the visible part tests whether VAR_INIT is a valid constant
   initializer in static contexts or merely TREE_CONSTANT otherwise.  */
1216 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1217 bool const_flag, bool public_flag, bool extern_flag,
1218 bool static_flag, struct attrib *attr_list, Node_Id gnat_node)
1223 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1224 && (global_bindings_p () || static_flag
1225 ? 0 != initializer_constant_valid_p (var_init,
1226 TREE_TYPE (var_init))
1227 : TREE_CONSTANT (var_init))));
1229 = build_decl ((const_flag && init_const
1230 /* Only make a CONST_DECL for sufficiently-small objects.
1231 We consider complex double "sufficiently-small" */
1232 && TYPE_SIZE (type) != 0
1233 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1234 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1235 GET_MODE_SIZE (DCmode)))
1236 ? CONST_DECL : VAR_DECL, var_name, type);
1238 /* If this is external, throw away any initializations unless this is a
1239 CONST_DECL (meaning we have a constant); they will be done elsewhere.
1240 If we are defining a global here, leave a constant initialization and
1241 save any variable elaborations for the elaboration routine. If we are
1242 just annotating types, throw away the initialization if it isn't a
1244 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1245 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1246 var_init = NULL_TREE;
1248 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1249 try to fiddle with DECL_COMMON. However, on platforms that don't
1250 support global BSS sections, uninitialized global variables would
1251 go in DATA instead, thus increasing the size of the executable. */
1252 #if !defined(ASM_OUTPUT_BSS) && !defined(ASM_OUTPUT_ALIGNED_BSS)
1253 DECL_COMMON (var_decl) = !flag_no_common;
/* Record the (possibly discarded) initializer and the basic flags.  */
1255 DECL_INITIAL (var_decl) = var_init;
1256 TREE_READONLY (var_decl) = const_flag;
1257 DECL_EXTERNAL (var_decl) = extern_flag;
1258 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1259 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1260 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1261 = TYPE_VOLATILE (type);
1263 /* If it's public and not external, always allocate storage for it.
1264 At the global binding level we need to allocate static storage for the
1265 variable if and only if it's not external. If we are not at the top level
1266 we allocate automatic storage unless requested not to. */
1267 TREE_STATIC (var_decl)
1268 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1271 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1273 process_attributes (var_decl, attr_list);
1275 /* Add this decl to the current binding level. */
1276 gnat_pushdecl (var_decl, gnat_node);
/* A volatile object must be addressable so accesses are not optimized
   into registers.  */
1278 if (TREE_SIDE_EFFECTS (var_decl))
1279 TREE_ADDRESSABLE (var_decl) = 1;
1281 if (TREE_CODE (var_decl) != CONST_DECL)
1282 rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
1287 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1288 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1289 this field is in a record type with a "pragma pack". If SIZE is nonzero
1290 it is the specified size for this field. If POS is nonzero, it is the bit
1291 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1292 the address of this field for aliasing purposes. If it is negative, we
1293 should not make a bitfield, which is used by make_aligning_type. */
/* Build a FIELD_DECL named FIELD_NAME of FIELD_TYPE inside RECORD_TYPE,
   computing its size, alignment, bitfield status and (when POS is given)
   its exact bit position.  See the comment above for PACKED / SIZE /
   POS / ADDRESSABLE semantics.  NOTE(review): several lines of this body
   are elided in this view (e.g. parts of the size and position paths);
   comments cover only the visible logic.  */
1296 create_field_decl (tree field_name, tree field_type, tree record_type,
1297 int packed, tree size, tree pos, int addressable)
1299 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1301 DECL_CONTEXT (field_decl) = record_type;
1302 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1304 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1305 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1306 if (packed && TYPE_MODE (field_type) == BLKmode)
1307 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1309 /* If a size is specified, use it. Otherwise, if the record type is packed
1310 compute a size to use, which may differ from the object's natural size.
1311 We always set a size in this case to trigger the checks for bitfield
1312 creation below, which is typically required when no position has been
1315 size = convert (bitsizetype, size);
1316 else if (packed == 1)
1318 size = rm_size (field_type);
1320 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1322 if (TREE_CODE (size) == INTEGER_CST
1323 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1324 size = round_up (size, BITS_PER_UNIT);
1327 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1328 specified for two reasons: first if the size differs from the natural
1329 size. Second, if the alignment is insufficient. There are a number of
1330 ways the latter can be true.
1332 We never make a bitfield if the type of the field has a nonconstant size,
1333 because no such entity requiring bitfield operations should reach here.
1335 We do *preventively* make a bitfield when there might be the need for it
1336 but we don't have all the necessary information to decide, as is the case
1337 of a field with no specified position in a packed record.
1339 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1340 in layout_decl or finish_record_type to clear the bit_field indication if
1341 it is in fact not needed. */
1342 if (addressable >= 0
1344 && TREE_CODE (size) == INTEGER_CST
1345 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1346 && (!operand_equal_p (TYPE_SIZE (field_type), size, 0)
1347 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1349 || (TYPE_ALIGN (record_type) != 0
1350 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1352 DECL_BIT_FIELD (field_decl) = 1;
1353 DECL_SIZE (field_decl) = size;
/* Without an explicit position or packing, cap the field alignment at
   the record's alignment (0 record alignment means unconstrained).  */
1354 if (!packed && !pos)
1355 DECL_ALIGN (field_decl)
1356 = (TYPE_ALIGN (record_type) != 0
1357 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1358 : TYPE_ALIGN (field_type));
1361 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1362 DECL_ALIGN (field_decl)
1363 = MAX (DECL_ALIGN (field_decl),
1364 DECL_BIT_FIELD (field_decl) ? 1
1365 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1366 : TYPE_ALIGN (field_type));
1370 /* We need to pass in the alignment the DECL is known to have.
1371 This is the lowest-order bit set in POS, but no more than
1372 the alignment of the record, if one is specified. Note
1373 that an alignment of 0 is taken as infinite. */
1374 unsigned int known_align;
1376 if (host_integerp (pos, 1))
1377 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1379 known_align = BITS_PER_UNIT;
1381 if (TYPE_ALIGN (record_type)
1382 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1383 known_align = TYPE_ALIGN (record_type);
1385 layout_decl (field_decl, known_align);
1386 SET_DECL_OFFSET_ALIGN (field_decl,
1387 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1389 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1390 &DECL_FIELD_BIT_OFFSET (field_decl),
1391 DECL_OFFSET_ALIGN (field_decl), pos);
/* Mark that this field's position came from a representation clause.  */
1393 DECL_HAS_REP_P (field_decl) = 1;
1396 /* If the field type is passed by reference, we will have pointers to the
1397 field, so it is addressable. */
1398 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1401 /* ??? For now, we say that any field of aggregate type is addressable
1402 because the front end may take 'Reference of it. */
1403 if (AGGREGATE_TYPE_P (field_type))
1406 /* Mark the decl as nonaddressable if it is indicated so semantically,
1407 meaning we won't ever attempt to take the address of the field.
1409 It may also be "technically" nonaddressable, meaning that even if we
1410 attempt to take the field's address we will actually get the address of a
1411 copy. This is the case for true bitfields, but the DECL_BIT_FIELD value
1412 we have at this point is not accurate enough, so we don't account for
1413 this here and let finish_record_type decide. */
1414 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1419 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1420 effects, has the value of zero. */
/* Return nonzero if EXP has the value zero, looking through
   COMPOUND_EXPRs (whose first operand is side effects only).  */
1423 value_zerop (tree exp)
1425 if (TREE_CODE (exp) == COMPOUND_EXPR)
1426 return value_zerop (TREE_OPERAND (exp, 1));
1428 return integer_zerop (exp);
1431 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1432 PARAM_TYPE is its type. READONLY is true if the parameter is
1433 readonly (either an IN parameter or an address of a pass-by-ref
/* Build a PARM_DECL named PARAM_NAME of PARAM_TYPE, promoting small
   integer/enum parameters to int per the target's prototype-promotion
   rules, with special care for Ada biased integer subtypes.  */
1437 create_param_decl (tree param_name, tree param_type, bool readonly)
1439 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1441 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1442 lead to various ABI violations. */
1443 if (targetm.calls.promote_prototypes (param_type)
1444 && (TREE_CODE (param_type) == INTEGER_TYPE
1445 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1446 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1448 /* We have to be careful about biased types here. Make a subtype
1449 of integer_type_node with the proper biasing. */
1450 if (TREE_CODE (param_type) == INTEGER_TYPE
1451 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* Copy the range type so setting the biased flag below does not
   clobber a shared node.  */
1454 = copy_type (build_range_type (integer_type_node,
1455 TYPE_MIN_VALUE (param_type),
1456 TYPE_MAX_VALUE (param_type)));
1458 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1461 param_type = integer_type_node;
/* Record the promoted type in both ABI slots.  */
1464 DECL_ARG_TYPE (param_decl) = param_type;
1465 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1466 TREE_READONLY (param_decl) = readonly;
1470 /* Given a DECL and ATTR_LIST, process the listed attributes. */
/* Walk ATTR_LIST and apply each attribute to DECL: machine attributes
   via decl_attributes, link aliases, weak externals (when supported),
   and named sections (when supported).  Unsupported cases emit a
   warning anchored at the attribute's error point.  */
1473 process_attributes (tree decl, struct attrib *attr_list)
1475 for (; attr_list; attr_list = attr_list->next)
1476 switch (attr_list->type)
1478 case ATTR_MACHINE_ATTRIBUTE:
1479 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1481 ATTR_FLAG_TYPE_IN_PLACE);
1484 case ATTR_LINK_ALIAS:
/* An alias needs a definition, hence TREE_STATIC.  */
1485 TREE_STATIC (decl) = 1;
1486 assemble_alias (decl, attr_list->name);
1489 case ATTR_WEAK_EXTERNAL:
1491 declare_weak (decl);
1493 post_error ("?weak declarations not supported on this target",
1494 attr_list->error_point);
1497 case ATTR_LINK_SECTION:
1498 if (targetm.have_named_sections)
1500 DECL_SECTION_NAME (decl)
1501 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1502 IDENTIFIER_POINTER (attr_list->name));
/* A decl with an explicit section cannot be common.  */
1503 DECL_COMMON (decl) = 0;
1506 post_error ("?section attributes are not supported for this target",
1507 attr_list->error_point);
1512 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
/* Return true if VALUE is provably a multiple of FACTOR: either a
   constant divisible by it, or a MULT_EXPR with a divisible operand.
   (Conservative: other forms fall through to the non-visible default.)  */
1516 value_factor_p (tree value, HOST_WIDE_INT factor)
1518 if (host_integerp (value, 1))
1519 return tree_low_cst (value, 1) % factor == 0;
1521 if (TREE_CODE (value) == MULT_EXPR)
1522 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1523 || value_factor_p (TREE_OPERAND (value, 1), factor));
1528 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1529 unless we can prove these 2 fields are laid out in such a way that no gap
1530 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1531 is the distance in bits between the end of PREV_FIELD and the starting
1532 position of CURR_FIELD. It is ignored if null. */
/* Return true unless we can prove no gap exists between the end of
   PREV_FIELD and the start of CURR_FIELD; OFFSET, if non-null, is the
   bit distance between them.  Each early case below proves either
   "gap" or "no gap"; the final fallback conservatively reports a
   potential gap.  */
1535 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1537 /* If this is the first field of the record, there cannot be any gap */
1541 /* If the previous field is a union type, then return False: The only
1542 time when such a field is not the last field of the record is when
1543 there are other components at fixed positions after it (meaning there
1544 was a rep clause for every field), in which case we don't want the
1545 alignment constraint to override them. */
1546 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1549 /* If the distance between the end of prev_field and the begining of
1550 curr_field is constant, then there is a gap if the value of this
1551 constant is not null. */
1552 if (offset && host_integerp (offset, 1))
1553 return !integer_zerop (offset);
1555 /* If the size and position of the previous field are constant,
1556 then check the sum of this size and position. There will be a gap
1557 iff it is not multiple of the current field alignment. */
1558 if (host_integerp (DECL_SIZE (prev_field), 1)
1559 && host_integerp (bit_position (prev_field), 1))
1560 return ((tree_low_cst (bit_position (prev_field), 1)
1561 + tree_low_cst (DECL_SIZE (prev_field), 1))
1562 % DECL_ALIGN (curr_field) != 0);
1564 /* If both the position and size of the previous field are multiples
1565 of the current field alignment, there can not be any gap. */
1566 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1567 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1570 /* Fallback, return that there may be a potential gap */
1574 /* Returns a LABEL_DECL node for LABEL_NAME. */
/* Build a LABEL_DECL named LABEL_NAME in the current function, with
   VOIDmode and the current source location.  */
1577 create_label_decl (tree label_name)
1579 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1581 DECL_CONTEXT (label_decl) = current_function_decl;
1582 DECL_MODE (label_decl) = VOIDmode;
1583 DECL_SOURCE_LOCATION (label_decl) = input_location;
1588 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1589 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1590 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1591 PARM_DECL nodes chained through the TREE_CHAIN field).
1593 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1594 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
/* Build a FUNCTION_DECL for a subprogram of SUBPROG_TYPE, wire up its
   arguments and RESULT_DECL, apply flags and attributes, push it on the
   current binding level and hand it to rest_of_decl_compilation.  See
   the comment above for the parameter semantics.  */
1597 create_subprog_decl (tree subprog_name, tree asm_name,
1598 tree subprog_type, tree param_decl_list, bool inline_flag,
1599 bool public_flag, bool extern_flag,
1600 struct attrib *attr_list, Node_Id gnat_node)
1602 tree return_type = TREE_TYPE (subprog_type);
1603 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1605 /* If this is a function nested inside an inlined external function, it
1606 means we aren't going to compile the outer function unless it is
1607 actually inlined, so do the same for us. */
1608 if (current_function_decl && DECL_INLINE (current_function_decl)
1609 && DECL_EXTERNAL (current_function_decl))
1612 DECL_EXTERNAL (subprog_decl) = extern_flag;
1613 TREE_PUBLIC (subprog_decl) = public_flag;
1614 TREE_STATIC (subprog_decl) = 1;
/* Mirror the type's const/volatile qualifiers onto the decl.  */
1615 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1616 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1617 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1618 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
/* The RESULT_DECL is compiler-generated and invisible to the debugger.  */
1619 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1620 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1621 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1624 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1627 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1629 process_attributes (subprog_decl, attr_list);
1631 /* Add this decl to the current binding level. */
1632 gnat_pushdecl (subprog_decl, gnat_node);
1634 /* Output the assembler code and/or RTL for the declaration. */
1635 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1637 return subprog_decl;
1640 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1641 body. This routine needs to be invoked before processing the declarations
1642 appearing in the subprogram. */
/* Start code generation for SUBPROG_DECL: make it the current function,
   announce it, claim its parameters, build its RTL, and discard any
   pending sizes (handled via type elaboration instead).  Must be called
   before processing the subprogram's declarations.  */
1645 begin_subprog_body (tree subprog_decl)
1649 current_function_decl = subprog_decl;
1650 announce_function (subprog_decl);
1652 /* Enter a new binding level and show that all the parameters belong to
/* Reparent every PARM_DECL to this subprogram.  */
1655 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1656 param_decl = TREE_CHAIN (param_decl))
1657 DECL_CONTEXT (param_decl) = subprog_decl;
1659 make_decl_rtl (subprog_decl);
1661 /* We handle pending sizes via the elaboration of types, so we don't need to
1662 save them. This causes them to be marked as part of the outer function
1663 and then discarded. */
1664 get_pending_sizes ();
1667 /* Finish the definition of the current subprogram and compile it all the way
1668 to assembler language output. BODY is the tree corresponding to
/* Finish the current subprogram with BODY as its GENERIC body: close
   its binding level, decide inlining, attach BODY, pop back to the
   enclosing function, and hand the function to cgraph (fully for
   top-level functions, registration-only for nested ones).  */
1672 end_subprog_body (tree body)
1674 tree fndecl = current_function_decl;
1676 /* Mark the BLOCK for this level as being for this function and pop the
1677 level. Since the vars in it are the parameters, clear them. */
1678 BLOCK_VARS (current_binding_level->block) = 0;
1679 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1680 DECL_INITIAL (fndecl) = current_binding_level->block;
1683 /* Deal with inline. If declared inline or we should default to inline,
1684 set the flag in the decl. */
1685 DECL_INLINE (fndecl)
1686 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
1688 /* We handle pending sizes via the elaboration of types, so we don't
1689 need to save them. */
1690 get_pending_sizes ();
1692 /* Mark the RESULT_DECL as being in this subprogram. */
1693 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1695 DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function (if any) as current.  */
1697 current_function_decl = DECL_CONTEXT (fndecl);
1700 /* If we're only annotating types, don't actually compile this function. */
1701 if (type_annotate_only)
1704 /* We do different things for nested and non-nested functions.
1705 ??? This should be in cgraph. */
1706 if (!DECL_CONTEXT (fndecl))
1708 gnat_gimplify_function (fndecl);
1709 cgraph_finalize_function (fndecl, false);
1712 /* Register this function with cgraph just far enough to get it
1713 added to our parent's nested function list. */
1714 (void) cgraph_node (fndecl);
1717 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
/* Convert FNDECL's body to GIMPLE (dumping before and after), then
   recurse into its nested functions via the callgraph so nested VLA
   sizes etc. are expanded in the right context.  */
1720 gnat_gimplify_function (tree fndecl)
1722 struct cgraph_node *cgn;
1724 dump_function (TDI_original, fndecl);
1725 gimplify_function_tree (fndecl);
1726 dump_function (TDI_generic, fndecl);
1728 /* Convert all nested functions to GIMPLE now. We do things in this order
1729 so that items like VLA sizes are expanded properly in the context of the
1730 correct function. */
1731 cgn = cgraph_node (fndecl);
1732 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1733 gnat_gimplify_function (cgn->decl);
1736 /* Return a definition for a builtin function named NAME and whose data type
1737 is TYPE. TYPE should be a function type with argument types.
1738 FUNCTION_CODE tells later passes how to compile calls to this function.
1739 See tree.h for its possible values.
1741 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1742 the name to be called if we can't opencode the function. If
1743 ATTRS is nonzero, use that for the function attribute list. */
/* Register a builtin FUNCTION_DECL named NAME of TYPE with the given
   builtin CLASS and FUNCTION_CODE; LIBRARY_NAME, if given, becomes its
   assembler name, and ATTRS its attribute list.  See the comment above.  */
1746 builtin_function (const char *name, tree type, int function_code,
1747 enum built_in_class class, const char *library_name,
1750 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
/* Builtins are externally-defined, globally-visible declarations.  */
1752 DECL_EXTERNAL (decl) = 1;
1753 TREE_PUBLIC (decl) = 1;
1755 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1757 gnat_pushdecl (decl, Empty);
1758 DECL_BUILT_IN_CLASS (decl) = class;
1759 DECL_FUNCTION_CODE (decl) = function_code;
1761 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1765 /* Return an integer type with the number of bits of precision given by
1766 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1767 it is a signed type. */
/* Return a signed or unsigned (per UNSIGNEDP) integer type of PRECISION
   bits, caching results up to 2 * MAX_BITS_PER_WORD in the
   signed_and_unsigned_types table and naming new types (UN)SIGNED_<n>.  */
1770 gnat_type_for_size (unsigned precision, int unsignedp)
/* Cache hit: reuse the previously-built type.  */
1775 if (precision <= 2 * MAX_BITS_PER_WORD
1776 && signed_and_unsigned_types[precision][unsignedp])
1777 return signed_and_unsigned_types[precision][unsignedp];
1780 t = make_unsigned_type (precision);
1782 t = make_signed_type (precision);
1784 if (precision <= 2 * MAX_BITS_PER_WORD)
1785 signed_and_unsigned_types[precision][unsignedp] = t;
1789 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1790 TYPE_NAME (t) = get_identifier (type_name);
1796 /* Likewise for floating-point types. */
/* Return a REAL_TYPE of PRECISION bits for machine MODE, caching one
   type per mode in float_types and naming new types FLOAT_<n>.  The
   assertion checks that laying out the type actually yields MODE.  */
1799 float_type_for_precision (int precision, enum machine_mode mode)
1804 if (float_types[(int) mode])
1805 return float_types[(int) mode];
1807 float_types[(int) mode] = t = make_node (REAL_TYPE);
1808 TYPE_PRECISION (t) = precision;
1811 gcc_assert (TYPE_MODE (t) == mode);
1814 sprintf (type_name, "FLOAT_%d", precision);
1815 TYPE_NAME (t) = get_identifier (type_name);
1821 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1822 an unsigned type; otherwise a signed type is returned. */
/* Return a type with machine mode MODE (UNSIGNEDP selects signedness
   for integer modes); BLKmode and unhandled modes fall through to the
   non-visible default cases.  */
1825 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1827 if (mode == BLKmode)
1829 else if (mode == VOIDmode)
1830 return void_type_node;
1831 else if (COMPLEX_MODE_P (mode))
1833 else if (SCALAR_FLOAT_MODE_P (mode))
1834 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1835 else if (SCALAR_INT_MODE_P (mode))
1836 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1841 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
/* Return the unsigned version of TYPE_NODE, preserving a link to the
   modular (Ada "mod") type via TREE_TYPE on a private copy when
   TYPE_NODE — or its base — is modular.  */
1844 gnat_unsigned_type (tree type_node)
1846 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1848 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1850 type = copy_node (type);
1851 TREE_TYPE (type) = type_node;
1853 else if (TREE_TYPE (type_node)
1854 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1855 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1857 type = copy_node (type);
1858 TREE_TYPE (type) = TREE_TYPE (type_node);
1864 /* Return the signed version of a TYPE_NODE, a scalar type. */
/* Return the signed version of TYPE_NODE; mirrors gnat_unsigned_type's
   handling of modular types (the modular base is recorded in TREE_TYPE
   of a private copy).  */
1867 gnat_signed_type (tree type_node)
1869 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1871 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1873 type = copy_node (type);
1874 TREE_TYPE (type) = type_node;
1876 else if (TREE_TYPE (type_node)
1877 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1878 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1880 type = copy_node (type);
1881 TREE_TYPE (type) = TREE_TYPE (type_node);
1887 /* Return a type the same as TYPE except unsigned or signed according to
/* Return TYPE itself when it is non-integral or already has the
   requested signedness; otherwise build the same-precision type with
   signedness UNSIGNEDP.  */
1891 gnat_signed_or_unsigned_type (int unsignedp, tree type)
1893 if (!INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
1896 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
1899 /* EXP is an expression for the size of an object. If this size contains
1900 discriminant references, replace them with the maximum (if MAX_P) or
1901 minimum (if !MAX_P) possible value of the discriminant. */
/* Rewrite size expression EXP with every discriminant reference replaced
   by the discriminant's maximum (MAX_P) or minimum (!MAX_P) value,
   dispatching on the tree-code class of EXP.  NOTE(review): some case
   bodies are elided in this view; comments cover the visible paths.  */
1904 max_size (tree exp, bool max_p)
1906 enum tree_code code = TREE_CODE (exp);
1907 tree type = TREE_TYPE (exp);
1909 switch (TREE_CODE_CLASS (code))
1911 case tcc_declaration:
1915 case tcc_exceptional:
/* TREE_LIST: recurse on the value and the rest of the chain.  */
1916 if (code == TREE_LIST)
1917 return tree_cons (TREE_PURPOSE (exp),
1918 max_size (TREE_VALUE (exp), max_p),
1920 ? max_size (TREE_CHAIN (exp), max_p) : NULL_TREE);
1924 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
1925 modify. Otherwise, we treat it like a variable. */
1926 if (!CONTAINS_PLACEHOLDER_P (exp))
/* Replace the discriminant reference by its extreme bound.  */
1929 type = TREE_TYPE (TREE_OPERAND (exp, 1));
1931 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* Comparisons contribute 0 or 1 to a size.  */
1933 case tcc_comparison:
1934 return max_p ? size_one_node : size_zero_node;
1938 case tcc_expression:
1939 switch (TREE_CODE_LENGTH (code))
1942 if (code == NON_LVALUE_EXPR)
1943 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary op: NEGATE flips which extreme we want for the operand.  */
1946 fold (build1 (code, type,
1947 max_size (TREE_OPERAND (exp, 0),
1948 code == NEGATE_EXPR ? !max_p : max_p)));
1951 if (code == COMPOUND_EXPR)
1952 return max_size (TREE_OPERAND (exp, 1), max_p);
/* Binary op: MINUS flips the extreme wanted for its RHS.  */
1955 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
1956 tree rhs = max_size (TREE_OPERAND (exp, 1),
1957 code == MINUS_EXPR ? !max_p : max_p);
1959 /* Special-case wanting the maximum value of a MIN_EXPR.
1960 In that case, if one side overflows, return the other.
1961 sizetype is signed, but we know sizes are non-negative.
1962 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
1963 overflowing or the maximum possible value and the RHS
1965 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
1967 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
1969 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1970 && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
1971 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
1972 && !TREE_CONSTANT (rhs))
1975 return fold (build2 (code, type, lhs, rhs));
1979 if (code == SAVE_EXPR)
1981 else if (code == COND_EXPR)
/* A conditional size becomes the MAX (or MIN) of its two arms.  */
1982 return fold (build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
1983 max_size (TREE_OPERAND (exp, 1), max_p),
1984 max_size (TREE_OPERAND (exp, 2), max_p)));
1985 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1))
1986 return build3 (CALL_EXPR, type, TREE_OPERAND (exp, 0),
1987 max_size (TREE_OPERAND (exp, 1), max_p), NULL);
1990 /* Other tree classes cannot happen. */
1998 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
1999 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2000 Return a constructor for the template. */
2003 build_template (tree template_type, tree array_type, tree expr)
2005 tree template_elts = NULL_TREE;
2006 tree bound_list = NULL_TREE;
 /* If ARRAY_TYPE is really a padded or justified-modular record, the
    underlying array is its single field.  */
2009 if (TREE_CODE (array_type) == RECORD_TYPE
2010 && (TYPE_IS_PADDING_P (array_type)
2011 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2012 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
 /* An INTEGER_TYPE with TYPE_HAS_ACTUAL_BOUNDS_P stands for a packed
    array; in that case and for a true array, fetch the recorded bounds.  */
2014 if (TREE_CODE (array_type) == ARRAY_TYPE
2015 || (TREE_CODE (array_type) == INTEGER_TYPE
2016 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2017 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2019 /* First make the list for a CONSTRUCTOR for the template. Go down the
2020 field list of the template instead of the type chain because this
2021 array might be an Ada array of arrays and we can't tell where the
2022 nested arrays stop being the underlying object. */
 /* Each dimension contributes a (min, max) field pair, hence the double
    TREE_CHAIN step over FIELD below.  */
2024 for (field = TYPE_FIELDS (template_type); field;
2026 ? (bound_list = TREE_CHAIN (bound_list))
2027 : (array_type = TREE_TYPE (array_type))),
2028 field = TREE_CHAIN (TREE_CHAIN (field)))
2030 tree bounds, min, max;
2032 /* If we have a bound list, get the bounds from there. Likewise
2033 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2034 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2035 This will give us a maximum range. */
2037 bounds = TREE_VALUE (bound_list);
2038 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2039 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type))
2040 else if (expr && TREE_CODE (expr) == PARM_DECL
2041 && DECL_BY_COMPONENT_PTR_P (expr))
2042 bounds = TREE_TYPE (field);
 /* Convert each bound to the type of the template field that will hold
    it: FIELD receives the minimum and TREE_CHAIN (FIELD) the maximum,
    matching the tree_cons pairing below.  */
2046 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2047 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2049 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2050 substitute it from OBJECT. */
2051 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2052 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
 /* Elements are consed in reverse dimension order; nreverse below
    restores the declared order.  */
2054 template_elts = tree_cons (TREE_CHAIN (field), max,
2055 tree_cons (field, min, template_elts));
2058 return gnat_build_constructor (template_type, nreverse (template_elts));
2061 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2062 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2063 in the type contains in its DECL_INITIAL the expression to use when
2064 a constructor is made for the type. GNAT_ENTITY is an entity used
2065 to print out an error message if the mechanism cannot be applied to
2066 an object of that type and also for the name. */
2069 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2071 tree record_type = make_node (RECORD_TYPE);
2072 tree field_list = 0;
2081 /* If TYPE is an unconstrained array, use the underlying array type. */
2082 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2083 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2085 /* If this is an array, compute the number of dimensions in the array,
2086 get the index types, and point to the inner type. */
2087 if (TREE_CODE (type) != ARRAY_TYPE)
2090 for (ndim = 1, inner_type = type;
2091 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2092 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2093 ndim++, inner_type = TREE_TYPE (inner_type))
2096 idx_arr = (tree *) alloca (ndim * sizeof (tree));
 /* Collect the index types.  Fortran-convention arrays (except for NCA
    descriptors) store dimensions in reverse, hence the descending fill.  */
2098 if (mech != By_Descriptor_NCA
2099 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2100 for (i = ndim - 1, inner_type = type;
2102 i--, inner_type = TREE_TYPE (inner_type))
2103 idx_arr[i] = TYPE_DOMAIN (inner_type);
2105 for (i = 0, inner_type = type;
2107 i++, inner_type = TYPE_TYPE (inner_type))
2108 idx_arr[i] = TYPE_DOMAIN (inner_type);
2110 /* Now get the DTYPE value. */
 /* NOTE(review): the numeric codes below (2..9, 25, 26, 52..55) appear to
    be VMS descriptor DSC$K_DTYPE_* codes selected by size and signedness —
    confirm against the OpenVMS calling standard.  */
2111 switch (TREE_CODE (type))
2115 if (TYPE_VAX_FLOATING_POINT_P (type))
2116 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2129 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2132 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2135 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2138 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2141 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2144 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2150 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2154 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2155 && TYPE_VAX_FLOATING_POINT_P (type))
2156 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2168 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2179 /* Get the CLASS value. */
2182 case By_Descriptor_A:
2185 case By_Descriptor_NCA:
2188 case By_Descriptor_SB:
2195 /* Make the type for a descriptor for VMS. The first four fields
2196 are the same for all types. */
 /* LENGTH: element size for array (A) descriptors, object size otherwise.  */
2199 = chainon (field_list,
2200 make_descriptor_field
2201 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2202 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2204 field_list = chainon (field_list,
2205 make_descriptor_field ("DTYPE",
2206 gnat_type_for_size (8, 1),
2207 record_type, size_int (dtype)));
2208 field_list = chainon (field_list,
2209 make_descriptor_field ("CLASS",
2210 gnat_type_for_size (8, 1),
2211 record_type, size_int (class)));
 /* POINTER field: a 32-bit (SImode) pointer to the object itself, expressed
    via a PLACEHOLDER_EXPR so it is filled in when a constructor is built.  */
2214 = chainon (field_list,
2215 make_descriptor_field
2217 build_pointer_type_for_mode (type, SImode, false), record_type,
2219 build_pointer_type_for_mode (type, SImode, false),
2220 build0 (PLACEHOLDER_EXPR, type))));
2225 case By_Descriptor_S:
2228 case By_Descriptor_SB:
 /* String-with-bounds descriptors carry the single dimension's bounds.  */
2230 = chainon (field_list,
2231 make_descriptor_field
2232 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2233 TREE_CODE (type) == ARRAY_TYPE
2234 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2236 = chainon (field_list,
2237 make_descriptor_field
2238 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2239 TREE_CODE (type) == ARRAY_TYPE
2240 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2243 case By_Descriptor_A:
2244 case By_Descriptor_NCA:
2245 field_list = chainon (field_list,
2246 make_descriptor_field ("SCALE",
2247 gnat_type_for_size (8, 1),
2251 field_list = chainon (field_list,
2252 make_descriptor_field ("DIGITS",
2253 gnat_type_for_size (8, 1),
2258 = chainon (field_list,
2259 make_descriptor_field
2260 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2261 size_int (mech == By_Descriptor_NCA
2263 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2264 : (TREE_CODE (type) == ARRAY_TYPE
2265 && TYPE_CONVENTION_FORTRAN_P (type)
2268 field_list = chainon (field_list,
2269 make_descriptor_field ("DIMCT",
2270 gnat_type_for_size (8, 1),
2274 field_list = chainon (field_list,
2275 make_descriptor_field ("ARSIZE",
2276 gnat_type_for_size (32, 1),
2278 size_in_bytes (type)));
2280 /* Now build a pointer to the 0,0,0... element. */
2281 tem = build0 (PLACEHOLDER_EXPR, type);
2282 for (i = 0, inner_type = type; i < ndim;
2283 i++, inner_type = TREE_TYPE (inner_type))
2284 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2285 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2286 NULL_TREE, NULL_TREE);
2289 = chainon (field_list,
2290 make_descriptor_field
2292 build_pointer_type_for_mode (inner_type, SImode, false),
2295 build_pointer_type_for_mode (inner_type, SImode,
2299 /* Next come the addressing coefficients. */
 /* Each coefficient is the running product of the extents of the inner
    dimensions (extent = max - min + 1).  */
2301 for (i = 0; i < ndim; i++)
2305 = size_binop (MULT_EXPR, tem,
2306 size_binop (PLUS_EXPR,
2307 size_binop (MINUS_EXPR,
2308 TYPE_MAX_VALUE (idx_arr[i]),
2309 TYPE_MIN_VALUE (idx_arr[i])),
 /* NCA descriptors name the coefficient fields S<i>, others M<i>.  */
2312 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2313 fname[1] = '0' + i, fname[2] = 0;
2315 = chainon (field_list,
2316 make_descriptor_field (fname,
2317 gnat_type_for_size (32, 1),
2318 record_type, idx_length));
2320 if (mech == By_Descriptor_NCA)
2324 /* Finally here are the bounds. */
 /* For each dimension emit an L<i> (lower) and an upper-bound field.  */
2325 for (i = 0; i < ndim; i++)
2329 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2331 = chainon (field_list,
2332 make_descriptor_field
2333 (fname, gnat_type_for_size (32, 1), record_type,
2334 TYPE_MIN_VALUE (idx_arr[i])));
2338 = chainon (field_list,
2339 make_descriptor_field
2340 (fname, gnat_type_for_size (32, 1), record_type,
2341 TYPE_MAX_VALUE (idx_arr[i])));
 /* Any other mechanism cannot be mapped to a VMS descriptor.  */
2346 post_error ("unsupported descriptor type for &", gnat_entity);
2349 finish_record_type (record_type, field_list, false, true);
2350 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2351 NULL, true, false, gnat_entity);
2356 /* Utility routine for above code to make a field. */
 /* Create a FIELD_DECL named NAME of type TYPE in REC_TYPE and record
    INITIAL in its DECL_INITIAL, the expression build_vms_descriptor uses
    when constructing a descriptor value.  */
2359 make_descriptor_field (const char *name, tree type,
2360 tree rec_type, tree initial)
2363 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2365 DECL_INITIAL (field) = initial;
2369 /* Build a type to be used to represent an aliased object whose nominal
2370 type is an unconstrained array. This consists of a RECORD_TYPE containing
2371 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2372 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2373 is used to represent an arbitrary unconstrained object. Use NAME
2374 as the name of the record. */
2377 build_unc_object_type (tree template_type, tree object_type, tree name)
2379 tree type = make_node (RECORD_TYPE);
 /* The BOUNDS field comes first so the template (bounds) precedes the
    array data in the object layout.  */
2380 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2381 template_type, type, 0, 0, 0, 1);
2382 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2385 TYPE_NAME (type) = name;
 /* Mark the record so other code knows it embeds a bounds template.  */
2386 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2387 finish_record_type (type,
2388 chainon (chainon (NULL_TREE, template_field),
2395 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2396 the normal case this is just two adjustments, but we have more to do
2397 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2400 update_pointer_to (tree old_type, tree new_type)
2402 tree ptr = TYPE_POINTER_TO (old_type);
2403 tree ref = TYPE_REFERENCE_TO (old_type);
2407 /* If this is the main variant, process all the other variants first. */
2408 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2409 for (type = TYPE_NEXT_VARIANT (old_type); type;
2410 type = TYPE_NEXT_VARIANT (type))
2411 update_pointer_to (type, new_type);
2413 /* If no pointer or reference, we are done. */
2417 /* Merge the old type qualifiers in the new type.
2419 Each old variant has qualifiers for specific reasons, and the new
2420 designated type as well. Each set of qualifiers represents useful
2421 information grabbed at some point, and merging the two simply unifies
2422 these inputs into the final type description.
2424 Consider for instance a volatile type frozen after an access to constant
2425 type designating it. After the designated type freeze, we get here with a
2426 volatile new_type and a dummy old_type with a readonly variant, created
2427 when the access type was processed. We shall make a volatile and readonly
2428 designated type, because that's what it really is.
2430 We might also get here for a non-dummy old_type variant with different
2431 qualifiers than the new_type ones, for instance in some cases of pointers
2432 to private record type elaboration (see the comments around the call to
2433 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2434 qualifiers in those cases too, to avoid accidentally discarding the
2435 initial set, and will often end up with old_type == new_type then. */
2436 new_type = build_qualified_type (new_type,
2437 TYPE_QUALS (old_type)
2438 | TYPE_QUALS (new_type));
2440 /* If the new type and the old one are identical, there is nothing to
2442 if (old_type == new_type)
2445 /* Otherwise, first handle the simple case. */
2446 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2448 TYPE_POINTER_TO (new_type) = ptr;
2449 TYPE_REFERENCE_TO (new_type) = ref;
 /* Walk every pointer type to OLD_TYPE, and every variant of each,
    retargeting them at NEW_TYPE.  */
2451 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
2452 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
2453 ptr1 = TYPE_NEXT_VARIANT (ptr1))
2455 TREE_TYPE (ptr1) = new_type;
2457 if (TYPE_NAME (ptr1)
2458 && TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
2459 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2460 rest_of_decl_compilation (TYPE_NAME (ptr1),
2461 global_bindings_p (), 0);
 /* Likewise for every reference type; references chain via
    TYPE_NEXT_REF_TO, not TYPE_NEXT_PTR_TO.  */
2464 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
2465 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
2466 ref1 = TYPE_NEXT_VARIANT (ref1))
2468 TREE_TYPE (ref1) = new_type;
2470 if (TYPE_NAME (ref1)
2471 && TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
2472 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2473 rest_of_decl_compilation (TYPE_NAME (ref1),
2474 global_bindings_p (), 0);
2478 /* Now deal with the unconstrained array case. In this case the "pointer"
2479 is actually a RECORD_TYPE where the types of both fields are
2480 pointers to void. In that case, copy the field list from the
2481 old type to the new one and update the fields' context. */
2482 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
2487 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2492 SET_DECL_ORIGINAL_FIELD (TYPE_FIELDS (ptr),
2493 TYPE_FIELDS (TYPE_POINTER_TO (new_type)));
2494 SET_DECL_ORIGINAL_FIELD (TREE_CHAIN (TYPE_FIELDS (ptr)),
2495 TREE_CHAIN (TYPE_FIELDS
2496 (TYPE_POINTER_TO (new_type))));
2498 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2499 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2500 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2502 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2505 ??? This is now the only use of gnat_substitute_in_type, which
2506 is now a very "heavy" routine to do this, so it should be replaced
2508 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2509 new_ref = build3 (COMPONENT_REF, ptr_temp_type,
2510 build0 (PLACEHOLDER_EXPR, ptr),
2511 TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);
2514 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2515 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2516 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2518 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2519 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2521 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2522 = TREE_TYPE (new_type) = ptr;
2524 /* Now handle updating the allocation record, what the thin pointer
2525 points to. Update all pointers from the old record into the new
2526 one, update the types of the fields, and recompute the size. */
2528 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2530 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2531 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2532 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2533 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2534 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2535 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2536 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
 /* Recompute the record size as the sum of the two field sizes.  */
2538 TYPE_SIZE (new_obj_rec)
2539 = size_binop (PLUS_EXPR,
2540 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2541 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2542 TYPE_SIZE_UNIT (new_obj_rec)
2543 = size_binop (PLUS_EXPR,
2544 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2545 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2546 rest_of_type_compilation (ptr, global_bindings_p ());
2550 /* Convert a pointer to a constrained array into a pointer to a fat
2551 pointer. This involves making or finding a template. */
2554 convert_to_fat_pointer (tree type, tree expr)
 /* The second field of a fat pointer is a pointer to the template, so the
    template type is two TREE_TYPE hops from that field.  */
2556 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2557 tree template, template_addr;
2558 tree etype = TREE_TYPE (expr);
2560 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2561 pointer to the template and array. */
2562 if (integer_zerop (expr))
2564 gnat_build_constructor
2566 tree_cons (TYPE_FIELDS (type),
2567 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2568 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2569 convert (build_pointer_type (template_type),
2573 /* If EXPR is a thin pointer, make the template and data from the record. */
2575 else if (TYPE_THIN_POINTER_P (etype))
2577 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
 /* Save EXPR since it is dereferenced twice below (template and data);
    if it is an ADDR_EXPR we can use the object directly instead.  */
2579 expr = save_expr (expr);
2580 if (TREE_CODE (expr) == ADDR_EXPR)
2581 expr = TREE_OPERAND (expr, 0);
2583 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
 /* First field of the thin record is the template, second is the data.  */
2585 template = build_component_ref (expr, NULL_TREE, fields, false);
2586 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2587 build_component_ref (expr, NULL_TREE,
2588 TREE_CHAIN (fields), false));
2591 /* Otherwise, build the constructor for the template. */
2592 template = build_template (template_type, TREE_TYPE (etype), expr);
2594 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2596 /* The result is a CONSTRUCTOR for the fat pointer.
2598 If expr is an argument of a foreign convention subprogram, the type it
2599 points to is directly the component type. In this case, the expression
2600 type may not match the corresponding FIELD_DECL type at this point, so we
2601 call "convert" here to fix that up if necessary. This type consistency is
2602 required, for instance because it ensures that possible later folding of
2603 component_refs against this constructor always yields something of the
2604 same type as the initial reference.
2606 Note that the call to "build_template" above is still fine, because it
2607 will only refer to the provided template_type in this case. */
2609 gnat_build_constructor
2610 (type, tree_cons (TYPE_FIELDS (type),
2611 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2612 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2613 template_addr, NULL_TREE)));
2616 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2617 is something that is a fat pointer, so convert to it first if EXPR
2618 is not already a fat pointer. */
2621 convert_to_thin_pointer (tree type, tree expr)
2623 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2625 = convert_to_fat_pointer
2626 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2628 /* We get the pointer to the data and use a NOP_EXPR to make it the
 /* The first field of a fat pointer is the pointer to the array data.  */
2630 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
2632 expr = build1 (NOP_EXPR, type, expr);
2637 /* Create an expression whose value is that of EXPR,
2638 converted to type TYPE. The TREE_TYPE of the value
2639 is always TYPE. This function implements all reasonable
2640 conversions; callers should filter out those that are
2641 not permitted by the language being compiled. */
2644 convert (tree type, tree expr)
2646 enum tree_code code = TREE_CODE (type);
2647 tree etype = TREE_TYPE (expr);
2648 enum tree_code ecode = TREE_CODE (etype);
2651 /* If EXPR is already the right type, we are done. */
2655 /* If the input type has padding, remove it by doing a component reference
2656 to the field. If the output type has padding, make a constructor
2657 to build the record. If both input and output have padding and are
2658 of variable size, do this as an unchecked conversion. */
2659 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
2660 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2661 && (!TREE_CONSTANT (TYPE_SIZE (type))
2662 || !TREE_CONSTANT (TYPE_SIZE (etype))))
2664 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2666 /* If we have just converted to this padded type, just get
2667 the inner expression. */
2668 if (TREE_CODE (expr) == CONSTRUCTOR
2669 && CONSTRUCTOR_ELTS (expr)
2670 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2671 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2673 return convert (type,
2674 build_component_ref (expr, NULL_TREE,
2675 TYPE_FIELDS (etype), false));
2677 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2679 /* If we previously converted from another type and our type is
2680 of variable size, remove the conversion to avoid the need for
2681 variable-size temporaries. */
2682 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2683 && !TREE_CONSTANT (TYPE_SIZE (type)))
2684 expr = TREE_OPERAND (expr, 0);
2686 /* If we are just removing the padding from expr, convert the original
2687 object if we have variable size. That will avoid the need
2688 for some variable-size temporaries. */
2689 if (TREE_CODE (expr) == COMPONENT_REF
2690 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2691 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2692 && !TREE_CONSTANT (TYPE_SIZE (type)))
2693 return convert (type, TREE_OPERAND (expr, 0));
2695 /* If the result type is a padded type with a self-referentially-sized
2696 field and the expression type is a record, do this as an
2697 unchecked conversion. */
2698 else if (TREE_CODE (etype) == RECORD_TYPE
2699 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2700 return unchecked_convert (type, expr, false);
 /* Otherwise, wrap EXPR in a one-field CONSTRUCTOR of the padded type.  */
2704 gnat_build_constructor (type,
2705 tree_cons (TYPE_FIELDS (type),
2707 (TYPE_FIELDS (type)),
2712 /* If the input is a biased type, adjust first. */
 /* A biased value stores VALUE - MIN; add the bias back before converting.  */
2713 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2714 return convert (type, fold (build2 (PLUS_EXPR, TREE_TYPE (etype),
2715 fold (build1 (NOP_EXPR,
2718 TYPE_MIN_VALUE (etype))));
2720 /* If the input is a justified modular type, we need to extract
2721 the actual object before converting it to any other type with the
2722 exception of an unconstrained array. */
2723 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
2724 && code != UNCONSTRAINED_ARRAY_TYPE)
2725 return convert (type, build_component_ref (expr, NULL_TREE,
2726 TYPE_FIELDS (etype), false));
2728 /* If converting to a type that contains a template, convert to the data
2729 type and then build the template. */
2730 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
2732 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2734 /* If the source already has a template, get a reference to the
2735 associated array only, as we are going to rebuild a template
2736 for the target type anyway. */
2737 expr = maybe_unconstrained_array (expr);
2740 gnat_build_constructor
2742 tree_cons (TYPE_FIELDS (type),
2743 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2744 obj_type, NULL_TREE),
2745 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2746 convert (obj_type, expr), NULL_TREE)));
2749 /* There are some special cases of expressions that we process
2751 switch (TREE_CODE (expr))
2757 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2758 conversion in gnat_expand_expr. NULL_EXPR does not represent
2759 an actual value, so no conversion is needed. */
2760 expr = copy_node (expr);
2761 TREE_TYPE (expr) = type;
2765 /* If we are converting a STRING_CST to another constrained array type,
2766 just make a new one in the proper type. */
2767 if (code == ecode && AGGREGATE_TYPE_P (etype)
2768 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2769 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2770 && (TREE_CODE (expr) == STRING_CST
2771 || get_alias_set (etype) == get_alias_set (type)))
2773 expr = copy_node (expr);
2774 TREE_TYPE (expr) = type;
2779 case UNCONSTRAINED_ARRAY_REF:
2780 /* Convert this to the type of the inner array by getting the address of
2781 the array from the template. */
2782 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2783 build_component_ref (TREE_OPERAND (expr, 0),
2784 get_identifier ("P_ARRAY"),
 /* EXPR changed type; refresh the cached type/code and fall through.  */
2786 etype = TREE_TYPE (expr);
2787 ecode = TREE_CODE (etype);
2790 case VIEW_CONVERT_EXPR:
2791 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2792 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
2793 return convert (type, TREE_OPERAND (expr, 0));
2797 /* If both types are record types, just convert the pointer and
2798 make a new INDIRECT_REF.
2800 ??? Disable this for now since it causes problems with the
2801 code in build_binary_op for MODIFY_EXPR which wants to
2802 strip off conversions. But that code really is a mess and
2803 we need to do this a much better way some time. */
2805 && (TREE_CODE (type) == RECORD_TYPE
2806 || TREE_CODE (type) == UNION_TYPE)
2807 && (TREE_CODE (etype) == RECORD_TYPE
2808 || TREE_CODE (etype) == UNION_TYPE)
2809 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
2810 return build_unary_op (INDIRECT_REF, NULL_TREE,
2811 convert (build_pointer_type (type),
2812 TREE_OPERAND (expr, 0)));
2819 /* Check for converting to a pointer to an unconstrained array. */
2820 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
2821 return convert_to_fat_pointer (type, expr);
2823 /* If we're converting between two aggregate types that have the same main
2824 variant, just make a VIEW_CONVERT_EXPR. */
2825 else if (AGGREGATE_TYPE_P (type)
2826 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
2827 return build1 (VIEW_CONVERT_EXPR, type, expr);
2829 /* In all other cases of related types, make a NOP_EXPR. */
2830 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2831 || (code == INTEGER_CST && ecode == INTEGER_CST
2832 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2833 return fold (build1 (NOP_EXPR, type, expr));
 /* From here on, dispatch on the target type's code.  */
2838 return build1 (CONVERT_EXPR, type, expr);
2841 return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));
 /* An integer type carrying actual array bounds stands for a packed
    array, so an array-ish source must go through unchecked conversion.  */
2844 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2845 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
2846 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
2847 return unchecked_convert (type, expr, false);
 /* Converting to a biased type: subtract the bias after converting.  */
2848 else if (TYPE_BIASED_REPRESENTATION_P (type))
2849 return fold (build1 (CONVERT_EXPR, type,
2850 fold (build2 (MINUS_EXPR, TREE_TYPE (type),
2851 convert (TREE_TYPE (type), expr),
2852 TYPE_MIN_VALUE (type)))));
2854 /* ... fall through ... */
2857 return fold (convert_to_integer (type, expr));
2860 case REFERENCE_TYPE:
2861 /* If converting between two pointers to records denoting
2862 both a template and type, adjust if needed to account
2863 for any differing offsets, since one might be negative. */
2864 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2867 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2868 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2869 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2870 sbitsize_int (BITS_PER_UNIT));
2872 expr = build1 (NOP_EXPR, type, expr);
2873 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2874 if (integer_zerop (byte_diff))
2877 return build_binary_op (PLUS_EXPR, type, expr,
2878 fold (convert_to_pointer (type, byte_diff)));
2881 /* If converting to a thin pointer, handle specially. */
2882 if (TYPE_THIN_POINTER_P (type)
2883 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
2884 return convert_to_thin_pointer (type, expr);
2886 /* If converting fat pointer to normal pointer, get the pointer to the
2887 array and then convert it. */
2888 else if (TYPE_FAT_POINTER_P (etype))
2889 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
2892 return fold (convert_to_pointer (type, expr));
2895 return fold (convert_to_real (type, expr));
 /* A justified modular target wraps a scalar source in a one-field
    CONSTRUCTOR of the record.  */
2898 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
2900 gnat_build_constructor
2901 (type, tree_cons (TYPE_FIELDS (type),
2902 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2905 /* ... fall through ... */
2908 /* In these cases, assume the front-end has validated the conversion.
2909 If the conversion is valid, it will be a bit-wise conversion, so
2910 it can be viewed as an unchecked conversion. */
2911 return unchecked_convert (type, expr, false);
2914 /* For unchecked unions, just validate that the type is indeed that of
2915 a field of the type. Then make the simple conversion. */
2916 if (TYPE_UNCHECKED_UNION_P (type))
2918 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
2920 if (TREE_TYPE (tem) == etype)
2921 return build1 (CONVERT_EXPR, type, expr);
2922 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
2923 && (TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
2924 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
2925 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
2926 return build1 (CONVERT_EXPR, type,
2927 convert (TREE_TYPE (tem), expr));
2933 /* Otherwise, this is a conversion between a tagged type and some
2934 subtype, which we have to mark as a UNION_TYPE because of
2935 overlapping fields. */
2936 return unchecked_convert (type, expr, false);
2938 case UNCONSTRAINED_ARRAY_TYPE:
2939 /* If EXPR is a constrained array, take its address, convert it to a
2940 fat pointer, and then dereference it. Likewise if EXPR is a
2941 record containing both a template and a constrained array.
2942 Note that a record representing a justified modular type
2943 always represents a packed constrained array. */
2944 if (ecode == ARRAY_TYPE
2945 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
2946 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
2947 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
2950 (INDIRECT_REF, NULL_TREE,
2951 convert_to_fat_pointer (TREE_TYPE (type),
2952 build_unary_op (ADDR_EXPR,
2955 /* Do something very similar for converting one unconstrained
2956 array to another. */
2957 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
2959 build_unary_op (INDIRECT_REF, NULL_TREE,
2960 convert (TREE_TYPE (type),
2961 build_unary_op (ADDR_EXPR,
2967 return fold (convert_to_complex (type, expr));
2974 /* Remove all conversions that are done in EXP. This includes converting
2975 from a padded type or to a justified modular type. If TRUE_ADDRESS
2976 is true, always return the address of the containing object even if
2977 the address is not bit-aligned. */
2980 remove_conversions (tree exp, bool true_address)
2982 switch (TREE_CODE (exp))
 /* A CONSTRUCTOR for a justified modular record: when TRUE_ADDRESS,
    strip it down to its single element.  */
2986 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2987 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
2988 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), true);
 /* A COMPONENT_REF extracting the field of a padding record is itself a
    conversion; recurse on the padded object.  */
2992 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
2993 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2994 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
2997 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
2998 case NOP_EXPR: case CONVERT_EXPR:
2999 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3008 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3009 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3010 likewise return an expression pointing to the underlying array. */
3013 maybe_unconstrained_array (tree exp)
3015 enum tree_code code = TREE_CODE (exp);
3018 switch (TREE_CODE (TREE_TYPE (exp)))
3020 case UNCONSTRAINED_ARRAY_TYPE:
3021 if (code == UNCONSTRAINED_ARRAY_REF)
 /* Dereference the P_ARRAY pointer held in the fat-pointer operand
    to get at the array data itself.  */
3024 = build_unary_op (INDIRECT_REF, NULL_TREE,
3025 build_component_ref (TREE_OPERAND (exp, 0),
3026 get_identifier ("P_ARRAY"),
 /* Preserve read-only-ness on the dereferenced result.  */
3028 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3032 else if (code == NULL_EXPR)
 /* Re-issue the NULL_EXPR at the type of the underlying array, which
    is the array field's type inside the fat-pointer record.  */
3033 return build1 (NULL_EXPR,
3034 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3035 (TREE_TYPE (TREE_TYPE (exp))))),
3036 TREE_OPERAND (exp, 0));
3039 /* If this is a padded type, convert to the unpadded type and see if
3040 it contains a template. */
3041 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3043 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3044 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3045 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
 /* Second field of a template-bearing record is the array.  */
3047 build_component_ref (new, NULL_TREE,
3048 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3051 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3053 build_component_ref (exp, NULL_TREE,
3054 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3064 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3065 If NOTRUNC_P is true, truncation operations should be suppressed. */
3068 unchecked_convert (tree type, tree expr, bool notrunc_p)
3070 tree etype = TREE_TYPE (expr);
3072 /* If the expression is already the right type, we are done. */
3076 /* If both types are integral just do a normal conversion.
3077 Likewise for a conversion to an unconstrained array. */
3078 if ((((INTEGRAL_TYPE_P (type)
3079 && !(TREE_CODE (type) == INTEGER_TYPE
3080 && TYPE_VAX_FLOATING_POINT_P (type)))
3081 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3082 || (TREE_CODE (type) == RECORD_TYPE
3083 && TYPE_JUSTIFIED_MODULAR_P (type)))
3084 && ((INTEGRAL_TYPE_P (etype)
3085 && !(TREE_CODE (etype) == INTEGER_TYPE
3086 && TYPE_VAX_FLOATING_POINT_P (etype)))
3087 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3088 || (TREE_CODE (etype) == RECORD_TYPE
3089 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3090 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3094 if (TREE_CODE (etype) == INTEGER_TYPE
3095 && TYPE_BIASED_REPRESENTATION_P (etype))
3097 tree ntype = copy_type (etype);
3099 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3100 TYPE_MAIN_VARIANT (ntype) = ntype;
3101 expr = build1 (NOP_EXPR, ntype, expr);
3104 if (TREE_CODE (type) == INTEGER_TYPE
3105 && TYPE_BIASED_REPRESENTATION_P (type))
3107 rtype = copy_type (type);
3108 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3109 TYPE_MAIN_VARIANT (rtype) = rtype;
3112 expr = convert (rtype, expr);
3114 expr = build1 (NOP_EXPR, type, expr);
3117 /* If we are converting TO an integral type whose precision is not the
3118 same as its size, first unchecked convert to a record that contains
3119 an object of the output type. Then extract the field. */
3120 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3121 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3122 GET_MODE_BITSIZE (TYPE_MODE (type))))
3124 tree rec_type = make_node (RECORD_TYPE);
3125 tree field = create_field_decl (get_identifier ("OBJ"), type,
3126 rec_type, 1, 0, 0, 0);
3128 TYPE_FIELDS (rec_type) = field;
3129 layout_type (rec_type);
3131 expr = unchecked_convert (rec_type, expr, notrunc_p);
3132 expr = build_component_ref (expr, NULL_TREE, field, 0);
3135 /* Similarly for integral input type whose precision is not equal to its
3137 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3138 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3139 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3141 tree rec_type = make_node (RECORD_TYPE);
3143 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3146 TYPE_FIELDS (rec_type) = field;
3147 layout_type (rec_type);
3149 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3150 expr = unchecked_convert (type, expr, notrunc_p);
3153 /* We have a special case when we are converting between two
3154 unconstrained array types. In that case, take the address,
3155 convert the fat pointer types, and dereference. */
3156 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3157 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3158 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3159 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3160 build_unary_op (ADDR_EXPR, NULL_TREE,
3164 expr = maybe_unconstrained_array (expr);
3166 /* There's no point in doing two unchecked conversions in a row. */
3167 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3168 expr = TREE_OPERAND (expr, 0);
3170 etype = TREE_TYPE (expr);
3171 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3174 /* If the result is an integral type whose size is not equal to
3175 the size of the underlying machine type, sign- or zero-extend
3176 the result. We need not do this in the case where the input is
3177 an integral type of the same precision and signedness or if the output
3178 is a biased type or if both the input and output are unsigned. */
3180 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3181 && !(TREE_CODE (type) == INTEGER_TYPE
3182 && TYPE_BIASED_REPRESENTATION_P (type))
3183 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3184 GET_MODE_BITSIZE (TYPE_MODE (type)))
3185 && !(INTEGRAL_TYPE_P (etype)
3186 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3187 && operand_equal_p (TYPE_RM_SIZE (type),
3188 (TYPE_RM_SIZE (etype) != 0
3189 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3191 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3193 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3194 TYPE_UNSIGNED (type));
3196 = convert (base_type,
3197 size_binop (MINUS_EXPR,
3199 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3200 TYPE_RM_SIZE (type)));
3203 build_binary_op (RSHIFT_EXPR, base_type,
3204 build_binary_op (LSHIFT_EXPR, base_type,
3205 convert (base_type, expr),
3210 /* An unchecked conversion should never raise Constraint_Error. The code
3211 below assumes that GCC's conversion routines overflow the same way that
3212 the underlying hardware does. This is probably true. In the rare case
3213 when it is false, we can rely on the fact that such conversions are
3214 erroneous anyway. */
3215 if (TREE_CODE (expr) == INTEGER_CST)
3216 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3218 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
3219 show no longer constant. */
3220 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3221 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3223 TREE_CONSTANT (expr) = 0;
3228 /* Search the chain of currently reachable declarations for a builtin
3229 FUNCTION_DECL node corresponding to function NAME (an IDENTIFIER_NODE).
3230 Return the first node found, if any, or NULL_TREE otherwise. */
3233 builtin_decl_for (tree name __attribute__ ((unused)))
/* Stub: NAME is deliberately ignored and no search is performed; every
   lookup reports "no builtin found" (NULL_TREE).  */
3235 /* ??? not clear yet how to implement this function in tree-ssa, so
3236 return NULL_TREE for now */
3240 #include "gt-ada-utils.h"
3241 #include "gtype-ada.h"