1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
/* Fallback definitions for targets that do not provide these limits.
   MAX_FIXED_MODE_SIZE is the widest integer mode handled as a unit;
   MAX_BITS_PER_WORD bounds the signed_and_unsigned_types cache below.
   NOTE(review): the matching #endif lines are not visible in this listing --
   confirm they are present in the full file.  */
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
/* NOTE(review): the variable declaration this comment describes (presumably
   `int force_global;`, referenced by global_bindings_p below) is not visible
   in this listing -- confirm it exists in the full file.  */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Associates a GNAT tree node to a GCC tree node. It is used in
78 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
79 of `save_gnu_tree' for more info. */
/* GTY length annotation: the garbage collector scans max_gnat_nodes
   entries of this dynamically-allocated array.  */
80 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
82 /* This listhead is used to record any global objects that need elaboration.
83 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
84 initial value to assign. */
86 static GTY(()) tree pending_elaborations;
88 /* This stack allows us to momentarily switch to generating elaboration
89 lists for an inner context. */
/* NOTE(review): the members of struct e_stack (at least a `next' pointer,
   per the chain_next annotation, and likely a saved elaboration list) are
   not visible here -- confirm against the full file.  */
91 struct e_stack GTY((chain_next ("%h.next"))) {
95 static GTY(()) struct e_stack *elist_stack;
97 /* This variable keeps a table for types for each precision so that we only
98 allocate each of them once. Signed and unsigned types are kept separate.
100 Note that these types are only used when fold-const requests something
101 special. Perhaps we should NOT share these types; we'll see how it
103 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
105 /* Likewise for float types, but record these by mode. */
106 static GTY(()) tree float_types[NUM_MACHINE_MODES];
108 /* For each binding contour we allocate a binding_level structure to indicate
109 the binding depth. */
/* Levels are chained outward via `chain' (the GTY chain_next annotation
   tells the collector to follow it) and recycled through free_binding_level
   below rather than freed.  */
111 struct ada_binding_level GTY((chain_next ("%h.chain")))
113 /* The binding level containing this one (the enclosing binding level). */
114 struct ada_binding_level *chain;
115 /* The BLOCK node for this level. */
/* NOTE(review): the `tree block;' member declaration is not visible in this
   listing, though gnat_pushlevel/gnat_poplevel clearly use it -- confirm.  */
117 /* If nonzero, the setjmp buffer that needs to be updated for any
118 variable-sized definition within this context. */
/* NOTE(review): likewise the `tree jmpbuf_decl;' member (used by
   set_block_jmpbuf_decl below) is not visible here.  */
122 /* The binding level currently in effect. */
123 static GTY(()) struct ada_binding_level *current_binding_level;
125 /* A chain of ada_binding_level structures awaiting reuse. */
/* GTY((deletable)): this is a pure cache, so the collector may clear it.  */
126 static GTY((deletable)) struct ada_binding_level *free_binding_level;
128 /* A chain of unused BLOCK nodes. */
129 static GTY((deletable)) tree free_block_chain;
/* Per-function language-specific data; body not visible in this listing.  */
131 struct language_function GTY(())
/* Forward declarations of subprograms local to this file.  */
136 static tree mark_visited (tree *, int *, void *);
137 static void gnat_define_builtin (const char *, tree, int, const char *, bool);
138 static void gnat_install_builtins (void);
139 static tree merge_sizes (tree, tree, tree, int, int);
140 static tree compute_related_constant (tree, tree);
141 static tree split_plus (tree, tree *);
142 static int value_zerop (tree);
143 static void gnat_gimplify_function (tree);
144 static void gnat_finalize (tree);
145 static tree float_type_for_precision (int, enum machine_mode);
146 static tree convert_to_fat_pointer (tree, tree);
147 static tree convert_to_thin_pointer (tree, tree);
148 static tree make_descriptor_field (const char *,tree, tree, tree);
149 static int value_factor_p (tree, int);
150 static int potential_alignment_gap (tree, tree, tree);
152 /* Initialize the association of GNAT nodes to GCC trees. */
155 init_gnat_to_gnu (void)
/* Allocate the mapping table in GC-managed, zero-initialized storage so
   every entry starts out as "no association" (NULL_TREE).  */
157 associate_gnat_to_gnu
158 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
/* Start with an empty elaboration list (a dummy head node).  */
160 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
163 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
164 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
165 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
167 If GNU_DECL is zero, a previous association is to be reset. */
170 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
172 /* Check that GNAT_ENTITY is not already defined and that it is being set
173 to something which is a decl. Raise gigi 401 if not. Usually, this
174 means GNAT_ENTITY is defined twice, but occasionally is due to some
/* NOTE(review): the first operand of this guard (presumably a test that
   gnu_decl is nonzero) and the abort call it protects are not visible in
   this listing -- confirm against the full file.  */
177 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
178 || (! no_check && ! DECL_P (gnu_decl))))
/* Record (or reset, when GNU_DECL is zero) the association.  Entities are
   indexed relative to First_Node_Id.  */
181 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
184 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
185 Return the ..._DECL node that was associated with it. If there is no tree
186 node associated with GNAT_ENTITY, abort.
188 In some cases, such as delayed elaboration or expressions that need to
189 be elaborated only once, GNAT_ENTITY is really not an entity. */
192 get_gnu_tree (Entity_Id gnat_entity)
/* A missing association is a compiler bug; the abort call guarded by this
   test is not visible in this listing.  */
194 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
197 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
200 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
203 present_gnu_tree (Entity_Id gnat_entity)
/* Non-destructive membership test; unlike get_gnu_tree this never aborts. */
205 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
209 /* Return non-zero if we are currently in the global binding level. */
212 global_bindings_p (void)
/* We are at global level when force_global is set or when the current
   binding level has no enclosing level.  Note the value returned for
   "true" is -1, not 1 -- callers must test for nonzero, not equality.  */
214 return (force_global != 0 || current_binding_level->chain == 0 ? -1 : 0);
217 /* Return the list of declarations in the current level. Note that this list
218 is in reverse order (it has to be so for back-end compatibility). */
/* NOTE(review): the function header is not visible in this listing;
   presumably this is the lang-hook `getdecls' -- confirm.  */
223 return BLOCK_VARS (current_binding_level->block);
226 /* Enter a new binding level. */
/* NOTE(review): the function header (presumably `gnat_pushlevel') is not
   visible in this listing -- confirm the name against the full file.  */
231 struct ada_binding_level *newlevel = NULL;
233 /* Reuse a struct for this binding level, if there is one. */
234 if (free_binding_level)
236 newlevel = free_binding_level;
237 free_binding_level = free_binding_level->chain;
/* Otherwise allocate a fresh level in GC storage.  (The `else' and the
   `newlevel' assignment head are not visible in this listing.)  */
241 = (struct ada_binding_level *)
242 ggc_alloc (sizeof (struct ada_binding_level));
244 /* Use a free BLOCK, if any; otherwise, allocate one. */
245 if (free_block_chain)
247 newlevel->block = free_block_chain;
248 free_block_chain = TREE_CHAIN (free_block_chain);
/* Detach the recycled BLOCK from the free chain.  */
249 TREE_CHAIN (newlevel->block) = NULL_TREE;
252 newlevel->block = make_node (BLOCK);
254 /* Point the BLOCK we just made to its parent. */
255 if (current_binding_level)
256 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* Recycled BLOCKs may carry stale contents; clear both lists.  */
258 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
260 /* Add this level to the front of the chain (stack) of levels that are
262 newlevel->chain = current_binding_level;
263 newlevel->jmpbuf_decl = NULL_TREE;
264 current_binding_level = newlevel;
267 /* Set the jmpbuf_decl for the current binding level to DECL. */
270 set_block_jmpbuf_decl (tree decl)
272 current_binding_level->jmpbuf_decl = decl;
275 /* Get the jmpbuf_decl, if any, for the current binding level.
   Returns NULL_TREE when no setjmp buffer has been recorded.  */
278 get_block_jmpbuf_decl ()
280 return current_binding_level->jmpbuf_decl;
283 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function header (presumably `gnat_poplevel') is not
   visible in this listing -- confirm the name against the full file.  */
288 struct ada_binding_level *level = current_binding_level;
289 tree block = level->block;
/* The lists were built front-chained; restore source order here.  */
291 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
292 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
294 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
295 are no variables free the block and merge its subblocks into those of its
296 parent block. Otherwise, add it to the list of its parent. */
297 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
299 else if (BLOCK_VARS (block) == NULL_TREE)
/* Empty block: splice its subblocks into the parent and recycle it.  */
301 BLOCK_SUBBLOCKS (level->chain->block)
302 = chainon (BLOCK_SUBBLOCKS (block),
303 BLOCK_SUBBLOCKS (level->chain->block));
304 TREE_CHAIN (block) = free_block_chain;
305 free_block_chain = block;
/* Non-empty block: chain it onto the parent's subblock list and mark it
   used so the debug output keeps it.  (The `else' arm's opening is not
   visible in this listing.)  */
309 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
310 BLOCK_SUBBLOCKS (level->chain->block) = block;
311 TREE_USED (block) = 1;
312 set_block_for_group (block);
315 /* Free this binding structure. */
316 current_binding_level = level->chain;
317 level->chain = free_binding_level;
318 free_binding_level = level;
321 /* Insert BLOCK at the end of the list of subblocks of the
322 current binding level. This is used when a BIND_EXPR is expanded,
323 to handle the BLOCK node inside the BIND_EXPR. */
326 insert_block (tree block)
328 TREE_USED (block) = 1;
/* Front-chain onto the current level's subblock list; gnat_poplevel
   nreverses the list, restoring declaration order.  */
329 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
330 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
333 /* Return nonzero if the current binding has any variables. This means
334 it will have a BLOCK node. */
/* NOTE(review): the function header is not visible in this listing --
   confirm its name against the full file.  */
339 return BLOCK_VARS (current_binding_level->block) != 0;
342 /* Utility function to mark nodes with TREE_VISITED. Called from walk_tree.
343 We use this to indicate all variable sizes and positions in global types
344 may not be shared by any subprogram. */
347 mark_visited (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
/* Already-visited subtrees need not be walked again; the early-return and
   the pruning of *walk_subtrees are not visible in this listing.  */
349 if (TREE_VISITED (*tp))
352 TREE_VISITED (*tp) = 1;
357 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
358 Returns the ..._DECL node. */
/* NOTE(review): the function header (presumably `pushdecl (tree decl)') is
   not visible in this listing -- confirm against the full file.  */
363 /* If at top level, there is no context. But PARM_DECLs always go in the
364 level of its function. Also, at toplevel we must protect all trees
365 that are part of sizes and positions. */
366 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
368 DECL_CONTEXT (decl) = 0;
/* Mark all size/position trees reachable from the decl as visited so
   they are not shared into any subprogram (see mark_visited).  */
369 walk_tree (&decl, mark_visited, NULL, NULL);
/* Otherwise the decl belongs to the function being compiled.  (The `else'
   line is not visible in this listing.)  */
372 DECL_CONTEXT (decl) = current_function_decl;
374 /* Put the declaration on the list. The list of declarations is in reverse
375 order. The list will be reversed later.
377 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
378 will cause trouble with the debugger and aren't needed anyway. */
379 if (TREE_CODE (decl) != TYPE_DECL
380 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
382 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
383 BLOCK_VARS (current_binding_level->block) = decl;
386 /* For the declaration of a type, set its name if it either is not already
387 set, was set to an IDENTIFIER_NODE, indicating an internal name,
388 or if the previous type name was not derived from a source name.
389 We'd rather have the type named with a real name and all the pointer
390 types to the same object have the same POINTER_TYPE node. Code in this
391 function in c-decl.c makes a copy of the type node here, but that may
392 cause us trouble with incomplete types, so let's not try it (at least
395 if (TREE_CODE (decl) == TYPE_DECL
396 && DECL_NAME (decl) != 0
397 && (TYPE_NAME (TREE_TYPE (decl)) == 0
398 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
399 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
400 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
401 && ! DECL_ARTIFICIAL (decl))))
402 TYPE_NAME (TREE_TYPE (decl)) = decl;
407 /* Do little here. Set up the standard declarations later after the
408 front end has been run. */
411 gnat_init_decl_processing (void)
415 /* Make the binding_level structure for global names. */
416 current_function_decl = 0;
417 current_binding_level = 0;
418 free_binding_level = 0;
/* Build the basic GCC tree nodes (integer/char/void types, etc.).  */
421 build_common_tree_nodes (0);
423 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
424 corresponding to the size of Pmode. In most cases when ptr_mode and
425 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
426 far better code using the width of Pmode. Make this here since we need
427 this before we can expand the GNAT types. */
428 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
429 set_sizetype (size_type_node);
430 build_common_tree_nodes_2 (0);
/* Give sizetype a visible name for the debugger.  */
432 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
434 /* We need to make the integer type before doing anything else.
435 We stitch this in to the appropriate GNAT type later. */
436 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
438 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
441 ptr_void_type_node = build_pointer_type (void_type_node);
/* Register the builtins the middle-end expects (memcpy, expect, ...).  */
443 gnat_install_builtins ();
446 /* Define a builtin function. This is temporary and is just being done
447 to initialize implicit_built_in_decls for the middle-end. We'll want
448 to do full builtin processing soon.
   NAME is the user-visible builtin name, TYPE its function type,
   FUNCTION_CODE the BUILT_IN_* enumerator, LIBRARY_NAME the assembler
   name of the library fallback, and CONST_P marks it pure/readonly.  */
451 gnat_define_builtin (const char *name, tree type,
452 int function_code, const char *library_name, bool const_p)
454 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
/* Builtins are externally-defined, publicly-visible functions.  */
456 DECL_EXTERNAL (decl) = 1;
457 TREE_PUBLIC (decl) = 1;
459 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
460 make_decl_rtl (decl, NULL);
462 DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
463 DECL_FUNCTION_CODE (decl) = function_code;
464 TREE_READONLY (decl) = const_p;
/* Let the middle-end find this decl when it wants to emit the builtin.  */
466 implicit_built_in_decls[function_code] = decl;
469 /* Install the builtin functions the middle-end needs.
   Each builtin is declared by constructing its argument TREE_LIST
   (innermost argument first via tree_cons onto void_list_node) and its
   function type, then registering it with gnat_define_builtin.  */
472 gnat_install_builtins ()
/* long __builtin_expect (long, long) -- branch-prediction hint.  */
477 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
478 tmp = tree_cons (NULL_TREE, long_integer_type_node, tmp);
479 ftype = build_function_type (long_integer_type_node, tmp);
480 gnat_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT,
481 "__builtin_expect", true);
/* void *__builtin_memcpy (void *, void *, size_t).  */
483 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
484 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
485 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
486 ftype = build_function_type (ptr_void_type_node, tmp);
487 gnat_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
/* int __builtin_memcmp (void *, void *, size_t).  */
490 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
491 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
492 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
493 ftype = build_function_type (integer_type_node, tmp);
494 gnat_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
/* Count-leading-zeros builtins for int, long and long long.  */
497 tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
498 ftype = build_function_type (integer_type_node, tmp);
499 gnat_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "clz", true);
501 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
502 ftype = build_function_type (integer_type_node, tmp);
503 gnat_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "clzl", true);
505 tmp = tree_cons (NULL_TREE, long_long_integer_type_node, void_list_node);
506 ftype = build_function_type (integer_type_node, tmp);
507 gnat_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "clzll",
/* Trampoline support for nested subprograms.  */
510 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
511 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
512 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
513 ftype = build_function_type (void_type_node, tmp);
514 gnat_define_builtin ("__builtin_init_trampoline", ftype,
515 BUILT_IN_INIT_TRAMPOLINE, "init_trampoline", false);
517 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
518 ftype = build_function_type (ptr_void_type_node, tmp);
519 gnat_define_builtin ("__builtin_adjust_trampoline", ftype,
520 BUILT_IN_ADJUST_TRAMPOLINE, "adjust_trampoline", true);
/* NOTE(review): the second tree_cons below restarts from void_list_node,
   discarding the ptr_void argument built on the previous line -- confirm
   against the full file whether it should chain onto `tmp' instead (or
   whether the first line belongs to a different builtin).  */
522 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
523 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
524 ftype = build_function_type (ptr_void_type_node, tmp);
525 gnat_define_builtin ("__builtin_stack_alloc", ftype, BUILT_IN_STACK_ALLOC,
526 "stack_alloc", false);
528 /* The stack_save and stack_restore builtins aren't used directly. They
529 are inserted during gimplification to implement stack_alloc calls. */
530 ftype = build_function_type (ptr_void_type_node, void_list_node);
531 gnat_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
532 "stack_save", false);
533 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
534 ftype = build_function_type (void_type_node, tmp);
535 gnat_define_builtin ("__builtin_stack_restore", ftype,
536 BUILT_IN_STACK_RESTORE, "stack_restore", false);
540 /* Create the predefined scalar types such as `integer_type_node' needed
541 in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE is the front end's widest float type and
   EXCEPTION_TYPE its exception occurrence type; both are used to seed
   the corresponding GCC-side type nodes and runtime declarations.  */
544 init_gigi_decls (tree long_long_float_type, tree exception_type)
549 /* Set the types that GCC and Gigi use from the front end. We would like
550 to do this for char_type_node, but it needs to correspond to the C
552 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
554 /* In this case, the builtin floating point types are VAX float,
555 so make up a type for use. */
556 longest_float_type_node = make_node (REAL_TYPE);
557 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
558 layout_type (longest_float_type_node);
559 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
560 longest_float_type_node));
/* Normal case: use the front end's long long float directly.  (The `else'
   line is not visible in this listing.)  */
563 longest_float_type_node = TREE_TYPE (long_long_float_type);
565 except_type_node = TREE_TYPE (exception_type);
567 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
568 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
569 unsigned_type_node));
572 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
/* void (*)(void) -- used for subprogram access types.  */
575 void_ftype = build_function_type (void_type_node, NULL_TREE);
576 ptr_void_ftype = build_pointer_type (void_ftype);
578 /* Now declare runtime functions. */
579 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
581 /* malloc is a function declaration tree for a function to allocate
583 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
585 build_function_type (ptr_void_type_node,
586 tree_cons (NULL_TREE,
589 NULL_TREE, 0, 1, 1, 0);
591 /* free is a function declaration tree for a function to free memory. */
593 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
594 build_function_type (void_type_node,
595 tree_cons (NULL_TREE,
598 NULL_TREE, 0, 1, 1, 0);
600 /* Make the types and functions used for exception processing. */
/* JMPBUF_T is an array of 6 pointer-sized words (index 0..5).  */
602 = build_array_type (gnat_type_for_mode (Pmode, 0),
603 build_index_type (build_int_2 (5, 0)));
604 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
605 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
607 /* Functions to get and set the jumpbuf pointer for the current thread. */
609 = create_subprog_decl
610 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
611 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
612 NULL_TREE, 0, 1, 1, 0);
615 = create_subprog_decl
616 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
618 build_function_type (void_type_node,
619 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
620 NULL_TREE, 0, 1, 1, 0);
622 /* Function to get the current exception. */
624 = create_subprog_decl
625 (get_identifier ("system__soft_links__get_gnat_exception"),
627 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
628 NULL_TREE, 0, 1, 1, 0);
630 /* Functions that raise exceptions. */
632 = create_subprog_decl
633 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
634 build_function_type (void_type_node,
635 tree_cons (NULL_TREE,
636 build_pointer_type (except_type_node),
638 NULL_TREE, 0, 1, 1, 0);
640 /* Hooks to call when entering/leaving an exception handler. */
642 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
643 build_function_type (void_type_node,
644 tree_cons (NULL_TREE,
647 NULL_TREE, 0, 1, 1, 0);
650 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
651 build_function_type (void_type_node,
652 tree_cons (NULL_TREE,
655 NULL_TREE, 0, 1, 1, 0);
657 /* If in no exception handlers mode, all raise statements are redirected to
658 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
659 this procedure will never be called in this mode. */
660 if (No_Exception_Handlers_Set ())
663 = create_subprog_decl
664 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
665 build_function_type (void_type_node,
666 tree_cons (NULL_TREE,
667 build_pointer_type (char_type_node),
668 tree_cons (NULL_TREE,
671 NULL_TREE, 0, 1, 1, 0);
/* Every raise reason maps to the single last-chance handler.  */
673 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
674 gnat_raise_decls[i] = decl;
677 /* Otherwise, make one decl for each exception reason. */
678 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* Runtime check routines are named __gnat_rcheck_NN by reason code.  */
682 sprintf (name, "__gnat_rcheck_%.2d", i);
684 = create_subprog_decl
685 (get_identifier (name), NULL_TREE,
686 build_function_type (void_type_node,
687 tree_cons (NULL_TREE,
690 tree_cons (NULL_TREE,
693 NULL_TREE, 0, 1, 1, 0);
696 /* Indicate that these never return. */
697 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
698 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
699 TREE_TYPE (raise_nodefer_decl)
700 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
703 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
705 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
706 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
707 TREE_TYPE (gnat_raise_decls[i])
708 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
712 /* setjmp returns an integer and has one operand, which is a pointer to
715 = create_subprog_decl
716 (get_identifier ("__builtin_setjmp"), NULL_TREE,
717 build_function_type (integer_type_node,
718 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
719 NULL_TREE, 0, 1, 1, 0);
721 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
722 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
724 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
726 update_setjmp_buf_decl
727 = create_subprog_decl
728 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
729 build_function_type (void_type_node,
730 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
731 NULL_TREE, 0, 1, 1, 0);
733 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
734 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
736 main_identifier_node = get_identifier ("main");
739 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
740 nodes (FIELDLIST), finish constructing the record or union type.
741 If HAS_REP is nonzero, this record has a rep clause; don't call
742 layout_type but merely set the size and alignment ourselves.
743 If DEFER_DEBUG is nonzero, do not call the debugging routines
744 on this type; it will be done later. */
747 finish_record_type (tree record_type,
752 enum tree_code code = TREE_CODE (record_type);
/* Running totals: Ada-visible size and GCC size in bits, plus the GCC
   size in bytes, accumulated over the field list below.  */
753 tree ada_size = bitsize_zero_node;
754 tree size = bitsize_zero_node;
755 tree size_unit = size_zero_node;
759 TYPE_FIELDS (record_type) = fieldlist;
/* Ensure the type has a stub TYPE_DECL for debug output, reusing the
   TYPE_NAME when it already is a TYPE_DECL.  */
761 if (TYPE_NAME (record_type) != 0
762 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
763 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
765 TYPE_STUB_DECL (record_type)
766 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
769 /* We don't need both the typedef name and the record name output in
770 the debugging information, since they are the same. */
771 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
773 /* Globally initialize the record first. If this is a rep'ed record,
774 that just means some initializations; otherwise, layout the record. */
778 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
779 TYPE_MODE (record_type) = BLKmode;
780 if (TYPE_SIZE (record_type) == 0)
782 TYPE_SIZE (record_type) = bitsize_zero_node;
783 TYPE_SIZE_UNIT (record_type) = size_zero_node;
785 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
786 out just like a UNION_TYPE, since the size will be fixed. */
787 else if (code == QUAL_UNION_TYPE)
792 /* Ensure there isn't a size already set. There can be in an error
793 case where there is a rep clause but all fields have errors and
794 no longer have a position. */
795 TYPE_SIZE (record_type) = 0;
796 layout_type (record_type);
799 /* At this point, the position and size of each field is known. It was
800 either set before entry by a rep clause, or by laying out the type above.
802 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
803 to compute the Ada size; the GCC size and alignment (for rep'ed records
804 that are not padding types); and the mode (for rep'ed records). We also
805 clear the DECL_BIT_FIELD indication for the cases we know have not been
806 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
808 if (code == QUAL_UNION_TYPE)
809 fieldlist = nreverse (fieldlist);
811 for (field = fieldlist; field; field = TREE_CHAIN (field))
813 tree pos = bit_position (field);
815 tree type = TREE_TYPE (field);
816 tree this_size = DECL_SIZE (field);
817 tree this_size_unit = DECL_SIZE_UNIT (field);
818 tree this_ada_size = DECL_SIZE (field);
820 /* We need to make an XVE/XVU record if any field has variable size,
821 whether or not the record does. For example, if we have an union,
822 it may be that all fields, rounded up to the alignment, have the
823 same size, in which case we'll use that size. But the debug
824 output routines (except Dwarf2) won't be able to output the fields,
825 so we need to make the special record. */
826 if (TREE_CODE (this_size) != INTEGER_CST)
/* For nested record fields, prefer the contained type's Ada size when
   one was computed (fat pointers and template-bearing types excluded).  */
829 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
830 || TREE_CODE (type) == QUAL_UNION_TYPE)
831 && ! TYPE_IS_FAT_POINTER_P (type)
832 && ! TYPE_CONTAINS_TEMPLATE_P (type)
833 && TYPE_ADA_SIZE (type) != 0)
834 this_ada_size = TYPE_ADA_SIZE (type);
836 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
837 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
838 && value_factor_p (pos, BITS_PER_UNIT)
839 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
840 DECL_BIT_FIELD (field) = 0;
842 /* If we still have DECL_BIT_FIELD set at this point, we know the field
843 is technically not addressable. Except that it can actually be
844 addressed if the field is BLKmode and happens to be properly
846 DECL_NONADDRESSABLE_P (field)
847 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
849 if (has_rep && ! DECL_BIT_FIELD (field))
850 TYPE_ALIGN (record_type)
851 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Accumulate the sizes according to the record/union kind.  These arms
   appear to be cases of a switch on `code'; the switch head and the
   UNION_TYPE case label are not visible in this listing.  */
856 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
857 size = size_binop (MAX_EXPR, size, this_size);
858 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
861 case QUAL_UNION_TYPE:
/* A variant's size counts only when its qualifier holds, hence the
   COND_EXPRs folding each variant against the accumulated size.  */
863 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
864 this_ada_size, ada_size));
865 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
867 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
868 this_size_unit, size_unit));
872 /* Since we know here that all fields are sorted in order of
873 increasing bit position, the size of the record is one
874 higher than the ending bit of the last field processed
875 unless we have a rep clause, since in that case we might
876 have a field outside a QUAL_UNION_TYPE that has a higher ending
877 position. So use a MAX in that case. Also, if this field is a
878 QUAL_UNION_TYPE, we need to take into account the previous size in
879 the case of empty variants. */
881 = merge_sizes (ada_size, pos, this_ada_size,
882 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
883 size = merge_sizes (size, pos, this_size,
884 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
886 = merge_sizes (size_unit, byte_position (field), this_size_unit,
887 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
/* Restore the original field order reversed before the loop.  */
895 if (code == QUAL_UNION_TYPE)
896 nreverse (fieldlist);
898 /* If this is a padding record, we never want to make the size smaller than
899 what was specified in it, if any. */
900 if (TREE_CODE (record_type) == RECORD_TYPE
901 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
903 size = TYPE_SIZE (record_type);
904 size_unit = TYPE_SIZE_UNIT (record_type);
907 /* Now set any of the values we've just computed that apply. */
908 if (! TYPE_IS_FAT_POINTER_P (record_type)
909 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
910 SET_TYPE_ADA_SIZE (record_type, ada_size);
/* For rep'ed records (apart from self-referential padding), install the
   computed size rounded up to the alignment and recompute the mode.  */
914 if (! (TREE_CODE (record_type) == RECORD_TYPE
915 && TYPE_IS_PADDING_P (record_type)
916 && CONTAINS_PLACEHOLDER_P (size)))
918 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
919 TYPE_SIZE_UNIT (record_type)
920 = round_up (size_unit,
921 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
924 compute_record_mode (record_type);
929 /* If this record is of variable size, rename it so that the
930 debugger knows it is and make a new, parallel, record
931 that tells the debugger how the record is laid out. See
932 exp_dbug.ads. But don't do this for records that are padding
933 since they confuse GDB. */
935 && ! (TREE_CODE (record_type) == RECORD_TYPE
936 && TYPE_IS_PADDING_P (record_type)))
939 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
940 ? UNION_TYPE : TREE_CODE (record_type));
941 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
/* Parallel type is named with the XVE/XVU suffix convention from
   exp_dbug.ads so the debugger can recognize the encoding.  */
943 = concat_id_with_name (orig_id,
944 TREE_CODE (record_type) == QUAL_UNION_TYPE
946 tree last_pos = bitsize_zero_node;
948 tree prev_old_field = 0;
950 TYPE_NAME (new_record_type) = new_id;
951 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
952 TYPE_STUB_DECL (new_record_type)
953 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
954 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
955 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
956 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
957 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
959 /* Now scan all the fields, replacing each field with a new
960 field corresponding to the new encoding. */
961 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
962 old_field = TREE_CHAIN (old_field))
964 tree field_type = TREE_TYPE (old_field);
965 tree field_name = DECL_NAME (old_field);
967 tree curpos = bit_position (old_field);
969 unsigned int align = 0;
972 /* See how the position was modified from the last position.
974 There are two basic cases we support: a value was added
975 to the last position or the last position was rounded to
976 a boundary and they something was added. Check for the
977 first case first. If not, see if there is any evidence
978 of rounding. If so, round the last position and try
981 If this is a union, the position can be taken as zero. */
983 if (TREE_CODE (new_record_type) == UNION_TYPE)
984 pos = bitsize_zero_node, align = 0;
986 pos = compute_related_constant (curpos, last_pos);
/* curpos == something * const: treat const as an alignment and retry
   against the rounded last position.  */
988 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
989 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
991 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
992 pos = compute_related_constant (curpos,
993 round_up (last_pos, align));
/* curpos == something * const + const: same idea with an offset.  */
995 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
996 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
997 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
998 && host_integerp (TREE_OPERAND
999 (TREE_OPERAND (curpos, 0), 1),
1004 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1005 pos = compute_related_constant (curpos,
1006 round_up (last_pos, align));
/* Otherwise the gap may come from alignment of the field type.  */
1008 else if (potential_alignment_gap (prev_old_field, old_field,
1011 align = TYPE_ALIGN (field_type);
1012 pos = compute_related_constant (curpos,
1013 round_up (last_pos, align));
1016 /* If we can't compute a position, set it to zero.
1018 ??? We really should abort here, but it's too much work
1019 to get this correct for all cases. */
1022 pos = bitsize_zero_node;
1024 /* See if this type is variable-size and make a new type
1025 and indicate the indirection if so. */
1026 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1028 field_type = build_pointer_type (field_type);
1032 /* Make a new field name, if necessary. */
/* Suffix encodes the encoding kind: XVL for variable-size (indirect)
   fields, XVAnn/XVBnn-style alignment suffixes otherwise.  */
1033 if (var || align != 0)
1038 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1039 align / BITS_PER_UNIT);
1041 strcpy (suffix, "XVL");
1043 field_name = concat_id_with_name (field_name, suffix);
1046 new_field = create_field_decl (field_name, field_type,
1048 DECL_SIZE (old_field), pos, 0);
1049 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1050 TYPE_FIELDS (new_record_type) = new_field;
1052 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1053 zero. The only time it's not the last field of the record
1054 is when there are other components at fixed positions after
1055 it (meaning there was a rep clause for every field) and we
1056 want to be able to encode them. */
1057 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1058 (TREE_CODE (TREE_TYPE (old_field))
1061 : DECL_SIZE (old_field));
1062 prev_old_field = old_field;
/* Fields were front-chained above; restore declaration order.  */
1065 TYPE_FIELDS (new_record_type)
1066 = nreverse (TYPE_FIELDS (new_record_type));
1068 rest_of_type_compilation (new_record_type, global_bindings_p ());
1071 rest_of_type_compilation (record_type, global_bindings_p ());
/* NOTE(review): this listing is elided -- braces, the remaining parameter
   declarations and the final return statement fall on line numbers that are
   not visible here.  Comments describe only the statements shown.  */
1075 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1076 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1077 if this represents a QUAL_UNION_TYPE in which case we must look for
1078 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1079 is nonzero, we must take the MAX of the end position of this field
1080 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1082 We return an expression for the size. */
1085 merge_sizes (tree last_size,
1091 tree type = TREE_TYPE (last_size);
/* Ordinary field: new size candidate is FIRST_BIT + SIZE; with a rep
   clause the record end is the MAX of that and the previous size.  */
1094 if (! special || TREE_CODE (size) != COND_EXPR)
1096 new = size_binop (PLUS_EXPR, first_bit, size);
1098 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION_TYPE variant: rebuild the COND_EXPR, recursing into each arm
   and substituting LAST_SIZE wherever the arm's size is literally zero.  */
1102 new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1103 integer_zerop (TREE_OPERAND (size, 1))
1104 ? last_size : merge_sizes (last_size, first_bit,
1105 TREE_OPERAND (size, 1),
1107 integer_zerop (TREE_OPERAND (size, 2))
1108 ? last_size : merge_sizes (last_size, first_bit,
1109 TREE_OPERAND (size, 2),
1112 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1113 when fed through substitute_in_expr) into thinking that a constant
1114 size is not constant. */
1115 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1116 new = TREE_OPERAND (new, 0);
1121 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1122 related by the addition of a constant. Return that constant if so. */
1125 compute_related_constant (tree op0, tree op1)
1127 tree op0_var, op1_var;
/* Split each operand into constant + variable parts, then compare the
   variable parts; RESULT is the difference of the constant parts.  */
1128 tree op0_con = split_plus (op0, &op0_var);
1129 tree op1_con = split_plus (op1, &op1_var);
1130 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* NOTE(review): the return statements for both success cases and the
   failure fall on elided lines; presumably RESULT is returned when the
   variable parts match, else a null tree -- confirm against full source.  */
1132 if (operand_equal_p (op0_var, op1_var, 0))
1134 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1140 /* Utility function of above to split a tree OP which may be a sum, into a
1141 constant part, which is returned, and a variable part, which is stored
1142 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1146 split_plus (tree in, tree *pvar)
1148 /* Strip NOPS in order to ease the tree traversal and maximize the
1149 potential for constant or plus/minus discovery. We need to be careful
1150 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part, constant is zero
   (returned by the final statement below).  */
1154 *pvar = convert (bitsizetype, in);
/* Pure constant: variable part is zero, the constant is IN itself.  */
1156 if (TREE_CODE (in) == INTEGER_CST)
1158 *pvar = bitsize_zero_node;
1159 return convert (bitsizetype, in);
/* Sum or difference: recurse on both operands and recombine the constant
   and variable halves with the same PLUS/MINUS operation.  */
1161 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1163 tree lhs_var, rhs_var;
1164 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1165 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side contained a constant; leave *pvar as set above and
   report a zero constant part.  */
1167 if (lhs_var == TREE_OPERAND (in, 0)
1168 && rhs_var == TREE_OPERAND (in, 1))
1169 return bitsize_zero_node;
1171 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1172 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1175 return bitsize_zero_node;
1178 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1179 subprogram. If it is void_type_node, then we are dealing with a procedure,
1180 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1181 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1182 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1183 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1184 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1185 RETURNS_WITH_DSP is nonzero if the function is to return with a
1186 depressed stack pointer. */
1189 create_subprog_type (tree return_type,
1190 tree param_decl_list,
1192 int returns_unconstrained,
1194 int returns_with_dsp)
1196 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1197 the subprogram formal parameters. This list is generated by traversing the
1198 input list of PARM_DECL nodes. */
1199 tree param_type_list = NULL;
/* Collect the type of each PARM_DECL; tree_cons builds the list in
   reverse order, fixed up by the nreverse below.  */
1203 for (param_decl = param_decl_list; param_decl;
1204 param_decl = TREE_CHAIN (param_decl))
1205 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1208 /* The list of the function parameter types has to be terminated by the void
1209 type to signal to the back-end that we are not dealing with a variable
1210 parameter subprogram, but that the subprogram has a fixed number of
1212 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1214 /* The list of argument types has been created in reverse
1216 param_type_list = nreverse (param_type_list);
1218 type = build_function_type (return_type, param_type_list);
1220 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1221 or the new type should, make a copy of TYPE. Likewise for
1222 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1223 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1224 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1225 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1226 type = copy_type (type);
/* Stamp the Ada-specific calling-convention flags onto the (possibly
   freshly copied) FUNCTION_TYPE.  */
1228 SET_TYPE_CI_CO_LIST (type, cico_list);
1229 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1230 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1231 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1235 /* Return a copy of TYPE but safe to modify in any way. */
1238 copy_type (tree type)
1240 tree new = copy_node (type);
1242 /* copy_node clears this field instead of copying it, because it is
1243 aliased with TREE_CHAIN. */
1244 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/reference caches and make
   it its own main variant, so later modifications cannot leak back.  */
1246 TYPE_POINTER_TO (new) = 0;
1247 TYPE_REFERENCE_TO (new) = 0;
1248 TYPE_MAIN_VARIANT (new) = new;
1249 TYPE_NEXT_VARIANT (new) = 0;
1254 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1255 TYPE_INDEX_TYPE is INDEX. */
1258 create_index_type (tree min, tree max, tree index)
1260 /* First build a type for the desired range. */
1261 tree type = build_index_2_type (min, max);
1263 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1264 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1265 is set, but not to INDEX, make a copy of this type with the requested
1266 index type. Note that we have no way of sharing these types, but that's
1267 only a small hole. */
1268 if (TYPE_INDEX_TYPE (type) == index)
1270 else if (TYPE_INDEX_TYPE (type) != 0)
1271 type = copy_type (type);
1273 SET_TYPE_INDEX_TYPE (type, index);
1277 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1278 string) and TYPE is a ..._TYPE node giving its data type.
1279 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1280 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1281 information about this type. */
1284 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1285 int artificial_p, int debug_info_p)
1287 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1288 enum tree_code code = TREE_CODE (type);
1290 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Enter the decl in the current binding level and apply any machine
   attributes, link names, etc. requested by the front end.  */
1291 pushdecl (type_decl);
1292 process_attributes (type_decl, attr_list);
1294 /* Pass type declaration information to the debugger unless this is an
1295 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1296 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1297 a dummy type, which will be completed later, or a type for which
1298 debugging information was not requested. */
1299 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1301 DECL_IGNORED_P (type_decl) = 1;
1302 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1303 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1304 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1305 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1310 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1311 ASM_NAME is its assembler name (if provided). TYPE is its data type
1312 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1313 expression; NULL_TREE if none.
1315 CONST_FLAG is nonzero if this variable is constant.
1317 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1318 the current compilation unit. This flag should be set when processing the
1319 variable definitions in a package specification. EXTERN_FLAG is nonzero
1320 when processing an external variable declaration (as opposed to a
1321 definition: no storage is to be allocated for the variable here).
1323 STATIC_FLAG is only relevant when not at top level. In that case
1324 it indicates whether to always allocate storage to the variable. */
1327 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1328 int const_flag, int public_flag, int extern_flag,
1329 int static_flag, struct attrib *attr_list)
/* Decide whether the initializer is constant enough: at global/static
   level it must be a valid static initializer, otherwise TREE_CONSTANT
   suffices.  (The start of this expression is on an elided line.)  */
1334 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1335 && (global_bindings_p () || static_flag
1336 ? 0 != initializer_constant_valid_p (var_init,
1337 TREE_TYPE (var_init))
1338 : TREE_CONSTANT (var_init))));
1340 = build_decl ((const_flag && init_const
1341 /* Only make a CONST_DECL for sufficiently-small objects.
1342 We consider complex double "sufficiently-small" */
1343 && TYPE_SIZE (type) != 0
1344 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1345 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1346 GET_MODE_SIZE (DCmode)))
1347 ? CONST_DECL : VAR_DECL, var_name, type);
1349 /* If this is external, throw away any initializations unless this is a
1350 CONST_DECL (meaning we have a constant); they will be done elsewhere.
1351 If we are defining a global here, leave a constant initialization and
1352 save any variable elaborations for the elaboration routine. If we are
1353 just annotating types, throw away the initialization if it isn't a
1356 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1357 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
/* Non-constant global initialization: defer it to the elaboration
   routine rather than emitting a static initializer.  */
1360 if (global_bindings_p () && var_init != 0 && ! init_const)
1362 add_pending_elaborations (var_decl, var_init);
1366 DECL_INITIAL (var_decl) = var_init;
1367 TREE_READONLY (var_decl) = const_flag;
1368 DECL_EXTERNAL (var_decl) = extern_flag;
1369 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1370 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1371 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1372 = TYPE_VOLATILE (type);
1374 /* At the global binding level we need to allocate static storage for the
1375 variable if and only if it's not external. If we are not at the top level
1376 we allocate automatic storage unless requested not to. */
1377 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1380 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1382 process_attributes (var_decl, attr_list);
1384 /* Add this decl to the current binding level and generate any
1385 needed code and RTL. */
1386 var_decl = pushdecl (var_decl);
1388 if (TREE_SIDE_EFFECTS (var_decl))
1389 TREE_ADDRESSABLE (var_decl) = 1;
1391 if (TREE_CODE (var_decl) != CONST_DECL)
1392 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1397 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1398 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1399 this field is in a record type with a "pragma pack". If SIZE is nonzero
1400 it is the specified size for this field. If POS is nonzero, it is the bit
1401 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1402 the address of this field for aliasing purposes. */
1405 create_field_decl (tree field_name,
1413 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1415 DECL_CONTEXT (field_decl) = record_type;
1416 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1418 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1419 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1420 if (packed && TYPE_MODE (field_type) == BLKmode)
1421 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1423 /* If a size is specified, use it. Otherwise, if the record type is packed
1424 compute a size to use, which may differ from the object's natural size.
1425 We always set a size in this case to trigger the checks for bitfield
1426 creation below, which is typically required when no position has been
1429 size = convert (bitsizetype, size);
1430 else if (packed == 1)
1432 size = rm_size (field_type);
1434 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1436 if (TREE_CODE (size) == INTEGER_CST
1437 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1438 size = round_up (size, BITS_PER_UNIT);
1441 /* Make a bitfield if a size is specified for two reasons: first if the size
1442 differs from the natural size. Second, if the alignment is insufficient.
1443 There are a number of ways the latter can be true.
1445 We never make a bitfield if the type of the field has a nonconstant size,
1446 or if it is claimed to be addressable, because no such entity requiring
1447 bitfield operations should reach here.
1449 We do *preventively* make a bitfield when there might be the need for it
1450 but we don't have all the necessary information to decide, as is the case
1451 of a field with no specified position in a packed record.
1453 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1454 in layout_decl or finish_record_type to clear the bit_field indication if
1455 it is in fact not needed. */
1456 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1457 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1459 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1461 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1462 bitsize_int (TYPE_ALIGN
1465 || (TYPE_ALIGN (record_type) != 0
1466 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1468 DECL_BIT_FIELD (field_decl) = 1;
1469 DECL_SIZE (field_decl) = size;
/* Without packing or an explicit position, pre-set the decl alignment
   so layout_decl has a sensible starting point.  */
1470 if (! packed && pos == 0)
1471 DECL_ALIGN (field_decl)
1472 = (TYPE_ALIGN (record_type) != 0
1473 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1474 : TYPE_ALIGN (field_type));
1477 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1478 DECL_ALIGN (field_decl)
1479 = MAX (DECL_ALIGN (field_decl),
1480 DECL_BIT_FIELD (field_decl) ? 1
1481 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1482 : TYPE_ALIGN (field_type));
1486 /* We need to pass in the alignment the DECL is known to have.
1487 This is the lowest-order bit set in POS, but no more than
1488 the alignment of the record, if one is specified. Note
1489 that an alignment of 0 is taken as infinite. */
1490 unsigned int known_align;
/* pos & -pos isolates the lowest set bit, i.e. the largest power of
   two dividing the position.  */
1492 if (host_integerp (pos, 1))
1493 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1495 known_align = BITS_PER_UNIT;
1497 if (TYPE_ALIGN (record_type)
1498 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1499 known_align = TYPE_ALIGN (record_type);
1501 layout_decl (field_decl, known_align);
1502 SET_DECL_OFFSET_ALIGN (field_decl,
1503 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1505 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1506 &DECL_FIELD_BIT_OFFSET (field_decl),
1507 DECL_OFFSET_ALIGN (field_decl), pos);
1509 DECL_HAS_REP_P (field_decl) = 1;
1512 /* If the field type is passed by reference, we will have pointers to the
1513 field, so it is addressable. */
1514 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1517 /* ??? For now, we say that any field of aggregate type is addressable
1518 because the front end may take 'Reference of it. */
1519 if (AGGREGATE_TYPE_P (field_type))
1522 /* Mark the decl as nonaddressable if it is indicated so semantically,
1523 meaning we won't ever attempt to take the address of the field.
1525 It may also be "technically" nonaddressable, meaning that even if we
1526 attempt to take the field's address we will actually get the address of a
1527 copy. This is the case for true bitfields, but the DECL_BIT_FIELD value
1528 we have at this point is not accurate enough, so we don't account for
1529 this here and let finish_record_type decide. */
1530 DECL_NONADDRESSABLE_P (field_decl) = ! addressable;
1535 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1536 effects, has the value of zero. */
1539 value_zerop (tree exp)
/* For a COMPOUND_EXPR the value is that of the second operand; recurse
   past any chain of them, then test the final value with integer_zerop.  */
1541 if (TREE_CODE (exp) == COMPOUND_EXPR)
1542 return value_zerop (TREE_OPERAND (exp, 1));
1544 return integer_zerop (exp);
1547 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1548 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1549 readonly (either an IN parameter or an address of a pass-by-ref
1553 create_param_decl (tree param_name, tree param_type, int readonly)
1555 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1557 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1558 lead to various ABI violations. */
1559 if (targetm.calls.promote_prototypes (param_type)
1560 && (TREE_CODE (param_type) == INTEGER_TYPE
1561 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1562 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1564 /* We have to be careful about biased types here. Make a subtype
1565 of integer_type_node with the proper biasing. */
1566 if (TREE_CODE (param_type) == INTEGER_TYPE
1567 && TYPE_BIASED_REPRESENTATION_P (param_type))
1570 = copy_type (build_range_type (integer_type_node,
1571 TYPE_MIN_VALUE (param_type),
1572 TYPE_MAX_VALUE (param_type)));
1574 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
/* Non-biased narrow integer/enum: simply promote to int.  */
1577 param_type = integer_type_node;
/* Record the (possibly promoted) type used for argument passing.  */
1580 DECL_ARG_TYPE (param_decl) = param_type;
1581 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1582 TREE_READONLY (param_decl) = readonly;
1586 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1589 process_attributes (tree decl, struct attrib *attr_list)
1591 for (; attr_list; attr_list = attr_list->next)
1592 switch (attr_list->type)
1594 case ATTR_MACHINE_ATTRIBUTE:
/* Delegate GCC machine attributes (e.g. from pragma Machine_Attribute)
   to the generic attribute machinery.  */
1595 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1597 ATTR_FLAG_TYPE_IN_PLACE);
1600 case ATTR_LINK_ALIAS:
1601 TREE_STATIC (decl) = 1;
1602 assemble_alias (decl, attr_list->name);
1605 case ATTR_WEAK_EXTERNAL:
/* Only emit a weak declaration when the target supports it; otherwise
   warn at the original source location.  */
1607 declare_weak (decl);
1609 post_error ("?weak declarations not supported on this target",
1610 attr_list->error_point);
1613 case ATTR_LINK_SECTION:
1614 if (targetm.have_named_sections)
1616 DECL_SECTION_NAME (decl)
1617 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1618 IDENTIFIER_POINTER (attr_list->name));
1621 post_error ("?section attributes are not supported for this target",
1622 attr_list->error_point);
1627 /* Add some pending elaborations on the list. */
1630 add_pending_elaborations (tree var_decl, tree var_init)
/* Let the front end diagnose contexts where elaboration code is illegal
   (e.g. pragma No_Elaboration_Code) before queuing the pair.  */
1633 Check_Elaboration_Code_Allowed (error_gnat_node);
1635 pending_elaborations
1636 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1639 /* Obtain any pending elaborations and clear the old list. */
1642 get_pending_elaborations (void)
1644 /* Each thing added to the list went on the end; we want it on the
/* The list has a dummy head node: the real entries start at TREE_CHAIN,
   which is returned after being detached.  */
1646 tree result = TREE_CHAIN (pending_elaborations);
1648 TREE_CHAIN (pending_elaborations) = 0;
1652 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1656 value_factor_p (tree value, int factor)
1658 if (host_integerp (value, 1))
1659 return tree_low_cst (value, 1) % factor == 0;
/* For a product it suffices that either operand is a multiple; this is
   conservative (may return false for multiples it cannot prove).  */
1661 if (TREE_CODE (value) == MULT_EXPR)
1662 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1663 || value_factor_p (TREE_OPERAND (value, 1), factor));
1668 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1669 unless we can prove these 2 fields are laid out in such a way that no gap
1670 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1671 is the distance in bits between the end of PREV_FIELD and the starting
1672 position of CURR_FIELD. It is ignored if null. */
1675 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1677 /* If this is the first field of the record, there cannot be any gap */
1681 /* If the previous field is a union type, then return False: The only
1682 time when such a field is not the last field of the record is when
1683 there are other components at fixed positions after it (meaning there
1684 was a rep clause for every field), in which case we don't want the
1685 alignment constraint to override them. */
1686 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1689 /* If the distance between the end of prev_field and the beginning of
1690 curr_field is constant, then there is a gap if the value of this
1691 constant is nonzero. */
1692 if (offset && host_integerp (offset, 1))
1693 return (!integer_zerop (offset));
1695 /* If the size and position of the previous field are constant,
1696 then check the sum of this size and position. There will be a gap
1697 iff it is not multiple of the current field alignment. */
1698 if (host_integerp (DECL_SIZE (prev_field), 1)
1699 && host_integerp (bit_position (prev_field), 1))
1700 return ((tree_low_cst (bit_position (prev_field), 1)
1701 + tree_low_cst (DECL_SIZE (prev_field), 1))
1702 % DECL_ALIGN (curr_field) != 0);
1704 /* If both the position and size of the previous field are multiples
1705 of the current field alignment, there cannot be any gap. */
1706 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1707 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1710 /* Fallback, return that there may be a potential gap */
1714 /* Return nonzero if there are pending elaborations. */
1717 pending_elaborations_p (void)
/* Entries hang off the dummy head's TREE_CHAIN; nonempty iff set.  */
1719 return TREE_CHAIN (pending_elaborations) != 0;
1722 /* Save a copy of the current pending elaboration list and make a new
1726 push_pending_elaborations (void)
1728 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
/* Link the saved list onto the GC-allocated stack and start a fresh
   list with a new dummy head node.  */
1730 p->next = elist_stack;
1731 p->elab_list = pending_elaborations;
1733 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1736 /* Pop the stack of pending elaborations. */
1739 pop_pending_elaborations (void)
1741 struct e_stack *p = elist_stack;
1743 pending_elaborations = p->elab_list;
1744 elist_stack = p->next;
1747 /* Return the current position in pending_elaborations so we can insert
1748 elaborations after that point. */
1751 get_elaboration_location (void)
1753 return tree_last (pending_elaborations);
1756 /* Insert the current elaborations after ELAB, which is in some elaboration
1760 insert_elaboration_list (tree elab)
1762 tree next = TREE_CHAIN (elab);
/* Splice the current pending list between ELAB and its old successor,
   then reset the current list to empty.  */
1764 if (TREE_CHAIN (pending_elaborations))
1766 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1767 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1768 TREE_CHAIN (pending_elaborations) = 0;
1772 /* Returns a LABEL_DECL node for LABEL_NAME. */
1775 create_label_decl (tree label_name)
1777 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Labels belong to the current function and carry no machine mode.  */
1779 DECL_CONTEXT (label_decl) = current_function_decl;
1780 DECL_MODE (label_decl) = VOIDmode;
1781 DECL_SOURCE_LOCATION (label_decl) = input_location;
1786 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1787 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1788 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1789 PARM_DECL nodes chained through the TREE_CHAIN field).
1791 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1792 appropriate fields in the FUNCTION_DECL. */
1795 create_subprog_decl (tree subprog_name,
1798 tree param_decl_list,
1802 struct attrib *attr_list)
1804 tree return_type = TREE_TYPE (subprog_type);
1805 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1807 /* If this is a function nested inside an inlined external function, it
1808 means we aren't going to compile the outer function unless it is
1809 actually inlined, so do the same for us. */
1810 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1811 && DECL_EXTERNAL (current_function_decl))
/* Copy readonly/volatile from the FUNCTION_TYPE (Ada pure/volatile
   subprograms) onto the decl so RTL expansion sees them.  */
1814 DECL_EXTERNAL (subprog_decl) = extern_flag;
1815 TREE_PUBLIC (subprog_decl) = public_flag;
1816 TREE_STATIC (subprog_decl) = 1;
1817 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1818 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1819 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1820 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1821 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1824 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1827 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1829 process_attributes (subprog_decl, attr_list);
1831 /* Add this decl to the current binding level. */
1832 subprog_decl = pushdecl (subprog_decl);
1834 /* Output the assembler code and/or RTL for the declaration. */
1835 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1837 return subprog_decl;
1840 /* Count how deep we are into nested functions. This is because
1841 we shouldn't call the backend function context routines unless we
1842 are in a nested function. */
1844 static int function_nesting_depth;
1846 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1847 body. This routine needs to be invoked before processing the declarations
1848 appearing in the subprogram. */
1851 begin_subprog_body (tree subprog_decl)
/* Only save/restore backend function context for nested functions;
   the outermost body needs no push.  */
1855 if (function_nesting_depth++ != 0)
1856 push_function_context ();
1858 announce_function (subprog_decl);
1860 /* Make this field nonzero so further routines know that this is not
1861 tentative. error_mark_node is replaced below with the adequate BLOCK. */
1862 DECL_INITIAL (subprog_decl) = error_mark_node;
1864 /* This function exists in static storage. This does not mean `static' in
1866 TREE_STATIC (subprog_decl) = 1;
1868 /* Enter a new binding level and show that all the parameters belong to
1870 current_function_decl = subprog_decl;
1873 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1874 param_decl = TREE_CHAIN (param_decl))
1875 DECL_CONTEXT (param_decl) = subprog_decl;
1877 init_function_start (subprog_decl);
1878 expand_function_start (subprog_decl, 0);
1881 /* Finish the definition of the current subprogram and compile it all the way
1882 to assembler language output. BODY is the tree corresponding to
1886 end_subprog_body (tree body)
1888 tree fndecl = current_function_decl;
1890 /* Mark the BLOCK for this level as being for this function and pop the
1891 level. Since the vars in it are the parameters, clear them. */
1892 BLOCK_VARS (current_binding_level->block) = 0;
1893 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1894 DECL_INITIAL (fndecl) = current_binding_level->block;
1897 /* Deal with inline. If declared inline or we should default to inline,
1898 set the flag in the decl. */
1899 DECL_INLINE (fndecl)
1900 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
1902 /* Initialize the RTL code for the function. */
1903 allocate_struct_function (fndecl);
1905 /* We handle pending sizes via the elaboration of types, so we don't
1906 need to save them. */
1907 get_pending_sizes ();
1909 /* Mark the RESULT_DECL as being in this subprogram. */
1910 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1912 DECL_SAVED_TREE (fndecl) = body;
/* Restore the outer function (if any) as current before compiling.  */
1914 current_function_decl = DECL_CONTEXT (fndecl);
1916 /* If we're only annotating types, don't actually compile this function. */
1917 if (type_annotate_only)
1920 /* We do different things for nested and non-nested functions.
1921 ??? This should be in cgraph. */
1922 if (!DECL_CONTEXT (fndecl))
/* Top-level function: gimplify it (and its nested functions), lower
   the nesting, then hand everything to cgraph for final compilation.  */
1924 gnat_gimplify_function (fndecl);
1925 lower_nested_functions (fndecl);
1926 gnat_finalize (fndecl);
1929 /* Register this function with cgraph just far enough to get it
1930 added to our parent's nested function list. */
1931 (void) cgraph_node (fndecl);
1934 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1937 gnat_gimplify_function (tree fndecl)
1939 struct cgraph_node *cgn;
/* Dump the original tree, gimplify, then dump the GIMPLE form for
   -fdump-tree-original / -fdump-tree-generic debugging.  */
1941 dump_function (TDI_original, fndecl);
1942 gimplify_function_tree (fndecl);
1943 dump_function (TDI_generic, fndecl);
1945 /* Convert all nested functions to GIMPLE now. We do things in this order
1946 so that items like VLA sizes are expanded properly in the context of the
1947 correct function. */
1948 cgn = cgraph_node (fndecl);
1949 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1950 gnat_gimplify_function (cgn->decl);
1953 /* Give FNDECL and all its nested functions to cgraph for compilation. */
1956 gnat_finalize (tree fndecl)
1958 struct cgraph_node *cgn;
1960 /* Finalize all nested functions now. */
1961 cgn = cgraph_node (fndecl);
1962 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
1963 gnat_finalize (cgn->decl);
/* 'false' = not nested within another finalization request.  */
1965 cgraph_finalize_function (fndecl, false);
1968 /* Return a definition for a builtin function named NAME and whose data type
1969 is TYPE. TYPE should be a function type with argument types.
1970 FUNCTION_CODE tells later passes how to compile calls to this function.
1971 See tree.h for its possible values.
1973 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1974 the name to be called if we can't opencode the function. If
1975 ATTRS is nonzero, use that for the function attribute list. */
1978 builtin_function (const char *name,
1981 enum built_in_class class,
1982 const char *library_name,
1985 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
/* Builtins are external, public declarations -- no body is supplied.  */
1987 DECL_EXTERNAL (decl) = 1;
1988 TREE_PUBLIC (decl) = 1;
1990 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1993 DECL_BUILT_IN_CLASS (decl) = class;
1994 DECL_FUNCTION_CODE (decl) = function_code;
1996 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
2000 /* Return an integer type with the number of bits of precision given by
2001 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2002 it is a signed type. */
2005 gnat_type_for_size (unsigned precision, int unsignedp)
/* Types up to 2 * MAX_BITS_PER_WORD bits are cached per precision and
   signedness so repeated requests share one node.  */
2010 if (precision <= 2 * MAX_BITS_PER_WORD
2011 && signed_and_unsigned_types[precision][unsignedp] != 0)
2012 return signed_and_unsigned_types[precision][unsignedp];
2015 t = make_unsigned_type (precision);
2017 t = make_signed_type (precision);
2019 if (precision <= 2 * MAX_BITS_PER_WORD)
2020 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give anonymous types a debugger-friendly name, e.g. UNSIGNED_32.  */
2022 if (TYPE_NAME (t) == 0)
2024 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2025 TYPE_NAME (t) = get_identifier (type_name);
2031 /* Likewise for floating-point types. */
2034 float_type_for_precision (int precision, enum machine_mode mode)
/* One cached type per machine mode.  */
2039 if (float_types[(int) mode] != 0)
2040 return float_types[(int) mode];
2042 float_types[(int) mode] = t = make_node (REAL_TYPE);
2043 TYPE_PRECISION (t) = precision;
/* Consistency check: laying out a REAL_TYPE of this precision must
   have selected exactly MODE.  */
2046 if (TYPE_MODE (t) != mode)
/* Give anonymous types a synthetic name such as "FLOAT_64".  */
2049 if (TYPE_NAME (t) == 0)
2051 sprintf (type_name, "FLOAT_%d", precision);
2052 TYPE_NAME (t) = get_identifier (type_name);
2058 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2059 an unsigned type; otherwise a signed type is returned. */
2062 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2064 if (mode == BLKmode)
2066 else if (mode == VOIDmode)
2067 return void_type_node;
2068 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2069 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
/* Any remaining mode is handled as an integer mode of the
   corresponding bit size.  */
2071 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2074 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2077 gnat_unsigned_type (tree type_node)
2079 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For an Ada modular type, return a copy whose TREE_TYPE points back
   at TYPE_NODE so the modular information is not lost.  */
2081 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2083 type = copy_node (type);
2084 TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE itself refers (via TREE_TYPE, presumably
   its base type — TODO confirm) to a modular integer type.  */
2086 else if (TREE_TYPE (type_node) != 0
2087 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2088 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2090 type = copy_node (type);
2091 TREE_TYPE (type) = TREE_TYPE (type_node);
2097 /* Return the signed version of a TYPE_NODE, a scalar type. */
2100 gnat_signed_type (tree type_node)
2102 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Mirror of gnat_unsigned_type: preserve the link to a modular type
   through TREE_TYPE of a copied node.  */
2104 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2106 type = copy_node (type);
2107 TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE refers (via TREE_TYPE, presumably its base
   type — TODO confirm) to a modular integer type.  */
2109 else if (TREE_TYPE (type_node) != 0
2110 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2111 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2113 type = copy_node (type);
2114 TREE_TYPE (type) = TREE_TYPE (type_node);
2120 /* Return a type the same as TYPE except unsigned or signed according to
2124 gnat_signed_or_unsigned_type (int unsignedp, tree type)
/* Nothing to do for non-integral types or when the signedness already
   matches the request.  */
2126 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
2129 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2132 /* EXP is an expression for the size of an object. If this size contains
2133 discriminant references, replace them with the maximum (if MAX_P) or
2134 minimum (if ! MAX_P) possible value of the discriminant. */
2137 max_size (tree exp, int max_p)
2139 enum tree_code code = TREE_CODE (exp);
2140 tree type = TREE_TYPE (exp);
2142 switch (TREE_CODE_CLASS (code))
/* A TREE_LIST is processed element-wise, recursing on both the value
   and the rest of the chain.  */
2149 if (code == TREE_LIST)
2150 return tree_cons (TREE_PURPOSE (exp),
2151 max_size (TREE_VALUE (exp), max_p),
2152 TREE_CHAIN (exp) != 0
2153 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2157 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2158 modify. Otherwise, we treat it like a variable. */
2159 if (! CONTAINS_PLACEHOLDER_P (exp))
/* Replace the discriminant reference with the extreme value of the
   discriminant's type; recurse with max_p = 1 in case that bound
   itself contains placeholders.  */
2162 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2164 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2167 return max_p ? size_one_node : size_zero_node;
/* Expressions: dispatch on arity.  */
2172 switch (TREE_CODE_LENGTH (code))
2175 if (code == NON_LVALUE_EXPR)
2176 return max_size (TREE_OPERAND (exp, 0), max_p);
/* NEGATE_EXPR flips which extreme of the operand is wanted.  */
2179 fold (build1 (code, type,
2180 max_size (TREE_OPERAND (exp, 0),
2181 code == NEGATE_EXPR ? ! max_p : max_p)));
2184 if (code == RTL_EXPR)
2186 else if (code == COMPOUND_EXPR)
2187 return max_size (TREE_OPERAND (exp, 1), max_p);
2190 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
/* For MINUS_EXPR the right operand works against us, so it gets the
   opposite extreme.  */
2191 tree rhs = max_size (TREE_OPERAND (exp, 1),
2192 code == MINUS_EXPR ? ! max_p : max_p);
2194 /* Special-case wanting the maximum value of a MIN_EXPR.
2195 In that case, if one side overflows, return the other.
2196 sizetype is signed, but we know sizes are non-negative.
2197 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2198 overflowing or the maximum possible value and the RHS
2200 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2202 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2204 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2205 && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
2206 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2207 && ! TREE_CONSTANT (rhs))
2210 return fold (build (code, type, lhs, rhs));
2214 if (code == SAVE_EXPR)
/* A conditional reduces to the max (or min) of its two arms.  */
2216 else if (code == COND_EXPR)
2217 return fold (build (max_p ? MAX_EXPR : MIN_EXPR, type,
2218 max_size (TREE_OPERAND (exp, 1), max_p),
2219 max_size (TREE_OPERAND (exp, 2), max_p)));
/* For a call with arguments, rebuild the call with the argument list
   processed recursively.  */
2220 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2221 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2222 max_size (TREE_OPERAND (exp, 1), max_p), NULL);
2229 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2230 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2231 Return a constructor for the template. */
2234 build_template (tree template_type, tree array_type, tree expr)
2236 tree template_elts = NULL_TREE;
2237 tree bound_list = NULL_TREE;
/* Strip a padding or left-justified-modular wrapper record to reach
   the underlying array type.  */
2240 if (TREE_CODE (array_type) == RECORD_TYPE
2241 && (TYPE_IS_PADDING_P (array_type)
2242 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2243 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* Types flagged TYPE_HAS_ACTUAL_BOUNDS_P carry their real bounds in
   TYPE_ACTUAL_BOUNDS; prefer those when available.  */
2245 if (TREE_CODE (array_type) == ARRAY_TYPE
2246 || (TREE_CODE (array_type) == INTEGER_TYPE
2247 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2248 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2250 /* First make the list for a CONSTRUCTOR for the template. Go down the
2251 field list of the template instead of the type chain because this
2252 array might be an Ada array of arrays and we can't tell where the
2253 nested arrays stop being the underlying object. */
2255 for (field = TYPE_FIELDS (template_type); field;
2257 ? (bound_list = TREE_CHAIN (bound_list))
2258 : (array_type = TREE_TYPE (array_type))),
2259 field = TREE_CHAIN (TREE_CHAIN (field)))
2261 tree bounds, min, max;
2263 /* If we have a bound list, get the bounds from there. Likewise
2264 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2265 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2266 This will give us a maximum range. */
2267 if (bound_list != 0)
2268 bounds = TREE_VALUE (bound_list)
2269 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2270 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2271 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2272 && DECL_BY_COMPONENT_PTR_P (expr))
2273 bounds = TREE_TYPE (field);
/* The template stores each dimension as a (min, max) pair of fields;
   convert the bounds to the corresponding field types.  */
2277 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2278 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2280 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2281 substitute it from OBJECT. */
2282 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2283 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Accumulate in reverse; nreverse below restores field order.  */
2285 template_elts = tree_cons (TREE_CHAIN (field), max,
2286 tree_cons (field, min, template_elts));
2289 return gnat_build_constructor (template_type, nreverse (template_elts));
2292 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2293 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2294 in the type contains in its DECL_INITIAL the expression to use when
2295 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2296 to print out an error message if the mechanism cannot be applied to
2297 an object of that type and also for the name. */
2300 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2302 tree record_type = make_node (RECORD_TYPE);
2303 tree field_list = 0;
2312 /* If TYPE is an unconstrained array, use the underlying array type. */
2313 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2314 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2316 /* If this is an array, compute the number of dimensions in the array,
2317 get the index types, and point to the inner type. */
2318 if (TREE_CODE (type) != ARRAY_TYPE)
2321 for (ndim = 1, inner_type = type;
2322 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2323 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2324 ndim++, inner_type = TREE_TYPE (inner_type))
2327 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* For Fortran-convention arrays (except with NCA descriptors) the
   index types are collected into idx_arr in reverse order.  */
2329 if (mech != By_Descriptor_NCA
2330 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2331 for (i = ndim - 1, inner_type = type;
2333 i--, inner_type = TREE_TYPE (inner_type))
2334 idx_arr[i] = TYPE_DOMAIN (inner_type);
2336 for (i = 0, inner_type = type;
2338 i++, inner_type = TREE_TYPE (inner_type))
2339 idx_arr[i] = TYPE_DOMAIN (inner_type);
2341 /* Now get the DTYPE value. */
/* The numeric codes below are VMS DTYPE encodings per the OpenVMS
   calling standard, selected by type class, size and signedness.  */
2342 switch (TREE_CODE (type))
2346 if (TYPE_VAX_FLOATING_POINT_P (type))
2347 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2360 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2363 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2366 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2369 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2372 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2375 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2381 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2385 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2386 && TYPE_VAX_FLOATING_POINT_P (type))
2387 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2399 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2410 /* Get the CLASS value. */
2413 case By_Descriptor_A:
2416 case By_Descriptor_NCA:
2419 case By_Descriptor_SB:
2426 /* Make the type for a descriptor for VMS. The first four fields
2427 are the same for all types. */
2430 = chainon (field_list,
2431 make_descriptor_field
2432 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2433 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2435 field_list = chainon (field_list,
2436 make_descriptor_field ("DTYPE",
2437 gnat_type_for_size (8, 1),
2438 record_type, size_int (dtype)));
2439 field_list = chainon (field_list,
2440 make_descriptor_field ("CLASS",
2441 gnat_type_for_size (8, 1),
2442 record_type, size_int (class)));
/* POINTER field: a 32-bit (SImode) pointer to the object; the
   PLACEHOLDER_EXPR stands for the object and is substituted when a
   constructor for the descriptor is built.  */
2445 = chainon (field_list,
2446 make_descriptor_field
2448 build_pointer_type_for_mode (type, SImode, false), record_type,
2450 build_pointer_type_for_mode (type, SImode, false),
2451 build (PLACEHOLDER_EXPR, type))));
/* The remaining fields depend on the descriptor class.  */
2456 case By_Descriptor_S:
2459 case By_Descriptor_SB:
/* SB descriptors add the lower and upper bound of the (one
   visible) dimension.  */
2461 = chainon (field_list,
2462 make_descriptor_field
2463 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2464 TREE_CODE (type) == ARRAY_TYPE
2465 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2467 = chainon (field_list,
2468 make_descriptor_field
2469 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2470 TREE_CODE (type) == ARRAY_TYPE
2471 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2474 case By_Descriptor_A:
2475 case By_Descriptor_NCA:
2476 field_list = chainon (field_list,
2477 make_descriptor_field ("SCALE",
2478 gnat_type_for_size (8, 1),
2482 field_list = chainon (field_list,
2483 make_descriptor_field ("DIGITS",
2484 gnat_type_for_size (8, 1),
2489 = chainon (field_list,
2490 make_descriptor_field
2491 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2492 size_int (mech == By_Descriptor_NCA
2494 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2495 : (TREE_CODE (type) == ARRAY_TYPE
2496 && TYPE_CONVENTION_FORTRAN_P (type)
2499 field_list = chainon (field_list,
2500 make_descriptor_field ("DIMCT",
2501 gnat_type_for_size (8, 1),
2505 field_list = chainon (field_list,
2506 make_descriptor_field ("ARSIZE",
2507 gnat_type_for_size (32, 1),
2509 size_in_bytes (type)));
2511 /* Now build a pointer to the 0,0,0... element. */
2512 tem = build (PLACEHOLDER_EXPR, type);
2513 for (i = 0, inner_type = type; i < ndim;
2514 i++, inner_type = TREE_TYPE (inner_type))
2515 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2516 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2517 NULL_TREE, NULL_TREE);
2520 = chainon (field_list,
2521 make_descriptor_field
2523 build_pointer_type_for_mode (inner_type, SImode, false),
2526 build_pointer_type_for_mode (inner_type, SImode,
2530 /* Next come the addressing coefficients. */
2532 for (i = 0; i < ndim; i++)
/* Running product of dimension extents (max - min + 1) seen so
   far, used as the addressing coefficient for this dimension.  */
2536 = size_binop (MULT_EXPR, tem,
2537 size_binop (PLUS_EXPR,
2538 size_binop (MINUS_EXPR,
2539 TYPE_MAX_VALUE (idx_arr[i]),
2540 TYPE_MIN_VALUE (idx_arr[i])),
/* Coefficient fields are named S0..Sn for NCA descriptors and
   M0..Mn otherwise.  */
2543 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2544 fname[1] = '0' + i, fname[2] = 0;
2546 = chainon (field_list,
2547 make_descriptor_field (fname,
2548 gnat_type_for_size (32, 1),
2549 record_type, idx_length));
2551 if (mech == By_Descriptor_NCA)
2555 /* Finally here are the bounds. */
2556 for (i = 0; i < ndim; i++)
2560 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2562 = chainon (field_list,
2563 make_descriptor_field
2564 (fname, gnat_type_for_size (32, 1), record_type,
2565 TYPE_MIN_VALUE (idx_arr[i])));
2569 = chainon (field_list,
2570 make_descriptor_field
2571 (fname, gnat_type_for_size (32, 1), record_type,
2572 TYPE_MAX_VALUE (idx_arr[i])));
/* Mechanism not handled above: report it against GNAT_ENTITY.  */
2577 post_error ("unsupported descriptor type for &", gnat_entity);
/* Lay out the descriptor record and declare it under the entity's
   name suffixed with "DESC".  */
2580 finish_record_type (record_type, field_list, 0, 1);
2581 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2587 /* Utility routine for above code to make a field. */
2590 make_descriptor_field (const char *name, tree type,
2591 tree rec_type, tree initial)
2594 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
/* DECL_INITIAL holds the expression to use when a constructor for the
   descriptor type is built (see build_vms_descriptor above).  */
2596 DECL_INITIAL (field) = initial;
2600 /* Build a type to be used to represent an aliased object whose nominal
2601 type is an unconstrained array. This consists of a RECORD_TYPE containing
2602 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2603 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2604 is used to represent an arbitrary unconstrained object. Use NAME
2605 as the name of the record. */
2608 build_unc_object_type (tree template_type, tree object_type, tree name)
2610 tree type = make_node (RECORD_TYPE);
/* Field "BOUNDS" carries the template (the array bounds), field
   "ARRAY" the data itself.  */
2611 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2612 template_type, type, 0, 0, 0, 1);
2613 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2616 TYPE_NAME (type) = name;
/* Flag the record so the rest of the front end recognizes it as the
   bounds-plus-data representation.  */
2617 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2618 finish_record_type (type,
2619 chainon (chainon (NULL_TREE, template_field),
2626 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2627 the normal case this is just two adjustments, but we have more to do
2628 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2631 update_pointer_to (tree old_type, tree new_type)
2633 tree ptr = TYPE_POINTER_TO (old_type);
2634 tree ref = TYPE_REFERENCE_TO (old_type);
2638 /* If this is the main variant, process all the other variants first. */
2639 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2640 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2641 type = TYPE_NEXT_VARIANT (type))
2642 update_pointer_to (type, new_type);
2644 /* If no pointer or reference, we are done. */
2645 if (ptr == 0 && ref == 0)
2648 /* Merge the old type qualifiers in the new type.
2650 Each old variant has qualifiers for specific reasons, and the new
2651 designated type as well. Each set of qualifiers represents useful
2652 information grabbed at some point, and merging the two simply unifies
2653 these inputs into the final type description.
2655 Consider for instance a volatile type frozen after an access to constant
2656 type designating it. After the designated type freeze, we get here with a
2657 volatile new_type and a dummy old_type with a readonly variant, created
2658 when the access type was processed. We shall make a volatile and readonly
2659 designated type, because that's what it really is.
2661 We might also get here for a non-dummy old_type variant with different
2662 qualifiers than the new_type ones, for instance in some cases of pointers
2663 to private record type elaboration (see the comments around the call to
2664 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2665 qualifiers in thoses cases too, to avoid accidentally discarding the
2666 initial set, and will often end up with old_type == new_type then. */
2667 new_type = build_qualified_type (new_type,
2668 TYPE_QUALS (old_type)
2669 | TYPE_QUALS (new_type));
2671 /* If the new type and the old one are identical, there is nothing to
2673 if (old_type == new_type)
2676 /* Otherwise, first handle the simple case. */
2677 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2679 TYPE_POINTER_TO (new_type) = ptr;
2680 TYPE_REFERENCE_TO (new_type) = ref;
/* Retarget every pointer type to OLD_TYPE, and every variant of each,
   at NEW_TYPE, re-emitting debug info for named pointer types.  */
2682 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
2683 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
2684 ptr1 = TYPE_NEXT_VARIANT (ptr1))
2686 TREE_TYPE (ptr1) = new_type;
2688 if (TYPE_NAME (ptr1) != 0
2689 && TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
2690 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2691 rest_of_decl_compilation (TYPE_NAME (ptr1), NULL,
2692 global_bindings_p (), 0);
/* Same for reference types.  NOTE(review): this walks REF with
   TYPE_NEXT_PTR_TO; references normally chain via TYPE_NEXT_REF_TO —
   confirm against tree.h.  */
2695 for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
2696 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
2697 ref1 = TYPE_NEXT_VARIANT (ref1))
2699 TREE_TYPE (ref1) = new_type;
2701 if (TYPE_NAME (ref1) != 0
2702 && TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
2703 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2704 rest_of_decl_compilation (TYPE_NAME (ref1), NULL,
2705 global_bindings_p (), 0);
2709 /* Now deal with the unconstrained array case. In this case the "pointer"
2710 is actually a RECORD_TYPE where the types of both fields are
2711 pointers to void. In that case, copy the field list from the
2712 old type to the new one and update the fields' context. */
2713 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2718 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
/* Graft the fat pointer's field list from the new type onto the old
   record and reparent the two fields.  */
2723 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2724 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2725 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2727 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2730 ??? This is now the only use of gnat_substitute_in_type, which
2731 is now a very "heavy" routine to do this, so it should be replaced
2733 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2734 new_ref = build (COMPONENT_REF, ptr_temp_type,
2735 build (PLACEHOLDER_EXPR, ptr),
2736 TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);
2739 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2740 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2741 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
/* Every variant of the fat pointer now designates NEW_TYPE.  */
2743 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2744 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2746 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2747 = TREE_TYPE (new_type) = ptr;
2749 /* Now handle updating the allocation record, what the thin pointer
2750 points to. Update all pointers from the old record into the new
2751 one, update the types of the fields, and recompute the size. */
2753 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2755 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2756 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2757 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2758 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2759 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2760 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2761 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
/* The record's size is the sum of its two fields' sizes.  */
2763 TYPE_SIZE (new_obj_rec)
2764 = size_binop (PLUS_EXPR,
2765 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2766 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2767 TYPE_SIZE_UNIT (new_obj_rec)
2768 = size_binop (PLUS_EXPR,
2769 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2770 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2771 rest_of_type_compilation (ptr, global_bindings_p ());
2775 /* Convert a pointer to a constrained array into a pointer to a fat
2776 pointer. This involves making or finding a template. */
2779 convert_to_fat_pointer (tree type, tree expr)
/* The fat pointer's second field points to the template; its type
   gives us the template type to build.  */
2781 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2782 tree template, template_addr;
2783 tree etype = TREE_TYPE (expr);
2785 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2786 pointer to the template and array. */
2787 if (integer_zerop (expr))
2789 gnat_build_constructor
2791 tree_cons (TYPE_FIELDS (type),
2792 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2793 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2794 convert (build_pointer_type (template_type),
2798 /* If EXPR is a thin pointer, make the template and data from the record. */
2800 else if (TYPE_THIN_POINTER_P (etype))
2802 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2804 expr = save_expr (expr);
/* Dereference the thin pointer, folding &X back to X when possible,
   so the template and array components can be referenced.  */
2805 if (TREE_CODE (expr) == ADDR_EXPR)
2806 expr = TREE_OPERAND (expr, 0);
2808 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* The first field is the template, the second the array data.  */
2810 template = build_component_ref (expr, NULL_TREE, fields, 0);
2811 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2812 build_component_ref (expr, NULL_TREE,
2813 TREE_CHAIN (fields), 0));
2816 /* Otherwise, build the constructor for the template. */
2817 template = build_template (template_type, TREE_TYPE (etype), expr);
2819 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2821 /* The result is a CONSTRUCTOR for the fat pointer.
2823 If expr is an argument of a foreign convention subprogram, the type it
2824 points to is directly the component type. In this case, the expression
2825 type may not match the corresponding FIELD_DECL type at this point, so we
2826 call "convert" here to fix that up if necessary. This type consistency is
2827 required, for instance because it ensures that possible later folding of
2828 component_refs against this constructor always yields something of the
2829 same type as the initial reference.
2831 Note that the call to "build_template" above is still fine, because it
2832 will only refer to the provided template_type in this case. */
2834 gnat_build_constructor
2835 (type, tree_cons (TYPE_FIELDS (type),
2836 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2837 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2838 template_addr, NULL_TREE)));
2841 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2842 is something that is a fat pointer, so convert to it first if it EXPR
2843 is not already a fat pointer. */
2846 convert_to_thin_pointer (tree type, tree expr)
/* Normalize EXPR to the fat pointer form of the unconstrained array
   that TYPE's designated record belongs to.  */
2848 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2850 = convert_to_fat_pointer
2851 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2853 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* The fat pointer's first field is the data pointer; reinterpret it
   as the thin pointer type without changing the value.  */
2856 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2857 expr = build1 (NOP_EXPR, type, expr);
2862 /* Create an expression whose value is that of EXPR,
2863 converted to type TYPE. The TREE_TYPE of the value
2864 is always TYPE. This function implements all reasonable
2865 conversions; callers should filter out those that are
2866 not permitted by the language being compiled. */
2869 convert (tree type, tree expr)
2871 enum tree_code code = TREE_CODE (type);
2872 tree etype = TREE_TYPE (expr);
2873 enum tree_code ecode = TREE_CODE (etype);
2876 /* If EXPR is already the right type, we are done. */
2880 /* If the input type has padding, remove it by doing a component reference
2881 to the field. If the output type has padding, make a constructor
2882 to build the record. If both input and output have padding and are
2883 of variable size, do this as an unchecked conversion. */
2884 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
2885 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2886 && (! TREE_CONSTANT (TYPE_SIZE (type))
2887 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2889 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2891 /* If we have just converted to this padded type, just get
2892 the inner expression. */
2893 if (TREE_CODE (expr) == CONSTRUCTOR
2894 && CONSTRUCTOR_ELTS (expr) != 0
2895 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2896 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2898 return convert (type, build_component_ref (expr, NULL_TREE,
2899 TYPE_FIELDS (etype), 0));
2901 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2903 /* If we previously converted from another type and our type is
2904 of variable size, remove the conversion to avoid the need for
2905 variable-size temporaries. */
2906 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2907 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2908 expr = TREE_OPERAND (expr, 0);
2910 /* If we are just removing the padding from expr, convert the original
2911 object if we have variable size. That will avoid the need
2912 for some variable-size temporaries. */
2913 if (TREE_CODE (expr) == COMPONENT_REF
2914 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2915 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2916 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2917 return convert (type, TREE_OPERAND (expr, 0));
2919 /* If the result type is a padded type with a self-referentially-sized
2920 field and the expression type is a record, do this as an
2921 unchecked converstion. */
2922 else if (TREE_CODE (etype) == RECORD_TYPE
2923 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2924 return unchecked_convert (type, expr, 0);
2928 gnat_build_constructor (type,
2929 tree_cons (TYPE_FIELDS (type),
2931 (TYPE_FIELDS (type)),
2936 /* If the input is a biased type, adjust first. */
2937 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2938 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2939 fold (build1 (NOP_EXPR,
2940 TREE_TYPE (etype), expr)),
2941 TYPE_MIN_VALUE (etype))));
2943 /* If the input is a left-justified modular type, we need to extract
2944 the actual object before converting it to any other type with the
2945 exception of an unconstrained array. */
2946 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2947 && code != UNCONSTRAINED_ARRAY_TYPE)
2948 return convert (type, build_component_ref (expr, NULL_TREE,
2949 TYPE_FIELDS (etype), 0));
2951 /* If converting to a type that contains a template, convert to the data
2952 type and then build the template. */
2953 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
2955 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2957 /* If the source already has a template, get a reference to the
2958 associated array only, as we are going to rebuild a template
2959 for the target type anyway. */
2960 expr = maybe_unconstrained_array (expr);
2963 gnat_build_constructor
2965 tree_cons (TYPE_FIELDS (type),
2966 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2967 obj_type, NULL_TREE),
2968 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2969 convert (obj_type, expr), NULL_TREE)));
2972 /* There are some special cases of expressions that we process
2974 switch (TREE_CODE (expr))
2980 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2981 conversion in gnat_expand_expr. NULL_EXPR does not represent
2982 and actual value, so no conversion is needed. */
2983 expr = copy_node (expr);
2984 TREE_TYPE (expr) = type;
2989 /* If we are converting a STRING_CST to another constrained array type,
2990 just make a new one in the proper type. Likewise for
2991 CONSTRUCTOR if the alias sets are the same. */
2992 if (code == ecode && AGGREGATE_TYPE_P (etype)
2993 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2994 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2995 && (TREE_CODE (expr) == STRING_CST
2996 || get_alias_set (etype) == get_alias_set (type)))
2998 expr = copy_node (expr);
2999 TREE_TYPE (expr) = type;
3005 /* If we are converting between two aggregate types of the same
3006 kind, size, mode, and alignment, just make a new COMPONENT_REF.
3007 This avoid unneeded conversions which makes reference computations
3009 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
3010 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3011 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3012 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0)
3013 && get_alias_set (type) == get_alias_set (etype))
3014 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
3015 TREE_OPERAND (expr, 1), NULL_TREE);
3019 case UNCONSTRAINED_ARRAY_REF:
3020 /* Convert this to the type of the inner array by getting the address of
3021 the array from the template. */
3022 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3023 build_component_ref (TREE_OPERAND (expr, 0),
3024 get_identifier ("P_ARRAY"),
3026 etype = TREE_TYPE (expr);
3027 ecode = TREE_CODE (etype);
3030 case VIEW_CONVERT_EXPR:
3031 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3032 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3033 return convert (type, TREE_OPERAND (expr, 0));
3037 /* If both types are record types, just convert the pointer and
3038 make a new INDIRECT_REF.
3040 ??? Disable this for now since it causes problems with the
3041 code in build_binary_op for MODIFY_EXPR which wants to
3042 strip off conversions. But that code really is a mess and
3043 we need to do this a much better way some time. */
3045 && (TREE_CODE (type) == RECORD_TYPE
3046 || TREE_CODE (type) == UNION_TYPE)
3047 && (TREE_CODE (etype) == RECORD_TYPE
3048 || TREE_CODE (etype) == UNION_TYPE)
3049 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3050 return build_unary_op (INDIRECT_REF, NULL_TREE,
3051 convert (build_pointer_type (type),
3052 TREE_OPERAND (expr, 0)));
3059 /* Check for converting to a pointer to an unconstrained array. */
3060 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3061 return convert_to_fat_pointer (type, expr);
3063 /* If we're converting between two aggregate types that have the same main
3064 variant, just make a VIEW_CONVER_EXPR. */
3065 else if (AGGREGATE_TYPE_P (type)
3066 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3067 return build1 (VIEW_CONVERT_EXPR, type, expr);
3069 /* In all other cases of related types, make a NOP_EXPR. */
3070 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3071 || (code == INTEGER_CST && ecode == INTEGER_CST
3072 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3073 return fold (build1 (NOP_EXPR, type, expr));
3078 return build1 (CONVERT_EXPR, type, expr);
3081 return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));
3084 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3085 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3086 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3087 return unchecked_convert (type, expr, 0);
3088 else if (TYPE_BIASED_REPRESENTATION_P (type))
3089 return fold (build1 (CONVERT_EXPR, type,
3090 fold (build (MINUS_EXPR, TREE_TYPE (type),
3091 convert (TREE_TYPE (type), expr),
3092 TYPE_MIN_VALUE (type)))));
3094 /* ... fall through ... */
3097 return fold (convert_to_integer (type, expr));
3100 case REFERENCE_TYPE:
3101 /* If converting between two pointers to records denoting
3102 both a template and type, adjust if needed to account
3103 for any differing offsets, since one might be negative. */
3104 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3107 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3108 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3109 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3110 sbitsize_int (BITS_PER_UNIT));
3112 expr = build1 (NOP_EXPR, type, expr);
3113 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3114 if (integer_zerop (byte_diff))
3117 return build_binary_op (PLUS_EXPR, type, expr,
3118 fold (convert_to_pointer (type, byte_diff)));
3121 /* If converting to a thin pointer, handle specially. */
3122 if (TYPE_THIN_POINTER_P (type)
3123 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3124 return convert_to_thin_pointer (type, expr);
3126 /* If converting fat pointer to normal pointer, get the pointer to the
3127 array and then convert it. */
3128 else if (TYPE_FAT_POINTER_P (etype))
3129 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3132 return fold (convert_to_pointer (type, expr));
3135 return fold (convert_to_real (type, expr));
3138 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3140 gnat_build_constructor
3141 (type, tree_cons (TYPE_FIELDS (type),
3142 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3145 /* ... fall through ... */
3148 /* In these cases, assume the front-end has validated the conversion.
3149 If the conversion is valid, it will be a bit-wise conversion, so
3150 it can be viewed as an unchecked conversion. */
3151 return unchecked_convert (type, expr, 0);
3154 /* Just validate that the type is indeed that of a field
3155 of the type. Then make the simple conversion. */
3156 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3158 if (TREE_TYPE (tem) == etype)
3159 return build1 (CONVERT_EXPR, type, expr);
3160 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
3161 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
3162 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
3163 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
3164 return build1 (CONVERT_EXPR, type,
3165 convert (TREE_TYPE (tem), expr));
3170 case UNCONSTRAINED_ARRAY_TYPE:
3171 /* If EXPR is a constrained array, take its address, convert it to a
3172 fat pointer, and then dereference it. Likewise if EXPR is a
3173 record containing both a template and a constrained array.
3174 Note that a record representing a left justified modular type
3175 always represents a packed constrained array. */
3176 if (ecode == ARRAY_TYPE
3177 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3178 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3179 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3182 (INDIRECT_REF, NULL_TREE,
3183 convert_to_fat_pointer (TREE_TYPE (type),
3184 build_unary_op (ADDR_EXPR,
3187 /* Do something very similar for converting one unconstrained
3188 array to another. */
3189 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3191 build_unary_op (INDIRECT_REF, NULL_TREE,
3192 convert (TREE_TYPE (type),
3193 build_unary_op (ADDR_EXPR,
3199 return fold (convert_to_complex (type, expr));
3206 /* Remove all conversions that are done in EXP. This includes converting
3207 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3208 is nonzero, always return the address of the containing object even if
3209 the address is not bit-aligned. */
/* Strip every conversion wrapper from EXP and return the underlying
   expression.  This covers plain conversion nodes, COMPONENT_REFs into
   padding records, and CONSTRUCTORs of left-justified modular record
   types.  When TRUE_ADDRESS is nonzero the recursion is asked for the
   address of the containing object even if that address is not
   bit-aligned (see the CONSTRUCTOR case, which forces it to 1).
   NOTE(review): this excerpt elides several original lines (return type,
   braces, some case labels); the code lines below are kept byte-identical.  */
3212 remove_conversions (tree exp, int true_address)
3214   switch (TREE_CODE (exp))
       /* (elided CONSTRUCTOR case label above) -- a constructor of a
	  left-justified modular record wraps the real value in its single
	  field; unwrap it, forcing TRUE_ADDRESS in the recursive call.  */
3218 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3219 	  && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3220 	return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
       /* (elided COMPONENT_REF case label above) -- a field reference into
	  a padding record is really a conversion from the padded type, so
	  recurse on the record object itself.  */
3224       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3225 	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3226 	return remove_conversions (TREE_OPERAND (exp, 0), true_address);
       /* Ordinary conversion nodes: look straight through them.  */
3229     case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3230     case NOP_EXPR: case CONVERT_EXPR:
3231       return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3240 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3241 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3242 likewise return an expression pointing to the underlying array. */
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array; if its type contains a template
   (TYPE_CONTAINS_TEMPLATE_P), likewise return an expression selecting the
   array component.  Otherwise EXP is presumably returned unchanged -- the
   default return is elided from this excerpt.
   NOTE(review): several original lines (return type, braces, case labels,
   the final return) are elided here; code lines are kept byte-identical.  */
3245 maybe_unconstrained_array (tree exp)
3247   enum tree_code code = TREE_CODE (exp);
3250   switch (TREE_CODE (TREE_TYPE (exp)))
3252     case UNCONSTRAINED_ARRAY_TYPE:
3253       if (code == UNCONSTRAINED_ARRAY_REF)
	  /* Dereference the P_ARRAY pointer inside the fat pointer and
	     propagate the readonly flag of the original reference.  */
3256 	  = build_unary_op (INDIRECT_REF, NULL_TREE,
3257 			    build_component_ref (TREE_OPERAND (exp, 0),
3258 						 get_identifier ("P_ARRAY"),
3260 	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  /* For a NULL_EXPR, rebuild it with the array type extracted from
	     the fat pointer's P_ARRAY field type.  */
3264       else if (code == NULL_EXPR)
3265 	return build1 (NULL_EXPR,
3266 		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3267 					     (TREE_TYPE (TREE_TYPE (exp))))),
3268 		       TREE_OPERAND (exp, 0));
       /* (elided RECORD_TYPE case label above)  */
3271       /* If this is a padded type, convert to the unpadded type and see if
3272 	 it contains a template.  */
3273       if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3275 	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3276 	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3277 	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	      /* Select the array field, which follows the template field
		 (hence TREE_CHAIN of TYPE_FIELDS).  */
3279 	      build_component_ref (new, NULL_TREE,
3280 				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3283       else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3285 	  build_component_ref (exp, NULL_TREE,
3286 			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3296 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3297 If NOTRUNC_P is set, truncation operations should be suppressed. */
/* Perform an unchecked (bit-wise) conversion of EXPR to TYPE and return
   the result.  NOTRUNC_P nonzero suppresses the sign/zero-extension
   truncation step at the end.  Recursion is used to route odd-precision
   integral types through a wrapper record so the conversion is done on
   a full-mode object.
   NOTE(review): this excerpt elides many original lines (return type,
   braces, the early "same type" return, several sub-expressions); the
   code lines below are kept byte-identical.  */
3300 unchecked_convert (tree type, tree expr, int notrunc_p)
3302 tree etype = TREE_TYPE (expr);
3304 /* If the expression is already the right type, we are done. */
3308 /* If both types are integral, just do a normal conversion.
3309 Likewise for a conversion to an unconstrained array. */
3310 if ((((INTEGRAL_TYPE_P (type)
3311 && ! (TREE_CODE (type) == INTEGER_TYPE
3312 && TYPE_VAX_FLOATING_POINT_P (type)))
3313 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3314 || (TREE_CODE (type) == RECORD_TYPE
3315 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3316 && ((INTEGRAL_TYPE_P (etype)
3317 && ! (TREE_CODE (etype) == INTEGER_TYPE
3318 && TYPE_VAX_FLOATING_POINT_P (etype)))
3319 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3320 || (TREE_CODE (etype) == RECORD_TYPE
3321 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3322 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
       /* Strip a biased representation from the source by converting
	  through an unbiased copy of its type first.  */
3326 if (TREE_CODE (etype) == INTEGER_TYPE
3327 && TYPE_BIASED_REPRESENTATION_P (etype))
3329 tree ntype = copy_type (etype);
3331 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3332 TYPE_MAIN_VARIANT (ntype) = ntype;
3333 expr = build1 (NOP_EXPR, ntype, expr);
       /* Likewise, if the target is biased, convert to an unbiased copy
	  and then reinterpret as the biased type.  */
3336 if (TREE_CODE (type) == INTEGER_TYPE
3337 && TYPE_BIASED_REPRESENTATION_P (type))
3339 rtype = copy_type (type);
3340 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3341 TYPE_MAIN_VARIANT (rtype) = rtype;
3344 expr = convert (rtype, expr);
3346 expr = build1 (NOP_EXPR, type, expr);
3349 /* If we are converting TO an integral type whose precision is not the
3350 same as its size, first unchecked convert to a record that contains
3351 an object of the output type. Then extract the field. */
3352 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3353 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3354 GET_MODE_BITSIZE (TYPE_MODE (type))))
3356 tree rec_type = make_node (RECORD_TYPE);
3357 tree field = create_field_decl (get_identifier ("OBJ"), type,
3358 rec_type, 1, 0, 0, 0);
3360 TYPE_FIELDS (rec_type) = field;
3361 layout_type (rec_type);
3363 expr = unchecked_convert (rec_type, expr, notrunc_p);
3364 expr = build_component_ref (expr, NULL_TREE, field, 0);
3367 /* Similarly for integral input type whose precision is not equal to its
3369 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3370 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3371 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3373 tree rec_type = make_node (RECORD_TYPE);
3375 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3378 TYPE_FIELDS (rec_type) = field;
3379 layout_type (rec_type);
       /* Wrap the source value in the record, then recurse to convert the
	  record to the target type.  */
3381 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3382 expr = unchecked_convert (type, expr, notrunc_p);
3385 /* We have a special case when we are converting between two
3386 unconstrained array types. In that case, take the address,
3387 convert the fat pointer types, and dereference. */
3388 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3389 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3390 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3391 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3392 build_unary_op (ADDR_EXPR, NULL_TREE,
       /* Default case (elided "else" above): view the bits of EXPR as TYPE,
	  first exposing any underlying constrained array.  */
3396 expr = maybe_unconstrained_array (expr);
3397 etype = TREE_TYPE (expr);
3398 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3401 /* If the result is an integral type whose size is not equal to
3402 the size of the underlying machine type, sign- or zero-extend
3403 the result. We need not do this in the case where the input is
3404 an integral type of the same precision and signedness or if the output
3405 is a biased type or if both the input and output are unsigned. */
3407 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3408 && ! (TREE_CODE (type) == INTEGER_TYPE
3409 && TYPE_BIASED_REPRESENTATION_P (type))
3410 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3411 GET_MODE_BITSIZE (TYPE_MODE (type)))
3412 && ! (INTEGRAL_TYPE_P (etype)
3413 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3414 && operand_equal_p (TYPE_RM_SIZE (type),
3415 (TYPE_RM_SIZE (etype) != 0
3416 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3418 && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
       /* Extend by shifting left then right by (mode size - RM size) bits
	  in a full-mode base type of the right signedness.  */
3420 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3421 TYPE_UNSIGNED (type));
3423 = convert (base_type,
3424 size_binop (MINUS_EXPR,
3426 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3427 TYPE_RM_SIZE (type)));
3430 build_binary_op (RSHIFT_EXPR, base_type,
3431 build_binary_op (LSHIFT_EXPR, base_type,
3432 convert (base_type, expr),
3437 /* An unchecked conversion should never raise Constraint_Error. The code
3438 below assumes that GCC's conversion routines overflow the same way that
3439 the underlying hardware does. This is probably true. In the rare case
3440 when it is false, we can rely on the fact that such conversions are
3441 erroneous anyway. */
3442 if (TREE_CODE (expr) == INTEGER_CST)
3443 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3445 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3446 show no longer constant. */
3447 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3448 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3450 TREE_CONSTANT (expr) = 0;
3455 #include "gt-ada-utils.h"
3456 #include "gtype-ada.h"