1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
26 ****************************************************************************/
/* Fallback definitions for target macros that a back-end may not provide.
   NOTE(review): the matching #endif lines are not visible in this excerpt
   (the embedded original line numbers jump) — confirm against the full file.  */
53 #ifndef MAX_FIXED_MODE_SIZE
/* Default widest fixed-point/integer mode: DImode's bit size (typically 64).  */
54 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
57 #ifndef MAX_BITS_PER_WORD
/* Default to the target word size when no wider bound is given.  */
58 #define MAX_BITS_PER_WORD BITS_PER_WORD
/* File-scope state for the GNAT-to-GCC translation layer.  The GTY(...)
   markers register these roots with GCC's garbage collector.
   NOTE(review): this excerpt is missing lines (original numbering jumps);
   some declarations referenced by the comments below are not visible.  */
61 /* If nonzero, pretend we are allocating at global level. */
64 /* Tree nodes for the various types and decls we create. */
65 tree gnat_std_decls[(int) ADT_LAST];
67 /* Functions to call for each of the possible raise reasons. */
68 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
70 /* Associates a GNAT tree node to a GCC tree node. It is used in
71 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
72 of `save_gnu_tree' for more info. */
/* Length for the GC is taken from the global `max_gnat_nodes'.  */
73 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
75 /* This listhead is used to record any global objects that need elaboration.
76 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
77 initial value to assign. */
79 static GTY(()) tree pending_elaborations;
81 /* This stack allows us to momentarily switch to generating elaboration
82 lists for an inner context. */
/* NOTE(review): the members of struct e_stack are not visible in this excerpt.  */
84 struct e_stack GTY(()) {
88 static GTY(()) struct e_stack *elist_stack;
90 /* This variable keeps a table for types for each precision so that we only
91 allocate each of them once. Signed and unsigned types are kept separate.
93 Note that these types are only used when fold-const requests something
94 special. Perhaps we should NOT share these types; we'll see how it
/* Index [precision][0 = signed, 1 = unsigned]; cache filled lazily.  */
96 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
98 /* Likewise for float types, but record these by mode. */
99 static GTY(()) tree float_types[NUM_MACHINE_MODES];
/* Scope ("binding level") bookkeeping, mirroring what the C front end does.
   NOTE(review): several field declarations of struct binding_level (the
   `names', `blocks' and `this_block' members described by the comments) are
   missing from this excerpt — only their documentation survived.  */
101 /* For each binding contour we allocate a binding_level structure which records
102 the entities defined or declared in that contour. Contours include:
105 one for each subprogram definition
106 one for each compound statement (declare block)
108 Binding contours are used to create GCC tree BLOCK nodes. */
110 struct binding_level GTY(())
112 /* A chain of ..._DECL nodes for all variables, constants, functions,
113 parameters and type declarations. These ..._DECL nodes are chained
114 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
115 in the reverse of the order supplied to be compatible with the
118 /* For each level (except the global one), a chain of BLOCK nodes for all
119 the levels that were entered and exited one level down from this one. */
121 /* The BLOCK node for this level, if one has been preallocated.
122 If 0, the BLOCK is allocated (if needed) when the level is popped. */
124 /* The binding level containing this one (the enclosing binding level). */
125 struct binding_level *level_chain;
128 /* The binding level currently in effect. */
129 static GTY(()) struct binding_level *current_binding_level;
131 /* A chain of binding_level structures awaiting reuse. */
/* `deletable' tells the GC it may drop this free list at collection time.  */
132 static GTY((deletable (""))) struct binding_level *free_binding_level;
134 /* The outermost binding level. This binding level is created when the
135 compiler is started and it will exist through the entire compilation. */
136 static struct binding_level *global_binding_level;
138 /* Binding level structures are initialized by copying this one. */
/* Four NULLs — presumably one per struct member; confirm against full struct.  */
139 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
/* Per-function language-specific data (body not visible in this excerpt)
   and forward declarations for the static helpers defined below.
   PARAMS is the old pre-ISO prototype macro used throughout GCC of this era.  */
141 struct language_function GTY(())
146 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
147 static tree compute_related_constant PARAMS ((tree, tree));
148 static tree split_plus PARAMS ((tree, tree *));
149 static int value_zerop PARAMS ((tree));
150 static tree float_type_for_size PARAMS ((int, enum machine_mode));
151 static tree convert_to_fat_pointer PARAMS ((tree, tree));
152 static tree convert_to_thin_pointer PARAMS ((tree, tree));
/* NOTE(review): this prototype is truncated in the excerpt (no closing parens).  */
153 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
156 /* Initialize the association of GNAT nodes to GCC trees. */
/* NOTE(review): the function header line is missing from this excerpt; the
   body allocates the GC'd association table sized by `max_gnat_nodes',
   clears every slot, and starts an empty pending-elaborations list.  */
163 associate_gnat_to_gnu = (tree *) ggc_alloc (max_gnat_nodes * sizeof (tree));
165 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
166 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
/* Empty TREE_LIST head; TREE_PURPOSE/TREE_VALUE filled as globals need elaboration.  */
168 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
171 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
172 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
173 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
175 If GNU_DECL is zero, a previous association is to be reset. */
/* NOTE(review): lines are missing between the header and the body below
   (parameter declarations for gnu_decl/no_check, the opening of the `if',
   and the error branch the visible condition guards).  */
178 save_gnu_tree (gnat_entity, gnu_decl, no_check)
179 Entity_Id gnat_entity;
/* Visible condition: complains if an association already exists, or if the
   new decl fails the DECL_P sanity check when checking is enabled.  */
184 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
185 || (! no_check && ! DECL_P (gnu_decl))))
/* Record (or reset, when gnu_decl is zero) the association.  */
188 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
191 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
192 Return the ..._DECL node that was associated with it. If there is no tree
193 node associated with GNAT_ENTITY, abort.
195 In some cases, such as delayed elaboration or expressions that need to
196 be elaborated only once, GNAT_ENTITY is really not an entity. */
/* NOTE(review): the return-type line and the abort call inside the `if'
   are missing from this excerpt.  */
199 get_gnu_tree (gnat_entity)
200 Entity_Id gnat_entity;
202 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
205 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
208 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* Pure lookup: never aborts, unlike get_gnu_tree above.
   NOTE(review): return-type line not visible in this excerpt.  */
211 present_gnu_tree (gnat_entity)
212 Entity_Id gnat_entity;
214 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
/* Three small binding-level queries.  NOTE(review): the function name lines
   were dropped from this excerpt; only comments and return statements remain.
   The first fragment also appears truncated (its condition lacks a close).  */
218 /* Return non-zero if we are currently in the global binding level. */
/* True when forced global, or when the current level is the outermost one.  */
223 return (force_global != 0 || current_binding_level == global_binding_level
227 /* Return the list of declarations in the current level. Note that this list
228 is in reverse order (it has to be so for back-end compatibility). */
233 return current_binding_level->names;
236 /* Nonzero if the current level needs to have a BLOCK made. */
/* A BLOCK is only needed when the level declared something.  */
241 return (current_binding_level->names != 0);
244 /* Enter a new binding level. The input parameter is ignored, but has to be
245 specified for back-end compatibility. */
/* NOTE(review): the function name line and the `else' of the reuse test are
   missing from this excerpt.  */
249 int ignore ATTRIBUTE_UNUSED;
251 struct binding_level *newlevel = NULL;
253 /* Reuse a struct for this binding level, if there is one. */
254 if (free_binding_level)
/* Pop one off the free list rather than allocating.  */
256 newlevel = free_binding_level;
257 free_binding_level = free_binding_level->level_chain;
/* (else branch) allocate a fresh GC'd structure.  */
261 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
/* Start from the all-NULL template, whichever path provided the struct.  */
263 *newlevel = clear_binding_level;
265 /* Add this level to the front of the chain (stack) of levels that are
267 newlevel->level_chain = current_binding_level;
268 current_binding_level = newlevel;
271 /* Exit a binding level.
272 Pop the level off, and restore the state of the identifier-decl mappings
273 that were in effect when this level was entered.
275 If KEEP is nonzero, this level had explicit declarations, so
276 and create a "block" (a BLOCK node) for the level
277 to record its declarations and subblocks for symbol table output.
279 If FUNCTIONBODY is nonzero, this level is the body of a function,
280 so create a block as if KEEP were set and also clear out all
283 If REVERSE is nonzero, reverse the order of decls before putting
284 them into the BLOCK. */
/* NOTE(review): this excerpt drops many interior lines (parameter decls,
   braces, local decls such as decl_chain/decl_node/subblock_node, and the
   final return).  Comments below annotate only what is visible.  */
287 poplevel (keep, reverse, functionbody)
292 /* Points to a GCC BLOCK tree node. This is the BLOCK node construted for the
293 binding level that we are about to exit and which is returned by this
295 tree block = NULL_TREE;
298 tree subblock_chain = current_binding_level->blocks;
300 int block_previously_created;
302 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
303 nodes chained through the `names' field of current_binding_level are in
304 reverse order except for PARM_DECL node, which are explicitly stored in
306 current_binding_level->names
307 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
308 : current_binding_level->names;
310 /* Output any nested inline functions within this block which must be
311 compiled because their address is needed. */
312 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
313 if (TREE_CODE (decl_node) == FUNCTION_DECL
314 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
315 && DECL_INITIAL (decl_node) != 0)
/* Compile the nested function in its own function context.  */
317 push_function_context ();
318 output_inline_function (decl_node);
319 pop_function_context ();
/* Use the preallocated BLOCK if set_block was called; otherwise create one
   only when the level had declarations or is a function body.  */
323 block_previously_created = (current_binding_level->this_block != 0);
324 if (block_previously_created)
325 block = current_binding_level->this_block;
326 else if (keep || functionbody)
327 block = make_node (BLOCK);
330 BLOCK_VARS (block) = keep ? decl_chain : 0;
331 BLOCK_SUBBLOCKS (block) = subblock_chain;
334 /* Record the BLOCK node just built as the subblock its enclosing scope. */
335 for (subblock_node = subblock_chain; subblock_node;
336 subblock_node = TREE_CHAIN (subblock_node))
337 BLOCK_SUPERCONTEXT (subblock_node) = block;
339 /* Clear out the meanings of the local variables of this level. */
341 for (subblock_node = decl_chain; subblock_node;
342 subblock_node = TREE_CHAIN (subblock_node))
343 if (DECL_NAME (subblock_node) != 0)
344 /* If the identifier was used or addressed via a local extern decl,
345 don't forget that fact. */
346 if (DECL_EXTERNAL (subblock_node))
348 if (TREE_USED (subblock_node))
349 TREE_USED (DECL_NAME (subblock_node)) = 1;
350 if (TREE_ADDRESSABLE (subblock_node))
351 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
355 /* Pop the current level, and free the structure for reuse. */
356 struct binding_level *level = current_binding_level;
357 current_binding_level = current_binding_level->level_chain;
/* Push the popped struct onto the free list for pushlevel to reuse.  */
358 level->level_chain = free_binding_level;
359 free_binding_level = level;
364 /* This is the top level block of a function. The ..._DECL chain stored
365 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
366 leave them in the BLOCK because they are found in the FUNCTION_DECL
368 DECL_INITIAL (current_function_decl) = block;
369 BLOCK_VARS (block) = 0;
/* Otherwise, chain the new BLOCK into the enclosing level's subblocks.  */
373 if (!block_previously_created)
374 current_binding_level->blocks
375 = chainon (current_binding_level->blocks, block);
378 /* If we did not make a block for the level just exited, any blocks made for
379 inner levels (since they cannot be recorded as subblocks in that level)
380 must be carried forward so they will later become subblocks of something
382 else if (subblock_chain)
383 current_binding_level->blocks
384 = chainon (current_binding_level->blocks, subblock_chain);
386 TREE_USED (block) = 1;
391 /* Insert BLOCK at the end of the list of subblocks of the
392 current binding level. This is used when a BIND_EXPR is expanded,
393 to handle the BLOCK node inside the BIND_EXPR. */
/* NOTE(review): function header line not visible in this excerpt.  */
399 TREE_USED (block) = 1;
400 current_binding_level->blocks
401 = chainon (current_binding_level->blocks, block);
404 /* Set the BLOCK node for the innermost scope
405 (the one we are currently in). */
/* NOTE(review): function header missing; the chainon of `names' below is
   truncated (its second argument is on a dropped line).  Poplevel will pick
   this_block up instead of allocating a fresh BLOCK.  */
411 current_binding_level->this_block = block;
412 current_binding_level->names = chainon (current_binding_level->names,
414 current_binding_level->blocks = chainon (current_binding_level->blocks,
415 BLOCK_SUBBLOCKS (block));
418 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
419 Returns the ..._DECL node. */
/* NOTE(review): the function header, the `else' keyword between the two
   context branches, b->names assignment's companion line, and the final
   return are among lines missing from this excerpt.  */
425 struct binding_level *b;
427 /* If at top level, there is no context. But PARM_DECLs always go in the
428 level of its function. */
429 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
431 b = global_binding_level;
432 DECL_CONTEXT (decl) = 0;
/* (else) local decls belong to the current function.  */
436 b = current_binding_level;
437 DECL_CONTEXT (decl) = current_function_decl;
440 /* Put the declaration on the list. The list of declarations is in reverse
441 order. The list will be reversed later if necessary. This needs to be
442 this way for compatibility with the back-end.
444 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
445 will cause trouble with the debugger and aren't needed anyway. */
446 if (TREE_CODE (decl) != TYPE_DECL
447 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
/* Prepend: newest decl becomes the head of the level's `names' chain.  */
449 TREE_CHAIN (decl) = b->names;
453 /* For the declaration of a type, set its name if it either is not already
454 set, was set to an IDENTIFIER_NODE, indicating an internal name,
455 or if the previous type name was not derived from a source name.
456 We'd rather have the type named with a real name and all the pointer
457 types to the same object have the same POINTER_TYPE node. Code in this
458 function in c-decl.c makes a copy of the type node here, but that may
459 cause us trouble with incomplete types, so let's not try it (at least
462 if (TREE_CODE (decl) == TYPE_DECL
463 && DECL_NAME (decl) != 0
464 && (TYPE_NAME (TREE_TYPE (decl)) == 0
465 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
466 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
467 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
468 && ! DECL_ARTIFICIAL (decl))))
469 TYPE_NAME (TREE_TYPE (decl)) = decl;
474 /* Do little here. Set up the standard declarations later after the
475 front end has been run. */
/* NOTE(review): interior lines are missing (the second arguments of the two
   truncated pushdecl calls, closing parens, etc.).  */
478 gnat_init_decl_processing ()
482 /* Make the binding_level structure for global names. */
483 current_function_decl = 0;
484 current_binding_level = 0;
485 free_binding_level = 0;
/* Global level is whatever pushlevel (invoked on a dropped line, presumably)
   left as current — TODO confirm against the full file.  */
487 global_binding_level = current_binding_level;
489 build_common_tree_nodes (0);
491 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
492 corresponding to the size of ptr_mode. Make this here since we need
493 this before we can expand the GNAT types. */
494 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
495 build_common_tree_nodes_2 (0);
497 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
499 /* We need to make the integer type before doing anything else.
500 We stitch this in to the appropriate GNAT type later. */
501 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
503 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
/* Generic `void *' used throughout gigi for opaque pointers.  */
506 ptr_void_type_node = build_pointer_type (void_type_node);
510 /* Create the predefined scalar types such as `integer_type_node' needed
511 in the gcc back-end and initialize the global binding level. */
/* NOTE(review): many interior lines are missing from this excerpt — local
   declarations (endlink, i, decl, name, jmpbuf_type, the *_decl targets of
   several truncated assignments), `else' keywords, and closing braces.
   Comments below describe only what the visible lines show.  */
514 init_gigi_decls (long_long_float_type, exception_type)
515 tree long_long_float_type, exception_type;
520 /* Set the types that GCC and Gigi use from the front end. We would like
521 to do this for char_type_node, but it needs to correspond to the C
523 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
525 /* In this case, the builtin floating point types are VAX float,
526 so make up a type for use. */
527 longest_float_type_node = make_node (REAL_TYPE);
528 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
529 layout_type (longest_float_type_node);
530 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
531 longest_float_type_node));
/* (else) the front end handed us a real floating type — use it directly.  */
534 longest_float_type_node = TREE_TYPE (long_long_float_type);
536 except_type_node = TREE_TYPE (exception_type);
538 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
539 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
540 unsigned_type_node));
543 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
546 void_ftype = build_function_type (void_type_node, NULL_TREE);
547 ptr_void_ftype = build_pointer_type (void_ftype);
549 /* Now declare runtime functions. */
/* Terminator of all fixed-arity argument lists (the "end" marker).  */
550 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
552 /* malloc is a function declaration tree for a function to allocate
554 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
556 build_function_type (ptr_void_type_node,
557 tree_cons (NULL_TREE,
560 NULL_TREE, 0, 1, 1, 0);
562 /* free is a function declaration tree for a function to free memory. */
565 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
566 build_function_type (void_type_node,
567 tree_cons (NULL_TREE,
570 NULL_TREE, 0, 1, 1, 0);
572 /* Make the types and functions used for exception processing. */
/* jmpbuf modeled as an array of 6 pointer-mode words (index type 0..5).  */
574 = build_array_type (gnat_type_for_mode (Pmode, 0),
575 build_index_type (build_int_2 (5, 0)));
576 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
577 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
579 /* Functions to get and set the jumpbuf pointer for the current thread. */
581 = create_subprog_decl
582 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
583 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
584 NULL_TREE, 0, 1, 1, 0);
587 = create_subprog_decl
588 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
590 build_function_type (void_type_node,
591 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
592 NULL_TREE, 0, 1, 1, 0);
594 /* Function to get the current exception. */
596 = create_subprog_decl
597 (get_identifier ("system__soft_links__get_gnat_exception"),
599 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
600 NULL_TREE, 0, 1, 1, 0);
602 /* Functions that raise exceptions. */
604 = create_subprog_decl
605 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
606 build_function_type (void_type_node,
607 tree_cons (NULL_TREE,
608 build_pointer_type (except_type_node),
610 NULL_TREE, 0, 1, 1, 0);
612 /* If in no exception handlers mode, all raise statements are redirected to
613 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
614 this procedure will never be called in this mode. */
615 if (No_Exception_Handlers_Set ())
618 = create_subprog_decl
619 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
620 build_function_type (void_type_node,
621 tree_cons (NULL_TREE,
622 build_pointer_type (char_type_node),
623 tree_cons (NULL_TREE,
626 NULL_TREE, 0, 1, 1, 0);
/* Every raise reason points at the single last-chance handler.  */
628 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
630 gnat_raise_decls[i] = decl;
633 /* Otherwise, make one decl for each exception reason. */
634 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
/* Runtime check routines are named __gnat_rcheck_00, _01, ...  */
638 sprintf (name, "__gnat_rcheck_%.2d", i);
640 = create_subprog_decl
641 (get_identifier (name), NULL_TREE,
642 build_function_type (void_type_node,
643 tree_cons (NULL_TREE,
646 tree_cons (NULL_TREE,
649 NULL_TREE, 0, 1, 1, 0);
652 /* Indicate that these never return. */
653 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
654 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
655 TREE_TYPE (raise_nodefer_decl)
656 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
659 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
661 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
662 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
663 TREE_TYPE (gnat_raise_decls[i])
664 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
668 /* setjmp returns an integer and has one operand, which is a pointer to
671 = create_subprog_decl
672 (get_identifier ("setjmp"), NULL_TREE,
673 build_function_type (integer_type_node,
674 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
675 NULL_TREE, 0, 1, 1, 0);
/* Mark as the setjmp builtin so the back-end treats calls specially.  */
677 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
678 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
680 main_identifier_node = get_identifier ("main");
683 /* This function is called indirectly from toplev.c to handle incomplete
684 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
685 compile_file in toplev.c makes an indirect call through the function pointer
686 incomplete_decl_finalize_hook which is initialized to this routine in
687 init_decl_processing. */
/* NOTE(review): the body is not visible in this excerpt; presumably a no-op
   or an abort — confirm against the full file.  */
690 gnat_finish_incomplete_decl (dont_care)
691 tree dont_care ATTRIBUTE_UNUSED;
696 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
697 nodes (FIELDLIST), finish constructing the record or union type.
698 If HAS_REP is nonzero, this record has a rep clause; don't call
699 layout_type but merely set the size and alignment ourselves.
700 If DEFER_DEBUG is nonzero, do not call the debugging routines
701 on this type; it will be done later. */
/* NOTE(review): this excerpt is missing a large number of interior lines —
   parameter declarations, local declarations (field, old_field, new_field,
   new_record_type, new_id, pos, var, suffix, ...), the switch statement and
   its braces, and several `else'/condition lines.  Comments below annotate
   only the visible statements.  */
704 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
/* Running totals: Ada-visible size, GCC size (bits) and size in bytes.  */
710 enum tree_code code = TREE_CODE (record_type);
711 tree ada_size = bitsize_zero_node;
712 tree size = bitsize_zero_node;
713 tree size_unit = size_zero_node;
717 TYPE_FIELDS (record_type) = fieldlist;
/* Reuse an existing TYPE_DECL as the stub decl, else push a fresh one.  */
719 if (TYPE_NAME (record_type) != 0
720 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
721 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
723 TYPE_STUB_DECL (record_type)
724 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
727 /* We don't need both the typedef name and the record name output in
728 the debugging information, since they are the same. */
729 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
731 /* Globally initialize the record first. If this is a rep'ed record,
732 that just means some initializations; otherwise, layout the record. */
736 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
737 TYPE_MODE (record_type) = BLKmode;
738 if (TYPE_SIZE (record_type) == 0)
740 TYPE_SIZE (record_type) = bitsize_zero_node;
741 TYPE_SIZE_UNIT (record_type) = size_zero_node;
746 /* Ensure there isn't a size already set. There can be in an error
747 case where there is a rep clause but all fields have errors and
748 no longer have a position. */
749 TYPE_SIZE (record_type) = 0;
750 layout_type (record_type);
753 /* At this point, the position and size of each field is known. It was
754 either set before entry by a rep clause, or by laying out the type
755 above. We now make a pass through the fields (in reverse order for
756 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
757 (for rep'ed records that are not padding types); and the mode (for
760 if (code == QUAL_UNION_TYPE)
761 fieldlist = nreverse (fieldlist);
763 for (field = fieldlist; field; field = TREE_CHAIN (field))
765 tree type = TREE_TYPE (field);
766 tree this_size = DECL_SIZE (field);
767 tree this_size_unit = DECL_SIZE_UNIT (field);
768 tree this_ada_size = DECL_SIZE (field);
770 /* We need to make an XVE/XVU record if any field has variable size,
771 whether or not the record does. For example, if we have an union,
772 it may be that all fields, rounded up to the alignment, have the
773 same size, in which case we'll use that size. But the debug
774 output routines (except Dwarf2) won't be able to output the fields,
775 so we need to make the special record. */
776 if (TREE_CODE (this_size) != INTEGER_CST)
/* Records/unions track their own Ada size; prefer it when available.  */
779 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
780 || TREE_CODE (type) == QUAL_UNION_TYPE)
781 && ! TYPE_IS_FAT_POINTER_P (type)
782 && ! TYPE_CONTAINS_TEMPLATE_P (type)
783 && TYPE_ADA_SIZE (type) != 0)
784 this_ada_size = TYPE_ADA_SIZE (type);
786 if (has_rep && ! DECL_BIT_FIELD (field))
787 TYPE_ALIGN (record_type)
788 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* (switch on `code', opening lines dropped) UNION_TYPE case: size is the
   max over members.  */
793 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
794 size = size_binop (MAX_EXPR, size, this_size);
795 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
/* QUAL_UNION_TYPE case: size selected by each variant's qualifier.  */
798 case QUAL_UNION_TYPE:
800 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
801 this_ada_size, ada_size));
802 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
804 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
805 this_size_unit, size_unit));
809 /* Since we know here that all fields are sorted in order of
810 increasing bit position, the size of the record is one
811 higher than the ending bit of the last field processed
812 unless we have a rep clause, since in that case we might
813 have a field outside a QUAL_UNION_TYPE that has a higher ending
814 position. So use a MAX in that case. Also, if this field is a
815 QUAL_UNION_TYPE, we need to take into account the previous size in
816 the case of empty variants. */
818 = merge_sizes (ada_size, bit_position (field), this_ada_size,
819 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
820 size = merge_sizes (size, bit_position (field), this_size,
821 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
823 = merge_sizes (size_unit, byte_position (field), this_size_unit,
824 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
/* Undo the reversal done before the scan.  */
832 if (code == QUAL_UNION_TYPE)
833 nreverse (fieldlist);
835 /* If this is a padding record, we never want to make the size smaller than
836 what was specified in it, if any. */
837 if (TREE_CODE (record_type) == RECORD_TYPE
838 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
840 size = TYPE_SIZE (record_type);
841 size_unit = TYPE_SIZE_UNIT (record_type);
844 /* Now set any of the values we've just computed that apply. */
845 if (! TYPE_IS_FAT_POINTER_P (record_type)
846 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
847 SET_TYPE_ADA_SIZE (record_type, ada_size);
/* Round the computed sizes up to the record alignment (target hook if any).  */
849 #ifdef ROUND_TYPE_SIZE
850 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
851 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
852 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
854 size = round_up (size, TYPE_ALIGN (record_type));
855 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
/* (condition opening dropped) avoid overriding a self-referential size of a
   padding record.  */
859 && ! (TREE_CODE (record_type) == RECORD_TYPE
860 && TYPE_IS_PADDING_P (record_type)
861 && TREE_CODE (size) != INTEGER_CST
862 && contains_placeholder_p (size)))
864 TYPE_SIZE (record_type) = size;
865 TYPE_SIZE_UNIT (record_type) = size_unit;
869 compute_record_mode (record_type);
873 /* If this record is of variable size, rename it so that the
874 debugger knows it is and make a new, parallel, record
875 that tells the debugger how the record is laid out. See
/* Parallel debug record: QUAL_UNION becomes a plain UNION for the debugger.  */
880 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
881 ? UNION_TYPE : TREE_CODE (record_type))
882 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
884 = concat_id_with_name (orig_id,
885 TREE_CODE (record_type) == QUAL_UNION_TYPE
887 tree last_pos = bitsize_zero_node;
890 TYPE_NAME (new_record_type) = new_id;
891 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
892 TYPE_STUB_DECL (new_record_type)
893 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
894 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
895 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
896 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
897 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
899 /* Now scan all the fields, replacing each field with a new
900 field corresponding to the new encoding. */
901 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
902 old_field = TREE_CHAIN (old_field))
904 tree field_type = TREE_TYPE (old_field);
905 tree field_name = DECL_NAME (old_field);
907 tree curpos = bit_position (old_field);
909 unsigned int align = 0;
912 /* See how the position was modified from the last position.
914 There are two basic cases we support: a value was added
915 to the last position or the last position was rounded to
916 a boundary and they something was added. Check for the
917 first case first. If not, see if there is any evidence
918 of rounding. If so, round the last position and try
921 If this is a union, the position can be taken as zero. */
923 if (TREE_CODE (new_record_type) == UNION_TYPE)
924 pos = bitsize_zero_node, align = 0;
926 pos = compute_related_constant (curpos, last_pos);
/* curpos == something * const: treat the constant as the alignment.  */
928 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
929 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
931 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
932 pos = compute_related_constant (curpos,
933 round_up (last_pos, align));
935 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
936 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
937 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
938 && host_integerp (TREE_OPERAND
939 (TREE_OPERAND (curpos, 0), 1),
944 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
945 pos = compute_related_constant (curpos,
946 round_up (last_pos, align));
949 /* If we can't compute a position, set it to zero.
951 ??? We really should abort here, but it's too much work
952 to get this correct for all cases. */
955 pos = bitsize_zero_node;
957 /* See if this type is variable-size and make a new type
958 and indicate the indirection if so. */
959 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
961 field_type = build_pointer_type (field_type);
965 /* Make a new field name, if necessary. */
966 if (var || align != 0)
/* Encoded suffix: XVL for indirection, XVAn for alignment n bytes.  */
971 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
972 align / BITS_PER_UNIT);
974 strcpy (suffix, "XVL");
976 field_name = concat_id_with_name (field_name, suffix);
979 new_field = create_field_decl (field_name, field_type,
981 DECL_SIZE (old_field), pos, 0);
982 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
983 TYPE_FIELDS (new_record_type) = new_field;
985 /* If old_field is a QUAL_UNION_TYPE, take its size as being
986 zero. The only time it's not the last field of the record
987 is when there are other components at fixed positions after
988 it (meaning there was a rep clause for every field) and we
989 want to be able to encode them. */
990 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
991 (TREE_CODE (TREE_TYPE (old_field))
994 : DECL_SIZE (old_field));
/* Fields were prepended; restore declaration order before emitting.  */
997 TYPE_FIELDS (new_record_type)
998 = nreverse (TYPE_FIELDS (new_record_type));
1000 rest_of_type_compilation (new_record_type, global_bindings_p ());
1003 rest_of_type_compilation (record_type, global_bindings_p ());
1007 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1008 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1009 if this represents a QUAL_UNION_TYPE in which case we must look for
1010 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1011 is nonzero, we must take the MAX of the end position of this field
1012 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1014 We return an expression for the size. */
/* NOTE(review): missing lines include the return type, remaining parameter
   declarations, the `if (has_rep)' guard before the MAX_EXPR line, the
   return of `new', and the trailing recursion arguments/parens.  */
1017 merge_sizes (last_size, first_bit, size, special, has_rep)
1019 tree first_bit, size;
1023 tree type = TREE_TYPE (last_size);
/* Ordinary field: end position is first_bit + size (MAX'd under has_rep).  */
1025 if (! special || TREE_CODE (size) != COND_EXPR)
1027 tree new = size_binop (PLUS_EXPR, first_bit, size);
1030 new = size_binop (MAX_EXPR, last_size, new);
/* Variant (COND_EXPR) size: recurse into both arms, substituting LAST_SIZE
   for zero-sized (empty) variants.  */
1035 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1036 integer_zerop (TREE_OPERAND (size, 1))
1037 ? last_size : merge_sizes (last_size, first_bit,
1038 TREE_OPERAND (size, 1),
1040 integer_zerop (TREE_OPERAND (size, 2))
1041 ? last_size : merge_sizes (last_size, first_bit,
1042 TREE_OPERAND (size, 2),
1046 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1047 related by the addition of a constant. Return that constant if so. */
/* NOTE(review): the return type, parameter declarations, and both return
   statements (presumably `result' / 0) are on lines missing from this
   excerpt.  */
1050 compute_related_constant (op0, op1)
/* Split each operand into constant + variable parts, then compare.  */
1053 tree op0_var, op1_var;
1054 tree op0_con = split_plus (op0, &op0_var);
1055 tree op1_con = split_plus (op1, &op1_var);
1056 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1058 if (operand_equal_p (op0_var, op1_var, 0))
1060 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1066 /* Utility function of above to split a tree OP which may be a sum, into a
1067 constant part, which is returned, and a variable part, which is stored
1068 in *PVAR. *PVAR may be size_zero_node. All operations must be of
/* NOTE(review): missing lines include the return type, parameter
   declarations, `*pvar = in;' in the default path, and returns of `in' /
   `result' — confirm against the full file.  */
1072 split_plus (in, pvar)
1076 tree result = bitsize_zero_node;
/* Strip NON_LVALUE_EXPR wrappers before classifying.  */
1078 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1079 in = TREE_OPERAND (in, 0);
/* Pure constant: the variable part is zero.  */
1082 if (TREE_CODE (in) == INTEGER_CST)
1084 *pvar = bitsize_zero_node;
1087 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
/* Sum/difference: recurse on both operands and recombine with the same
   tree code (note the constant of a MINUS right operand is subtracted).  */
1089 tree lhs_var, rhs_var;
1090 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1091 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1093 result = size_binop (PLUS_EXPR, result, lhs_con);
1094 result = size_binop (TREE_CODE (in), result, rhs_con);
/* Nothing was split out of either side: the whole of IN is variable.  */
1096 if (lhs_var == TREE_OPERAND (in, 0)
1097 && rhs_var == TREE_OPERAND (in, 1))
1098 return bitsize_zero_node;
1100 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
/* Fallback: IN is entirely variable; constant part is zero.  */
1104 return bitsize_zero_node;
1107 /* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
1108    subprogram.  If it is void_type_node, then we are dealing with a procedure,
1109    otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
1110    PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
1111    copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1112    RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1113    object.  RETURNS_BY_REF is nonzero if the function returns by reference.
1114    RETURNS_WITH_DSP is nonzero if the function is to return with a
1115    depressed stack pointer.  */
1118 create_subprog_type (return_type, param_decl_list, cico_list,
1119 		     returns_unconstrained, returns_by_ref, returns_with_dsp)
1121      tree param_decl_list;
1123      int returns_unconstrained, returns_by_ref, returns_with_dsp;
1125   /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1126      the subprogram formal parameters. This list is generated by traversing the
1127      input list of PARM_DECL nodes.  */
1128   tree param_type_list = NULL;
1132   for (param_decl = param_decl_list; param_decl;
1133        param_decl = TREE_CHAIN (param_decl))
1134     param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1137   /* The list of the function parameter types has to be terminated by the void
1138      type to signal to the back-end that we are not dealing with a variable
1139      parameter subprogram, but that the subprogram has a fixed number of
1141   param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1143   /* The list of argument types has been created in reverse
1145   param_type_list = nreverse (param_type_list);
1147   type = build_function_type (return_type, param_type_list);
1149   /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
1150      or the new type should, make a copy of TYPE.  Likewise for
1151      RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
1152   if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1153       || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1154       || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1155     type = copy_type (type);
     /* NOTE(review): the copy test above does not compare
        TYPE_RETURNS_STACK_DEPRESSED, yet it is assigned below; setting it on
        a shared (uncopied) type could affect other users — confirm.  */
1157   SET_TYPE_CI_CO_LIST (type, cico_list);
1158   TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1159   TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1160   TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1164 /* Return a copy of TYPE but safe to modify in any way.  */
1170   tree new = copy_node (type);
1172   /* copy_node clears this field instead of copying it, because it is
1173      aliased with TREE_CHAIN.  */
1174   TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
     /* Detach the copy from the original's pointer-to/reference-to caches and
        its variant chain, so later modifications cannot leak back.  */
1176   TYPE_POINTER_TO (new) = 0;
1177   TYPE_REFERENCE_TO (new) = 0;
1178   TYPE_MAIN_VARIANT (new) = new;
1179   TYPE_NEXT_VARIANT (new) = 0;
1184 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1185    TYPE_INDEX_TYPE is INDEX.  */
1188 create_index_type (min, max, index)
1192   /* First build a type for the desired range.  */
1193   tree type = build_index_2_type (min, max);
1195   /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
1196      doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
1197      is set, but not to INDEX, make a copy of this type with the requested
1198      index type.  Note that we have no way of sharing these types, but that's
1199      only a small hole.  */
1200   if (TYPE_INDEX_TYPE (type) == index)
1202   else if (TYPE_INDEX_TYPE (type) != 0)
1203     type = copy_type (type);
1205   SET_TYPE_INDEX_TYPE (type, index);
1209 /* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
1210    string) and TYPE is a ..._TYPE node giving its data type.
1211    ARTIFICIAL_P is nonzero if this is a declaration that was generated
1212    by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
1213    information about this type.  */
1216 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1219      struct attrib *attr_list;
1223   tree type_decl = build_decl (TYPE_DECL, type_name, type);
1224   enum tree_code code = TREE_CODE (type);
1226   DECL_ARTIFICIAL (type_decl) = artificial_p;
     /* Make the decl visible in the current binding level before attributes
        are processed, matching the other create_* routines here.  */
1227   pushdecl (type_decl);
1228   process_attributes (type_decl, attr_list);
1230   /* Pass type declaration information to the debugger unless this is an
1231      UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1232      and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1233      a dummy type, which will be completed later, or a type for which
1234      debugging information was not requested.  */
1235   if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1237     DECL_IGNORED_P (type_decl) = 1;
1238   else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1239 	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1240 		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1241     rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1246 /* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
1247    ASM_NAME is its assembler name (if provided).  TYPE is its data type
1248    (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
1249    expression; NULL_TREE if none.
1251    CONST_FLAG is nonzero if this variable is constant.
1253    PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1254    the current compilation unit. This flag should be set when processing the
1255    variable definitions in a package specification.  EXTERN_FLAG is nonzero
1256    when processing an external variable declaration (as opposed to a
1257    definition: no storage is to be allocated for the variable here).
1259    STATIC_FLAG is only relevant when not at top level.  In that case
1260    it indicates whether to always allocate storage to the variable.  */
1263 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1264 		 extern_flag, static_flag, attr_list)
1273      struct attrib *attr_list;
     /* Decide whether the initializer counts as constant: at global scope (or
        for a static) it must be valid as a static initializer, otherwise
        TREE_CONSTANT suffices.  */
1278        : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1279 	  && (global_bindings_p () || static_flag
1280 	      ? 0 != initializer_constant_valid_p (var_init,
1281 						   TREE_TYPE (var_init))
1282 	      : TREE_CONSTANT (var_init))));
1284     = build_decl ((const_flag && init_const
1285 		   /* Only make a CONST_DECL for sufficiently-small objects.
1286 		      We consider complex double "sufficiently-small" */
1287 		   && TYPE_SIZE (type) != 0
1288 		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
1289 		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1290 					     GET_MODE_SIZE (DCmode)))
1291 		  ? CONST_DECL : VAR_DECL, var_name, type);
1292   tree assign_init = 0;
1294   /* If this is external, throw away any initializations unless this is a
1295      CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
1296      we are defining a global here, leave a constant initialization and save
1297      any variable elaborations for the elaboration routine.  Otherwise, if
1298      the initializing expression is not the same as TYPE, generate the
1299      initialization with an assignment statement, since it knows how
1300      to do the required adjustents.  If we are just annotating types,
1301      throw away the initialization if it isn't a constant.  */
1303   if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1304       || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
     /* A non-constant global initializer becomes a deferred elaboration.  */
1307   if (global_bindings_p () && var_init != 0 && ! init_const)
1309       add_pending_elaborations (var_decl, var_init);
1313   else if (var_init != 0
1314 	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1315 		!= TYPE_MAIN_VARIANT (type))
1316 	       || (static_flag && ! init_const)))
1318     assign_init = var_init, var_init = 0;
1319   DECL_COMMON   (var_decl) = !flag_no_common;
1320   DECL_INITIAL  (var_decl) = var_init;
1321   TREE_READONLY (var_decl) = const_flag;
1322   DECL_EXTERNAL (var_decl) = extern_flag;
1323   TREE_PUBLIC   (var_decl) = public_flag || extern_flag;
1324   TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1325   TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1326     = TYPE_VOLATILE (type);
1328   /* At the global binding level we need to allocate static storage for the
1329      variable if and only if its not external. If we are not at the top level
1330      we allocate automatic storage unless requested not to.  */
1331   TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1334     SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1336   process_attributes (var_decl, attr_list);
1338   /* Add this decl to the current binding level and generate any
1339      needed code and RTL.  */
1340   var_decl = pushdecl (var_decl);
1341   expand_decl (var_decl);
1343   if (DECL_CONTEXT (var_decl) != 0)
1344     expand_decl_init (var_decl);
1346   /* If this is volatile, force it into memory.  */
1347   if (TREE_SIDE_EFFECTS (var_decl))
1348     gnat_mark_addressable (var_decl);
1350   if (TREE_CODE (var_decl) != CONST_DECL)
1351     rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1353   if (assign_init != 0)
1355       /* If VAR_DECL has a padded type, convert it to the unpadded
1356 	 type so the assignment is done properly.  */
1357       tree lhs = var_decl;
1359       if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1360 	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1361 	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1363       expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1370 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1371    type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
1372    this field is in a record type with a "pragma pack".  If SIZE is nonzero
1373    it is the specified size for this field. If POS is nonzero, it is the bit
1374    position.  If ADDRESSABLE is nonzero, it means we are allowed to take
1375    the address of this field for aliasing purposes.  */
1378 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1387   tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1389   DECL_CONTEXT (field_decl) = record_type;
1390   TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1392   /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1393      byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1394      If it is a padding type where the inner field is of variable size, it
1395      must be at its natural alignment.  Just handle the packed case here; we
1396      will disallow non-aligned rep clauses elsewhere.  */
1397   if (packed && TYPE_MODE (field_type) == BLKmode)
1398     DECL_ALIGN (field_decl)
1399       = ((TREE_CODE (field_type) == RECORD_TYPE
1400 	  && TYPE_IS_PADDING_P (field_type)
1401 	  && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1402 	 ?  TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1404   /* If a size is specified, use it.  Otherwise, see if we have a size
1405      to use that may differ from the natural size of the object.  */
1407     size = convert (bitsizetype, size);
     /* Under pragma pack (packed == 1), honor the RM size when it is smaller
        than the type's natural size.  */
1410       if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1411 					    TYPE_SIZE (field_type), 0))
1412 	size = rm_size (field_type);
1414       /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1416       if (size != 0 && TREE_CODE (size) == INTEGER_CST
1417 	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1418 	size = round_up (size, BITS_PER_UNIT);
1421   /* Make a bitfield if a size is specified for two reasons: first if the size
1422      differs from the natural size.  Second, if the alignment is insufficient.
1423      There are a number of ways the latter can be true.  But never make a
1424      bitfield if the type of the field has a nonconstant size.  */
1426   if (size != 0 && TREE_CODE (size) == INTEGER_CST
1427       && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1428       && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1430 	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1431 					    bitsize_int (TYPE_ALIGN
1434 	  || (TYPE_ALIGN (record_type) != 0
1435 	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1437       DECL_BIT_FIELD (field_decl) = 1;
1438       DECL_SIZE (field_decl) = size;
1439       if (! packed && pos == 0)
1440 	DECL_ALIGN (field_decl)
1441 	  = (TYPE_ALIGN (record_type) != 0
1442 	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1443 	     : TYPE_ALIGN (field_type));
     /* A positioned bit field is packed; otherwise inherit PACKED.  The
        alignment floor is 1 bit for bit fields, a byte for packed non-BLKmode
        fields, else the type's natural alignment.  */
1446   DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1447   DECL_ALIGN (field_decl)
1448     = MAX (DECL_ALIGN (field_decl),
1449 	   DECL_BIT_FIELD (field_decl) ? 1
1450 	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1451 	   : TYPE_ALIGN (field_type));
1455       /* We need to pass in the alignment the DECL is known to have.
1456 	 This is the lowest-order bit set in POS, but no more than
1457 	 the alignment of the record, if one is specified.  Note
1458 	 that an alignment of 0 is taken as infinite.  */
1459       unsigned int known_align;
1461       if (host_integerp (pos, 1))
1462 	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1464 	known_align = BITS_PER_UNIT;
1466       if (TYPE_ALIGN (record_type)
1467 	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1468 	known_align = TYPE_ALIGN (record_type);
1470       layout_decl (field_decl, known_align);
1471       SET_DECL_OFFSET_ALIGN (field_decl,
1472 			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1474       pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1475 		    &DECL_FIELD_BIT_OFFSET (field_decl),
1476 		    DECL_OFFSET_ALIGN (field_decl), pos);
1478       DECL_HAS_REP_P (field_decl) = 1;
1481   /* If the field type is passed by reference, we will have pointers to the
1482      field, so it is addressable. */
1483   if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1486   /* Mark the decl as nonaddressable if it either is indicated so semantically
1487      or if it is a bit field.  */
1488   DECL_NONADDRESSABLE_P (field_decl)
1489     = ! addressable || DECL_BIT_FIELD (field_decl);
1494 /* Subroutine of previous function:  return nonzero if EXP, ignoring any side
1495    effects, has the value of zero.  */
     /* Look through COMPOUND_EXPRs: only the final value matters.  */
1501   if (TREE_CODE (exp) == COMPOUND_EXPR)
1502     return value_zerop (TREE_OPERAND (exp, 1));
1504   return integer_zerop (exp);
1507 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1508    PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
1509    readonly (either an IN parameter or an address of a pass-by-ref
1513 create_param_decl (param_name, param_type, readonly)
1518   tree param_decl = build_decl (PARM_DECL, param_name, param_type);
     /* The type used when passing the argument is the declared type itself;
        no promotion is applied here.  */
1520   DECL_ARG_TYPE (param_decl) = param_type;
1521   DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1522   TREE_READONLY (param_decl) = readonly;
1526 /* Given a DECL and ATTR_LIST, process the listed attributes.  */
1529 process_attributes (decl, attr_list)
1531      struct attrib *attr_list;
1533   for (; attr_list; attr_list = attr_list->next)
1534     switch (attr_list->type)
1536       case ATTR_MACHINE_ATTRIBUTE:
1537 	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1539 			 ATTR_FLAG_TYPE_IN_PLACE);
1542       case ATTR_LINK_ALIAS:
	/* An alias needs a definition in this unit, hence TREE_STATIC.  */
1543 	TREE_STATIC (decl) = 1;
1544 	assemble_alias (decl, attr_list->name);
1547       case ATTR_WEAK_EXTERNAL:
1549 	declare_weak (decl);
1551 	post_error ("?weak declarations not supported on this target",
1552 		    attr_list->error_point);
1555       case ATTR_LINK_SECTION:
1556 #ifdef ASM_OUTPUT_SECTION_NAME
1557 	DECL_SECTION_NAME (decl)
1558 	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
1559 			  IDENTIFIER_POINTER (attr_list->name));
	/* A decl with an explicit section cannot be emitted as common.  */
1560 	DECL_COMMON (decl) = 0;
1562 	post_error ("?section attributes are not supported for this target",
1563 		    attr_list->error_point);
1569 /* Add some pending elaborations on the list.  */
1572 add_pending_elaborations (var_decl, var_init)
1577     Check_Elaboration_Code_Allowed (error_gnat_node);
     /* Append at the tail: elaboration order must match declaration order.  */
1579   pending_elaborations
1580     = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1583 /* Obtain any pending elaborations and clear the old list.  */
1586 get_pending_elaborations ()
1588   /* Each thing added to the list went on the end; we want it on the
     The head node is a dummy placeholder; real entries start at its
     TREE_CHAIN.  */
1590   tree result = TREE_CHAIN (pending_elaborations);
1592   TREE_CHAIN (pending_elaborations) = 0;
1596 /* Return nonzero if there are pending elaborations.  */
1599 pending_elaborations_p ()
     /* Entries hang off the dummy head's TREE_CHAIN (see
        get_pending_elaborations).  */
1601   return TREE_CHAIN (pending_elaborations) != 0;
1604 /* Save a copy of the current pending elaboration list and make a new
1608 push_pending_elaborations ()
1610   struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1612   p->next = elist_stack;
1613   p->elab_list = pending_elaborations;
     /* Fresh list starts with a dummy head node, as elsewhere.  */
1615   pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1618 /* Pop the stack of pending elaborations.  */
1621 pop_pending_elaborations ()
1623   struct e_stack *p = elist_stack;
     /* Restore the saved list; the popped stack node is left to the GC.  */
1625   pending_elaborations = p->elab_list;
1626   elist_stack = p->next;
1629 /* Return the current position in pending_elaborations so we can insert
1630    elaborations after that point.  */
1633 get_elaboration_location ()
     /* The tail node serves as the insertion cursor.  */
1635   return tree_last (pending_elaborations);
1638 /* Insert the current elaborations after ELAB, which is in some elaboration
1642 insert_elaboration_list (elab)
1645   tree next = TREE_CHAIN (elab);
1647   if (TREE_CHAIN (pending_elaborations))
     /* Splice the current list (minus its dummy head) between ELAB and its
        old successor, then empty the current list.  */
1649       TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1650       TREE_CHAIN (tree_last (pending_elaborations)) = next;
1651       TREE_CHAIN (pending_elaborations) = 0;
1655 /* Returns a LABEL_DECL node for LABEL_NAME.  */
1658 create_label_decl (label_name)
1661   tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1663   DECL_CONTEXT (label_decl)     = current_function_decl;
1664   DECL_MODE (label_decl)        = VOIDmode;
     /* Record the source position for debug output.  */
1665   DECL_SOURCE_LINE (label_decl) = lineno;
1666   DECL_SOURCE_FILE (label_decl) = input_filename;
1671 /* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
1672    ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1673    node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1674    PARM_DECL nodes chained through the TREE_CHAIN field).
1676    INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1677    appropriate fields in the FUNCTION_DECL.  */
1680 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1681 		     inline_flag, public_flag, extern_flag, attr_list)
1685      tree param_decl_list;
1689      struct attrib *attr_list;
1691   tree return_type = TREE_TYPE (subprog_type);
1692   tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1694   /* If this is a function nested inside an inlined external function, it
1695      means we aren't going to compile the outer function unless it is
1696      actually inlined, so do the same for us.  */
1697   if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1698       && DECL_EXTERNAL (current_function_decl))
1701   DECL_EXTERNAL (subprog_decl)  = extern_flag;
1702   TREE_PUBLIC (subprog_decl)    = public_flag;
1703   DECL_INLINE (subprog_decl)    = inline_flag;
     /* Mirror the type's const/volatile qualities onto the decl so the
        optimizers see them.  */
1704   TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
1705   TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1706   TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1707   DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1708   DECL_RESULT (subprog_decl)    = build_decl (RESULT_DECL, 0, return_type);
1711     SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1713   process_attributes (subprog_decl, attr_list);
1715   /* Add this decl to the current binding level.  */
1716   subprog_decl = pushdecl (subprog_decl);
1718   /* Output the assembler code and/or RTL for the declaration.  */
1719   rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1721   return subprog_decl;
1724 /* Count how deep we are into nested functions.  This is because
1725    we shouldn't call the backend function context routines unless we
1726    are in a nested function.  */
1728 static int function_nesting_depth;
1730 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1731    body.  This routine needs to be invoked before processing the declarations
1732    appearing in the subprogram.  */
1735 begin_subprog_body (subprog_decl)
1738   tree param_decl_list;
     /* Only push a back-end function context when entering a nested
        function (depth was already nonzero).  */
1742   if (function_nesting_depth++ != 0)
1743     push_function_context ();
1745   announce_function (subprog_decl);
1747   /* Make this field nonzero so further routines know that this is not
1748      tentative. error_mark_node is replaced below (in poplevel) with the
1750   DECL_INITIAL (subprog_decl) = error_mark_node;
1752   /* This function exists in static storage. This does not mean `static' in
1754   TREE_STATIC (subprog_decl) = 1;
1756   /* Enter a new binding level.  */
1757   current_function_decl = subprog_decl;
1760   /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1761      subprogram body) so that they can be recognized as local variables in the
1764      The list of PARM_DECL nodes is stored in the right order in
1765      DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
1766      which they are transmitted to `pushdecl' we need to reverse the list of
1767      PARM_DECLs if we want it to be stored in the right order.  The reason why
1768      we want to make sure the PARM_DECLs are stored in the correct order is
1769      that this list will be retrieved in a few lines with a call to `getdecl'
1770      to store it back into the DECL_ARGUMENTS field.  */
1771   param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1773   for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1775       next_param = TREE_CHAIN (param_decl);
      /* Unchain before pushdecl; it will rebuild the chain.  */
1776       TREE_CHAIN (param_decl) = NULL;
1777       pushdecl (param_decl);
1780   /* Store back the PARM_DECL nodes. They appear in the right order. */
1781   DECL_ARGUMENTS (subprog_decl) = getdecls ();
1783   init_function_start (subprog_decl, input_filename, lineno);
1784   expand_function_start (subprog_decl, 0);
1786   /* If this function is `main', emit a call to `__main'
1787      to run global initializers, etc.  */
1788   if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1789       && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1790       && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1791     expand_main_function ();
1794 /* Finish the definition of the current subprogram and compile it all the way
1795    to assembler language output.  */
     /* Attach the outermost BLOCK to the function so debug output sees the
        full scope tree.  */
1804   BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1805     = current_function_decl;
1807   /* Mark the RESULT_DECL as being in this subprogram. */
1808   DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1810   expand_function_end (input_filename, lineno, 0);
1812   /* If this is a nested function, push a new GC context.  That will keep
1813      local variables on the stack from being collected while we're doing
1814      the compilation of this function.  */
1815   if (function_nesting_depth > 1)
1816     ggc_push_context ();
1818   rest_of_compilation (current_function_decl);
1820   if (function_nesting_depth > 1)
1824   /* If we're sure this function is defined in this file then mark it
1826   if (TREE_ASM_WRITTEN (current_function_decl))
1827     mark_fn_defined_in_this_file (current_function_decl);
1830   /* Throw away any VAR_DECLs we made for OUT parameters; they must
1831      not be seen when we call this function and will be in
1832      unallocated memory anyway.  */
1833   for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1834        cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1835     TREE_VALUE (cico_list) = 0;
1837   if (DECL_SAVED_INSNS (current_function_decl) == 0)
1839       /* Throw away DECL_RTL in any PARM_DECLs unless this function
1840 	 was saved for inline, in which case the DECL_RTLs are in
1841 	 preserved memory.  */
1842       for (decl = DECL_ARGUMENTS (current_function_decl);
1843 	   decl != 0; decl = TREE_CHAIN (decl))
1845 	  SET_DECL_RTL (decl, 0);
1846 	  DECL_INCOMING_RTL (decl) = 0;
1849       /* Similarly, discard DECL_RTL of the return value.  */
1850       SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1852       /* But DECL_INITIAL must remain nonzero so we know this
1853 	 was an actual function definition unless toplev.c decided not
1855       if (DECL_INITIAL (current_function_decl) != 0)
1856 	DECL_INITIAL (current_function_decl) = error_mark_node;
1858       DECL_ARGUMENTS (current_function_decl) = 0;
1861   /* If we are not at the bottom of the function nesting stack, pop up to
1862      the containing function.  Otherwise show we aren't in any function.  */
1863   if (--function_nesting_depth != 0)
1864     pop_function_context ();
1866     current_function_decl = 0;
1869 /* Return a definition for a builtin function named NAME and whose data type
1870    is TYPE.  TYPE should be a function type with argument types.
1871    FUNCTION_CODE tells later passes how to compile calls to this function.
1872    See tree.h for its possible values.
1874    If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1875    the name to be called if we can't opencode the function.  */
1878 builtin_function (name, type, function_code, class, library_name)
1882      enum built_in_class class;
1883      const char *library_name;
1885   tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
     /* Builtins are always external and public; the back end resolves them.  */
1887   DECL_EXTERNAL (decl) = 1;
1888   TREE_PUBLIC (decl) = 1;
1890     SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1893   DECL_BUILT_IN_CLASS (decl) = class;
1894   DECL_FUNCTION_CODE (decl) = function_code;
1898 /* Return an integer type with the number of bits of precision given by
1899    PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
1900    it is a signed type.  */
1903 gnat_type_for_size (precision, unsignedp)
     /* Cache types up to 2 * MAX_BITS_PER_WORD bits; wider requests are
        rebuilt each time.  */
1910   if (precision <= 2 * MAX_BITS_PER_WORD
1911       && signed_and_unsigned_types[precision][unsignedp] != 0)
1912     return signed_and_unsigned_types[precision][unsignedp];
1915     t = make_unsigned_type (precision);
1917     t = make_signed_type (precision);
1919   if (precision <= 2 * MAX_BITS_PER_WORD)
1920     signed_and_unsigned_types[precision][unsignedp] = t;
1922   if (TYPE_NAME (t) == 0)
     /* Give the type a synthetic name ("SIGNED_n" / "UNSIGNED_n") for
        debug output.  */
1924       sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1925       TYPE_NAME (t) = get_identifier (type_name);
1931 /* Likewise for floating-point types.  */
1934 float_type_for_size (precision, mode)
1936      enum machine_mode mode;
     /* One cached REAL_TYPE per machine mode.  */
1941   if (float_types[(int) mode] != 0)
1942     return float_types[(int) mode];
1944   float_types[(int) mode] = t = make_node (REAL_TYPE);
1945   TYPE_PRECISION (t) = precision;
     /* Sanity check: laying out PRECISION bits must yield MODE.  */
1948   if (TYPE_MODE (t) != mode)
1951   if (TYPE_NAME (t) == 0)
1953       sprintf (type_name, "FLOAT_%d", precision);
1954       TYPE_NAME (t) = get_identifier (type_name);
1960 /* Return a data type that has machine mode MODE.  UNSIGNEDP selects
1961    an unsigned type; otherwise a signed type is returned.  */
1964 gnat_type_for_mode (mode, unsignedp)
1965      enum machine_mode mode;
     /* Dispatch on the mode class: float modes get a REAL_TYPE, everything
        else an integer type of the same bit width.  */
1968   if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1969     return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1971     return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1974 /* Return the unsigned version of a TYPE_NODE, a scalar type.  */
1977 gnat_unsigned_type (type_node)
1980   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
     /* For a modular type (or a subtype of one), copy the result so we can
        record the base/modular type in TREE_TYPE without disturbing the
        cached type.  */
1982   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1984       type = copy_node (type);
1985       TREE_TYPE (type) = type_node;
1987   else if (TREE_TYPE (type_node) != 0
1988 	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1989 	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1991       type = copy_node (type);
1992       TREE_TYPE (type) = TREE_TYPE (type_node);
1998 /* Return the signed version of a TYPE_NODE, a scalar type.  */
2001 gnat_signed_type (type_node)
2004   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
     /* Same modular-type handling as gnat_unsigned_type, but producing a
        signed type.  */
2006   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2008       type = copy_node (type);
2009       TREE_TYPE (type) = type_node;
2011   else if (TREE_TYPE (type_node) != 0
2012 	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2013 	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2015       type = copy_node (type);
2016       TREE_TYPE (type) = TREE_TYPE (type_node);
2022 /* Return a type the same as TYPE except unsigned or signed according to
2026 gnat_signed_or_unsigned_type (unsignedp, type)
     /* Non-integral types, or types already of the requested signedness,
        are returned unchanged.  */
2030   if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2033     return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2036 /* EXP is an expression for the size of an object.  If this size contains
2037    discriminant references, replace them with the maximum (if MAX_P) or
2038    minimum (if ! MAX_P) possible value of the discriminant.  */
2041 max_size (exp, max_p)
2045   enum tree_code code = TREE_CODE (exp);
2046   tree type = TREE_TYPE (exp);
2048   switch (TREE_CODE_CLASS (code))
     /* TREE_LIST: transform both value and the rest of the chain.  */
2055       if (code == TREE_LIST)
2056 	return tree_cons (TREE_PURPOSE (exp),
2057 			  max_size (TREE_VALUE (exp), max_p),
2058 			  TREE_CHAIN (exp) != 0
2059 			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
2063       /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2064 	 modify.  Otherwise, we abort since it is something we can't
2066       if (! contains_placeholder_p (exp))
     /* A discriminant reference: substitute the bound of its type,
        recursing in case the bound itself is variable.  */
2069       type = TREE_TYPE (TREE_OPERAND (exp, 1));
2071 	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2074       return max_p ? size_one_node : size_zero_node;
2079       switch (TREE_CODE_LENGTH (code))
2082 	  if (code == NON_LVALUE_EXPR)
2083 	    return max_size (TREE_OPERAND (exp, 0), max_p);
     /* Unary op: NEGATE flips which extreme we want for the operand.  */
2086 	    fold (build1 (code, type,
2087 			  max_size (TREE_OPERAND (exp, 0),
2088 				    code == NEGATE_EXPR ? ! max_p : max_p)));
2091 	  if (code == RTL_EXPR)
2093 	  else if (code == COMPOUND_EXPR)
2094 	    return max_size (TREE_OPERAND (exp, 1), max_p);
2095 	  else if (code == WITH_RECORD_EXPR)
     /* Binary op: MINUS flips the extreme wanted for the right operand.  */
2099 	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2100 	    tree rhs = max_size (TREE_OPERAND (exp, 1),
2101 				 code == MINUS_EXPR ? ! max_p : max_p);
2103 	    /* Special-case wanting the maximum value of a MIN_EXPR.
2104 	       In that case, if one side overflows, return the other.
2105 	       sizetype is signed, but we know sizes are non-negative.
2106 	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2107 	       overflowing or the maximum possible value and the RHS
2109 	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2111 	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2113 	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2114 		     && (TREE_OVERFLOW (lhs)
2115 			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2116 		     && ! TREE_CONSTANT (rhs))
2119 	      return fold (build (code, type, lhs, rhs));
2123 	  if (code == SAVE_EXPR)
     /* COND_EXPR: the size bound is the MAX of the two arms.  */
2125 	  else if (code == COND_EXPR)
2126 	    return fold (build (MAX_EXPR, type,
2127 				max_size (TREE_OPERAND (exp, 1), max_p),
2128 				max_size (TREE_OPERAND (exp, 2), max_p)));
2129 	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2130 	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2131 			  max_size (TREE_OPERAND (exp, 1), max_p));
2138 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2139    EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2140    Return a constructor for the template.  */
2143 build_template (template_type, array_type, expr)
2148   tree template_elts = NULL_TREE;
2149   tree bound_list = NULL_TREE;
     /* Look through padding and left-justified-modular wrappers to reach the
        underlying array type.  */
2152   if (TREE_CODE (array_type) == RECORD_TYPE
2153       && (TYPE_IS_PADDING_P (array_type)
2154 	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2155     array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2157   if (TREE_CODE (array_type) == ARRAY_TYPE
2158       || (TREE_CODE (array_type) == INTEGER_TYPE
2159 	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2160     bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2162   /* First make the list for a CONSTRUCTOR for the template.   Go down the
2163      field list of the template instead of the type chain because this
2164      array might be an Ada array of arrays and we can't tell where the
2165      nested arrays stop being the underlying object.  */
     /* Template fields come in (min, max) pairs, hence the double
        TREE_CHAIN step per iteration.  */
2167   for (field = TYPE_FIELDS (template_type); field;
2169 	  ? (bound_list = TREE_CHAIN (bound_list))
2170 	  : (array_type = TREE_TYPE (array_type))),
2171        field = TREE_CHAIN (TREE_CHAIN (field)))
2173       tree bounds, min, max;
2175       /* If we have a bound list, get the bounds from there.  Likewise
2176 	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
2177 	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2178 	 This will give us a maximum range.  */
2179       if (bound_list != 0)
2180 	bounds = TREE_VALUE (bound_list);
2181       else if (TREE_CODE (array_type) == ARRAY_TYPE)
2182 	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2183       else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2184 	       && DECL_BY_COMPONENT_PTR_P (expr))
2185 	bounds = TREE_TYPE (field);
2189       min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2190       max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2192       /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2193 	 surround them with a WITH_RECORD_EXPR giving EXPR as the
2195       if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2196 	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2197       if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2198 	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2200       template_elts = tree_cons (TREE_CHAIN (field), max,
2201 				 tree_cons (field, min, template_elts));
     /* The list was built in reverse; restore declaration order.  */
2204   return build_constructor (template_type, nreverse (template_elts));
2207 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2208 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2209 in the type contains in its DECL_INITIAL the expression to use when
2210 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2211 to print out an error message if the mechanism cannot be applied to
2212 an object of that type and also for the name. */
2215 build_vms_descriptor (type, mech, gnat_entity)
2217 Mechanism_Type mech;
2218 Entity_Id gnat_entity;
2220 tree record_type = make_node (RECORD_TYPE);
2221 tree field_list = 0;
2230 /* If TYPE is an unconstrained array, use the underlying array type. */
2231 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2232 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2234 /* If this is an array, compute the number of dimensions in the array,
2235 get the index types, and point to the inner type. */
2236 if (TREE_CODE (type) != ARRAY_TYPE)
2239 for (ndim = 1, inner_type = type;
2240 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2241 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2242 ndim++, inner_type = TREE_TYPE (inner_type))
2245 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* For Fortran-convention arrays (except NCA descriptors) the index
   types are stored in reverse order, to match column-major layout.  */
2247 if (mech != By_Descriptor_NCA
2248 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2249 for (i = ndim - 1, inner_type = type;
2251 i--, inner_type = TREE_TYPE (inner_type))
2252 idx_arr[i] = TYPE_DOMAIN (inner_type);
2254 for (i = 0, inner_type = type;
2256 i++, inner_type = TYPE_TYPE (inner_type))
2257 idx_arr[i] = TYPE_DOMAIN (inner_type);
2259 /* Now get the DTYPE value. */
/* The numeric codes below are the VMS DSC$K_DTYPE_* values — presumably
   taken from the OpenVMS calling-standard descriptor spec; verify
   against that document before changing any of them.  */
2260 switch (TREE_CODE (type))
2264 if (TYPE_VAX_FLOATING_POINT_P (type))
2265 switch ((int) TYPE_DIGITS_VALUE (type))
/* Non-VAX integer types: pick the DTYPE from the mode's bit size;
   the first code of each pair is the unsigned variant.  */
2278 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2281 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2284 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2287 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2290 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2293 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2299 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2303 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2304 && TYPE_VAX_FLOATING_POINT_P (type))
2305 switch ((int) TYPE_DIGITS_VALUE (type))
2317 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2328 /* Get the CLASS value. */
2331 case By_Descriptor_A:
2334 case By_Descriptor_NCA:
2337 case By_Descriptor_SB:
2344 /* Make the type for a descriptor for VMS. The first four fields
2345 are the same for all types. */
2348 = chainon (field_list,
2349 make_descriptor_field
2350 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2351 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2353 field_list = chainon (field_list,
2354 make_descriptor_field ("DTYPE",
2355 gnat_type_for_size (8, 1),
2356 record_type, size_int (dtype)));
2357 field_list = chainon (field_list,
2358 make_descriptor_field ("CLASS",
2359 gnat_type_for_size (8, 1),
2360 record_type, size_int (class)));
/* POINTER's initializer is a PLACEHOLDER_EXPR: it is filled in with the
   object's address when a constructor for the descriptor is built.  */
2363 = chainon (field_list,
2364 make_descriptor_field ("POINTER",
2365 build_pointer_type (type),
2368 build_pointer_type (type),
2369 build (PLACEHOLDER_EXPR,
/* Class-specific trailing fields.  */
2375 case By_Descriptor_S:
2378 case By_Descriptor_SB:
/* SB descriptors carry the bounds of a one-dimensional array.  */
2380 = chainon (field_list,
2381 make_descriptor_field
2382 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2383 TREE_CODE (type) == ARRAY_TYPE
2384 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2386 = chainon (field_list,
2387 make_descriptor_field
2388 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2389 TREE_CODE (type) == ARRAY_TYPE
2390 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2393 case By_Descriptor_A:
2394 case By_Descriptor_NCA:
2395 field_list = chainon (field_list,
2396 make_descriptor_field ("SCALE",
2397 gnat_type_for_size (8, 1),
2401 field_list = chainon (field_list,
2402 make_descriptor_field ("DIGITS",
2403 gnat_type_for_size (8, 1),
2408 = chainon (field_list,
2409 make_descriptor_field
2410 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2411 size_int (mech == By_Descriptor_NCA
2413 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2414 : (TREE_CODE (type) == ARRAY_TYPE
2415 && TYPE_CONVENTION_FORTRAN_P (type)
2418 field_list = chainon (field_list,
2419 make_descriptor_field ("DIMCT",
2420 gnat_type_for_size (8, 1),
2424 field_list = chainon (field_list,
2425 make_descriptor_field ("ARSIZE",
2426 gnat_type_for_size (32, 1),
2428 size_in_bytes (type)));
2430 /* Now build a pointer to the 0,0,0... element. */
2431 tem = build (PLACEHOLDER_EXPR, type);
2432 for (i = 0, inner_type = type; i < ndim;
2433 i++, inner_type = TREE_TYPE (inner_type))
2434 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2435 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2438 = chainon (field_list,
2439 make_descriptor_field
2440 ("A0", build_pointer_type (inner_type), record_type,
2441 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2443 /* Next come the addressing coefficients. */
/* Each coefficient is the running product of the extents
   (max - min + 1) of the inner dimensions.  */
2445 for (i = 0; i < ndim; i++)
2449 = size_binop (MULT_EXPR, tem,
2450 size_binop (PLUS_EXPR,
2451 size_binop (MINUS_EXPR,
2452 TYPE_MAX_VALUE (idx_arr[i]),
2453 TYPE_MIN_VALUE (idx_arr[i])),
/* NCA descriptors name the coefficient fields S0..Sn, A descriptors
   M0..Mn — limited to 10 dimensions by the single digit.  */
2456 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2457 fname[1] = '0' + i, fname[2] = 0;
2459 = chainon (field_list,
2460 make_descriptor_field (fname,
2461 gnat_type_for_size (32, 1),
2462 record_type, idx_length));
2464 if (mech == By_Descriptor_NCA)
2468 /* Finally here are the bounds. */
2469 for (i = 0; i < ndim; i++)
2473 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2475 = chainon (field_list,
2476 make_descriptor_field
2477 (fname, gnat_type_for_size (32, 1), record_type,
2478 TYPE_MIN_VALUE (idx_arr[i])));
2482 = chainon (field_list,
2483 make_descriptor_field
2484 (fname, gnat_type_for_size (32, 1), record_type,
2485 TYPE_MAX_VALUE (idx_arr[i])));
2490 post_error ("unsupported descriptor type for &", gnat_entity);
2493 finish_record_type (record_type, field_list, 0, 1);
2494 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2500 /* Utility routine for above code to make a field. Creates a FIELD_DECL
     named NAME of type TYPE in REC_TYPE, and records INITIAL as its
     DECL_INITIAL, which build_vms_descriptor uses as the value to plug in
     when a constructor for the descriptor record is made. */
2503 make_descriptor_field (name, type, rec_type, initial)
2510 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2512 DECL_INITIAL (field) = initial;
2516 /* Build a type to be used to represent an aliased object whose nominal
2517 type is an unconstrained array. This consists of a RECORD_TYPE containing
2518 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2519 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2520 is used to represent an arbitrary unconstrained object. Use NAME
2521 as the name of the record. */
2524 build_unc_object_type (template_type, object_type, name)
2529 tree type = make_node (RECORD_TYPE);
/* Field "BOUNDS" holds the template (the bounds); field "ARRAY" holds
   the data. The BOUNDS field is created with the addressable flag set.  */
2530 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2531 template_type, type, 0, 0, 0, 1);
2532 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2535 TYPE_NAME (type) = name;
2536 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2537 finish_record_type (type,
2538 chainon (chainon (NULL_TREE, template_field),
2545 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2546 the normal case this is just two adjustments, but we have more to do
2547 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2550 update_pointer_to (old_type, new_type)
2554 tree ptr = TYPE_POINTER_TO (old_type);
2555 tree ref = TYPE_REFERENCE_TO (old_type);
2558 /* If this is the main variant, process all the other variants first. */
2559 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2560 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2561 type = TYPE_NEXT_VARIANT (type))
2562 update_pointer_to (type, new_type);
2564 /* If no pointer or reference, we are done. Otherwise, get the new type with
2565 the same qualifiers as the old type and see if it is the same as the old
2567 if (ptr == 0 && ref == 0)
2570 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2571 if (old_type == new_type)
2574 /* First handle the simple case. */
2575 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
/* Repoint the existing POINTER_TYPE / REFERENCE_TYPE nodes at NEW_TYPE
   and make NEW_TYPE point back at them.  */
2578 TREE_TYPE (ptr) = new_type;
2579 TYPE_POINTER_TO (new_type) = ptr;
2582 TREE_TYPE (ref) = new_type;
2583 TYPE_REFERENCE_TO (new_type) = ref;
/* Re-emit debug info for named pointer/reference TYPE_DECLs since their
   target type changed.  */
2585 if (ptr != 0 && TYPE_NAME (ptr) != 0
2586 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2587 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2588 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2589 global_bindings_p (), 0);
2590 if (ref != 0 && TYPE_NAME (ref) != 0
2591 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2592 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2593 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2594 global_bindings_p (), 0);
2597 /* Now deal with the unconstrained array case. In this case the "pointer"
2598 is actually a RECORD_TYPE where the types of both fields are
2599 pointers to void. In that case, copy the field list from the
2600 old type to the new one and update the fields' context. */
2601 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2606 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
/* Move the fat pointer's two fields (array pointer, template pointer)
   from the new type's pointer record onto the old record PTR.  */
2611 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2612 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2613 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2615 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2618 ??? This is now the only use of gnat_substitute_in_type, which
2619 is now a very "heavy" routine to do this, so it should be replaced
2621 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2622 new_ref = build (COMPONENT_REF, ptr_temp_type,
2623 build (PLACEHOLDER_EXPR, ptr),
2624 TREE_CHAIN (TYPE_FIELDS (ptr)));
2627 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2628 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2629 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
/* All variants of the fat pointer now designate NEW_TYPE.  */
2631 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2632 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2634 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2635 = TREE_TYPE (new_type) = ptr;
2637 /* Now handle updating the allocation record, what the thin pointer
2638 points to. Update all pointers from the old record into the new
2639 one, update the types of the fields, and recompute the size. */
2641 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2643 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2644 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2645 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2646 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2647 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2648 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2649 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
/* Object record size = template field size + array field size,
   in both bits (TYPE_SIZE) and bytes (TYPE_SIZE_UNIT).  */
2651 TYPE_SIZE (new_obj_rec)
2652 = size_binop (PLUS_EXPR,
2653 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2654 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2655 TYPE_SIZE_UNIT (new_obj_rec)
2656 = size_binop (PLUS_EXPR,
2657 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2658 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2659 rest_of_type_compilation (ptr, global_bindings_p ());
2663 /* Convert a pointer to a constrained array into a pointer to a fat
2664 pointer. This involves making or finding a template. */
2667 convert_to_fat_pointer (type, expr)
/* Second field of the fat pointer is a pointer to the template type.  */
2671 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2672 tree template, template_addr;
2673 tree etype = TREE_TYPE (expr);
2675 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2676 pointer to the template and array. */
2677 if (integer_zerop (expr))
2681 tree_cons (TYPE_FIELDS (type),
2682 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2683 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2684 convert (build_pointer_type (template_type),
2688 /* If EXPR is a thin pointer, make the template and data from the record. */
2690 else if (TYPE_THIN_POINTER_P (etype))
2692 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Save EXPR since it is referenced twice below; if it is the address
   of an object, use that object directly instead of dereferencing.  */
2694 expr = save_expr (expr);
2695 if (TREE_CODE (expr) == ADDR_EXPR)
2696 expr = TREE_OPERAND (expr, 0);
2698 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2700 template = build_component_ref (expr, NULL_TREE, fields);
2701 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2702 build_component_ref (expr, NULL_TREE,
2703 TREE_CHAIN (fields)));
2706 /* Otherwise, build the constructor for the template. */
2707 template = build_template (template_type, TREE_TYPE (etype), expr);
2709 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2711 /* The result is a CONSTRUCTOR for the fat pointer: first field is the
     array pointer, second the template address. */
2713 build_constructor (type,
2714 tree_cons (TYPE_FIELDS (type), expr,
2715 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2716 template_addr, NULL_TREE)));
2719 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2720 from is something that is a fat pointer, so convert to a fat pointer first
2721 if EXPR is not already one. */
2724 convert_to_thin_pointer (type, expr)
2728 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2730 = convert_to_fat_pointer
2731 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2733 /* We get the pointer to the data and use a NOP_EXPR to make it the
2735 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2736 expr = build1 (NOP_EXPR, type, expr);
2741 /* Create an expression whose value is that of EXPR,
2742 converted to type TYPE. The TREE_TYPE of the value
2743 is always TYPE. This function implements all reasonable
2744 conversions; callers should filter out those that are
2745 not permitted by the language being compiled. */
2748 convert (type, expr)
2751 enum tree_code code = TREE_CODE (type);
2752 tree etype = TREE_TYPE (expr);
2753 enum tree_code ecode = TREE_CODE (etype);
2756 /* If EXPR is already the right type, we are done. */
2760 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2762 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2763 return build (WITH_RECORD_EXPR, type,
2764 convert (type, TREE_OPERAND (expr, 0)),
2765 TREE_OPERAND (expr, 1));
2767 /* If the input type has padding, remove it by doing a component reference
2768 to the field. If the output type has padding, make a constructor
2769 to build the record. If both input and output have padding and are
2770 of variable size, do this as an unchecked conversion. */
2771 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2772 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2773 && (! TREE_CONSTANT (TYPE_SIZE (type))
2774 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2776 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2778 /* If we have just converted to this padded type, just get
2779 the inner expression. */
2780 if (TREE_CODE (expr) == CONSTRUCTOR
2781 && CONSTRUCTOR_ELTS (expr) != 0
2782 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2783 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2785 return convert (type, build_component_ref (expr, NULL_TREE,
2786 TYPE_FIELDS (etype)));
2788 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2790 /* If we previously converted from another type and our type is
2791 of variable size, remove the conversion to avoid the need for
2792 variable-size temporaries. */
2793 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2794 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2795 expr = TREE_OPERAND (expr, 0);
2797 /* If we are just removing the padding from expr, convert the original
2798 object if we have variable size. That will avoid the need
2799 for some variable-size temporaries. */
2800 if (TREE_CODE (expr) == COMPONENT_REF
2801 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2802 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2803 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2804 return convert (type, TREE_OPERAND (expr, 0));
2806 /* If the result type is a padded type with a self-referentially-sized
2807 field and the expression type is a record, do this as an
2808 unchecked conversion. */
2809 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2810 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2811 && TREE_CODE (etype) == RECORD_TYPE)
2812 return unchecked_convert (type, expr);
2816 build_constructor (type,
2817 tree_cons (TYPE_FIELDS (type),
2819 (TYPE_FIELDS (type)),
2824 /* If the input is a biased type, adjust first: the underlying value is
     the stored value plus the type's minimum. */
2825 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2826 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2827 fold (build1 (GNAT_NOP_EXPR,
2828 TREE_TYPE (etype), expr)),
2829 TYPE_MIN_VALUE (etype))));
2831 /* If the input is a left-justified modular type, we need to extract
2832 the actual object before converting it to any other type with the
2833 exception of an unconstrained array. */
2834 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2835 && code != UNCONSTRAINED_ARRAY_TYPE)
2836 return convert (type, build_component_ref (expr, NULL_TREE,
2837 TYPE_FIELDS (etype)));
2839 /* If converting a type that does not contain a template into one
2840 that does, convert to the data type and then build the template. */
2841 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2842 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2844 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2849 tree_cons (TYPE_FIELDS (type),
2850 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2851 obj_type, NULL_TREE),
2852 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2853 convert (obj_type, expr), NULL_TREE)));
2856 /* There are some special cases of expressions that we process
2858 switch (TREE_CODE (expr))
2863 case TRANSFORM_EXPR:
2865 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2866 conversion in gnat_expand_expr. NULL_EXPR does not represent
2867 an actual value, so no conversion is needed. */
2868 TREE_TYPE (expr) = type;
2873 /* If we are converting a STRING_CST to another constrained array type,
2874 just make a new one in the proper type. Likewise for a
2875 CONSTRUCTOR. But if the mode of the type is different, we must
2876 ensure a new RTL is made for the constant. */
2877 if (code == ecode && AGGREGATE_TYPE_P (etype)
2878 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2879 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2881 expr = copy_node (expr);
2882 TREE_TYPE (expr) = type;
2884 if (TYPE_MODE (type) != TYPE_MODE (etype))
2885 TREE_CST_RTL (expr) = 0;
2892 /* If we are converting between two aggregate types of the same
2893 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2894 This avoids unneeded conversions, which makes reference computations
2896 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2897 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2898 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2899 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2900 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2901 TREE_OPERAND (expr, 1));
2905 case UNCONSTRAINED_ARRAY_REF:
2906 /* Convert this to the type of the inner array by getting the address of
2907 the array from the template. */
2908 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2909 build_component_ref (TREE_OPERAND (expr, 0),
2910 get_identifier ("P_ARRAY"),
2912 etype = TREE_TYPE (expr);
2913 ecode = TREE_CODE (etype);
2916 case VIEW_CONVERT_EXPR:
/* Collapse nested view conversions between aggregates (but never
   through fat pointers, whose representation differs).  */
2917 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2918 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2919 return convert (type, TREE_OPERAND (expr, 0));
2923 /* If both types are record types, just convert the pointer and
2924 make a new INDIRECT_REF.
2926 ??? Disable this for now since it causes problems with the
2927 code in build_binary_op for MODIFY_EXPR which wants to
2928 strip off conversions. But that code really is a mess and
2929 we need to do this a much better way some time. */
2931 && (TREE_CODE (type) == RECORD_TYPE
2932 || TREE_CODE (type) == UNION_TYPE)
2933 && (TREE_CODE (etype) == RECORD_TYPE
2934 || TREE_CODE (etype) == UNION_TYPE)
2935 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2936 return build_unary_op (INDIRECT_REF, NULL_TREE,
2937 convert (build_pointer_type (type),
2938 TREE_OPERAND (expr, 0)));
2945 /* Check for converting to a pointer to an unconstrained array. */
2946 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2947 return convert_to_fat_pointer (type, expr);
2949 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2950 || (code == INTEGER_CST && ecode == INTEGER_CST
2951 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2952 return fold (build1 (NOP_EXPR, type, expr));
2957 return build1 (CONVERT_EXPR, type, expr);
/* Dispatch on the target type code for the ordinary conversions.  */
2960 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2961 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2962 return unchecked_convert (type, expr);
2963 else if (TYPE_BIASED_REPRESENTATION_P (type))
/* Converting to a biased type: subtract the bias (the minimum value)
   before truncating to the representation type.  */
2964 return fold (build1 (CONVERT_EXPR, type,
2965 fold (build (MINUS_EXPR, TREE_TYPE (type),
2966 convert (TREE_TYPE (type), expr),
2967 TYPE_MIN_VALUE (type)))));
2969 /* ... fall through ... */
2972 return fold (convert_to_integer (type, expr));
2975 case REFERENCE_TYPE:
2976 /* If converting between two pointers to records denoting
2977 both a template and type, adjust if needed to account
2978 for any differing offsets, since one might be negative. */
2979 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2982 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2983 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2984 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2985 sbitsize_int (BITS_PER_UNIT));
2987 expr = build1 (NOP_EXPR, type, expr);
2988 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2989 if (integer_zerop (byte_diff))
2992 return build_binary_op (PLUS_EXPR, type, expr,
2993 fold (convert_to_pointer (type, byte_diff)));
2996 /* If converting to a thin pointer, handle specially. */
2997 if (TYPE_THIN_POINTER_P (type)
2998 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
2999 return convert_to_thin_pointer (type, expr);
3001 /* If converting fat pointer to normal pointer, get the pointer to the
3002 array and then convert it. */
3003 else if (TYPE_FAT_POINTER_P (etype))
3004 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3007 return fold (convert_to_pointer (type, expr));
3010 return fold (convert_to_real (type, expr));
3013 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3016 (type, tree_cons (TYPE_FIELDS (type),
3017 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3020 /* ... fall through ... */
3023 /* In these cases, assume the front-end has validated the conversion.
3024 If the conversion is valid, it will be a bit-wise conversion, so
3025 it can be viewed as an unchecked conversion. */
3026 return unchecked_convert (type, expr);
3029 /* Just validate that the type is indeed that of a field
3030 of the type. Then make the simple conversion. */
3031 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3032 if (TREE_TYPE (tem) == etype)
3033 return build1 (CONVERT_EXPR, type, expr);
3037 case UNCONSTRAINED_ARRAY_TYPE:
3038 /* If EXPR is a constrained array, take its address, convert it to a
3039 fat pointer, and then dereference it. Likewise if EXPR is a
3040 record containing both a template and a constrained array.
3041 Note that a record representing a left justified modular type
3042 always represents a packed constrained array. */
3043 if (ecode == ARRAY_TYPE
3044 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3045 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3046 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3049 (INDIRECT_REF, NULL_TREE,
3050 convert_to_fat_pointer (TREE_TYPE (type),
3051 build_unary_op (ADDR_EXPR,
3054 /* Do something very similar for converting one unconstrained
3055 array to another. */
3056 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3058 build_unary_op (INDIRECT_REF, NULL_TREE,
3059 convert (TREE_TYPE (type),
3060 build_unary_op (ADDR_EXPR,
3066 return fold (convert_to_complex (type, expr));
3073 /* Remove all conversions that are done in EXP. This includes converting
3074 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3075 is nonzero, always return the address of the containing object even if
3076 the address is not bit-aligned. */
3079 remove_conversions (exp, true_address)
3083 switch (TREE_CODE (exp))
/* A CONSTRUCTOR for a left-justified modular record wraps the real
   value as its first element; recurse into it.  */
3087 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3088 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3089 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
/* A COMPONENT_REF into a padding record is a conversion in disguise;
   strip it by recursing on the containing object.  */
3093 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3094 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3095 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3098 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3099 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3100 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3109 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3110 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3111 likewise return an expression pointing to the underlying array. */
3114 maybe_unconstrained_array (exp)
3117 enum tree_code code = TREE_CODE (exp);
3120 switch (TREE_CODE (TREE_TYPE (exp)))
3122 case UNCONSTRAINED_ARRAY_TYPE:
3123 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY pointer of the fat pointer to get at the
   data, preserving the readonly flag.  */
3126 = build_unary_op (INDIRECT_REF, NULL_TREE,
3127 build_component_ref (TREE_OPERAND (exp, 0),
3128 get_identifier ("P_ARRAY"),
3130 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3134 else if (code == NULL_EXPR)
/* Rebuild the NULL_EXPR with the corresponding array type, taken from
   the fat pointer's array-pointer field.  */
3135 return build1 (NULL_EXPR,
3136 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3137 (TREE_TYPE (TREE_TYPE (exp))))),
3138 TREE_OPERAND (exp, 0));
3140 else if (code == WITH_RECORD_EXPR
3141 && (TREE_OPERAND (exp, 0)
3142 != (new = maybe_unconstrained_array
3143 (TREE_OPERAND (exp, 0)))))
3144 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3145 TREE_OPERAND (exp, 1));
/* For a record containing a template, the array is the second field;
   strip any padding record around it.  */
3148 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3151 = build_component_ref (exp, NULL_TREE,
3152 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3153 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3154 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3155 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3168 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3171 unchecked_convert (type, expr)
3175 tree etype = TREE_TYPE (expr);
3177 /* If the expression is already the right type, we are done. */
3181 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3183 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3184 return build (WITH_RECORD_EXPR, type,
3185 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3186 TREE_OPERAND (expr, 1));
3188 /* If both types are integral just do a normal conversion.
3189 Likewise for a conversion to an unconstrained array. */
3190 if ((((INTEGRAL_TYPE_P (type)
3191 && ! (TREE_CODE (type) == INTEGER_TYPE
3192 && TYPE_VAX_FLOATING_POINT_P (type)))
3193 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3194 || (TREE_CODE (type) == RECORD_TYPE
3195 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3196 && ((INTEGRAL_TYPE_P (etype)
3197 && ! (TREE_CODE (etype) == INTEGER_TYPE
3198 && TYPE_VAX_FLOATING_POINT_P (etype)))
3199 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3200 || (TREE_CODE (etype) == RECORD_TYPE
3201 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3202 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
/* For biased types an unchecked conversion must NOT apply the bias, so
   strip the biased flag by moving to an unbiased copy of the type via
   GNAT_NOP_EXPR, on both the source and destination sides.  */
3206 if (TREE_CODE (etype) == INTEGER_TYPE
3207 && TYPE_BIASED_REPRESENTATION_P (etype))
3209 tree ntype = copy_type (etype);
3211 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3212 TYPE_MAIN_VARIANT (ntype) = ntype;
3213 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3216 if (TREE_CODE (type) == INTEGER_TYPE
3217 && TYPE_BIASED_REPRESENTATION_P (type))
3219 rtype = copy_type (type);
3220 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3221 TYPE_MAIN_VARIANT (rtype) = rtype;
3224 expr = convert (rtype, expr);
3226 expr = build1 (GNAT_NOP_EXPR, type, expr);
3229 /* If we are converting TO an integral type whose precision is not the
3230 same as its size, first unchecked convert to a record that contains
3231 an object of the output type. Then extract the field. */
3232 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3233 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3234 GET_MODE_BITSIZE (TYPE_MODE (type))))
3236 tree rec_type = make_node (RECORD_TYPE);
3237 tree field = create_field_decl (get_identifier ("OBJ"), type,
3238 rec_type, 1, 0, 0, 0);
3240 TYPE_FIELDS (rec_type) = field;
3241 layout_type (rec_type);
3243 expr = unchecked_convert (rec_type, expr);
3244 expr = build_component_ref (expr, NULL_TREE, field);
3247 /* Similarly for integral input type whose precision is not equal to its
3249 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3250 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3251 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3253 tree rec_type = make_node (RECORD_TYPE);
3255 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3258 TYPE_FIELDS (rec_type) = field;
3259 layout_type (rec_type);
3261 expr = build_constructor (rec_type, build_tree_list (field, expr));
3262 expr = unchecked_convert (type, expr);
3265 /* We have a special case when we are converting between two
3266 unconstrained array types. In that case, take the address,
3267 convert the fat pointer types, and dereference. */
3268 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3269 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3270 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3271 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3272 build_unary_op (ADDR_EXPR, NULL_TREE,
/* Default case: strip any unconstrained wrapper, then reinterpret the
   bits with a VIEW_CONVERT_EXPR.  */
3276 expr = maybe_unconstrained_array (expr);
3277 etype = TREE_TYPE (expr);
3278 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3281 /* If the result is an integral type whose size is not equal to
3282 the size of the underlying machine type, sign- or zero-extend
3283 the result. We need not do this in the case where the input is
3284 an integral type of the same precision and signedness or if the output
3285 is a biased type or if both the input and output are unsigned. */
3286 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3287 && ! (TREE_CODE (type) == INTEGER_TYPE
3288 && TYPE_BIASED_REPRESENTATION_P (type))
3289 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3290 GET_MODE_BITSIZE (TYPE_MODE (type)))
3291 && ! (INTEGRAL_TYPE_P (etype)
3292 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3293 && operand_equal_p (TYPE_RM_SIZE (type),
3294 (TYPE_RM_SIZE (etype) != 0
3295 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3297 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
/* Extend via a left shift followed by an arithmetic/logical right
   shift by (mode size - RM size) in the mode's base type.  */
3299 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3300 TREE_UNSIGNED (type));
3302 = convert (base_type,
3303 size_binop (MINUS_EXPR,
3305 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3306 TYPE_RM_SIZE (type)));
3309 build_binary_op (RSHIFT_EXPR, base_type,
3310 build_binary_op (LSHIFT_EXPR, base_type,
3311 convert (base_type, expr),
3316 /* An unchecked conversion should never raise Constraint_Error. The code
3317 below assumes that GCC's conversion routines overflow the same way that
3318 the underlying hardware does. This is probably true. In the rare case
3319 when it is false, we can rely on the fact that such conversions are
3320 erroneous anyway. */
3321 if (TREE_CODE (expr) == INTEGER_CST)
3322 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3324 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3325 show no longer constant. */
3326 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3327 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3328 TREE_CONSTANT (expr) = 0;
3333 #include "gt-ada-utils.h"
3334 #include "gtype-ada.h"