1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
11 * Copyright (C) 1992-2001, Free Software Foundation, Inc. *
13 * GNAT is free software; you can redistribute it and/or modify it under *
14 * terms of the GNU General Public License as published by the Free Soft- *
15 * ware Foundation; either version 2, or (at your option) any later ver- *
16 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
17 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
18 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
19 * for more details. You should have received a copy of the GNU General *
20 * Public License distributed with GNAT; see file COPYING. If not, write *
21 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
22 * MA 02111-1307, USA. *
24 * GNAT was originally developed by the GNAT team at New York University. *
25 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
27 ****************************************************************************/
/* Fallback definitions for targets that do not provide their own limits.  */
53 #ifndef MAX_FIXED_MODE_SIZE
54 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
57 #ifndef MAX_BITS_PER_WORD
58 #define MAX_BITS_PER_WORD BITS_PER_WORD
61 /* If nonzero, pretend we are allocating at global level. */
64 /* Global Variables for the various types we create. */
65 tree gnat_std_decls[(int) ADT_LAST];
67 /* Associates a GNAT tree node to a GCC tree node. It is used in
68 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
69 of `save_gnu_tree' for more info. */
70 static tree *associate_gnat_to_gnu;
72 /* This listhead is used to record any global objects that need elaboration.
73 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
74 initial value to assign. */
76 static tree pending_elaborations;
78 /* This stack allows us to momentarily switch to generating elaboration
79 lists for an inner context. */
81 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
83 /* This variable keeps a table for types for each precision so that we only
84 allocate each of them once. Signed and unsigned types are kept separate.
86 Note that these types are only used when fold-const requests something
87 special. Perhaps we should NOT share these types; we'll see how it goes. */
89 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
91 /* Likewise for float types, but record these by mode. */
92 static tree float_types[NUM_MACHINE_MODES];
94 /* For each binding contour we allocate a binding_level structure which records
95 the entities defined or declared in that contour. Contours include:
98 one for each subprogram definition
99 one for each compound statement (declare block)
101 Binding contours are used to create GCC tree BLOCK nodes. */
105 /* A chain of ..._DECL nodes for all variables, constants, functions,
106 parameters and type declarations. These ..._DECL nodes are chained
107 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
108 in the reverse of the order supplied to be compatible with the back-end. */
111 /* For each level (except the global one), a chain of BLOCK nodes for all
112 the levels that were entered and exited one level down from this one. */
114 /* The BLOCK node for this level, if one has been preallocated.
115 If 0, the BLOCK is allocated (if needed) when the level is popped. */
117 /* The binding level containing this one (the enclosing binding level). */
118 struct binding_level *level_chain;
121 /* The binding level currently in effect. */
122 static struct binding_level *current_binding_level = NULL;
124 /* A chain of binding_level structures awaiting reuse. */
125 static struct binding_level *free_binding_level = NULL;
127 /* The outermost binding level. This binding level is created when the
128 compiler is started and it will exist through the entire compilation. */
129 static struct binding_level *global_binding_level;
131 /* Binding level structures are initialized by copying this one.
   The four NULLs correspond to the four members of struct binding_level. */
132 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
/* Forward declarations of local subprograms (old-style PARAMS prototypes).  */
135 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
136 static tree compute_related_constant PARAMS ((tree, tree));
137 static tree split_plus PARAMS ((tree, tree *));
138 static int value_zerop PARAMS ((tree));
139 static tree float_type_for_size PARAMS ((int, enum machine_mode));
140 static tree convert_to_fat_pointer PARAMS ((tree, tree));
141 static tree convert_to_thin_pointer PARAMS ((tree, tree));
142 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
/* GC marking callbacks registered with ggc_add_root below.  */
144 static void mark_binding_level PARAMS((PTR));
145 static void mark_e_stack PARAMS((PTR));
147 /* Initialize the association of GNAT nodes to GCC trees. */
154 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
155 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
157 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
158 associate_gnat_to_gnu [gnat_node] = NULL_TREE;
/* Bias the pointer so it can be indexed directly by Node_Id values,
   which start at First_Node_Id rather than at zero.  */
160 associate_gnat_to_gnu -= First_Node_Id;
162 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
/* Register the elaboration list and the type caches as GC roots so the
   collector does not free trees reachable only from these statics.  */
163 ggc_add_tree_root (&pending_elaborations, 1);
164 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
165 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
166 (sizeof signed_and_unsigned_types
167 / sizeof signed_and_unsigned_types[0][0]));
168 ggc_add_tree_root (float_types, sizeof float_types / sizeof float_types[0]);
/* NOTE(review): "¤t_binding_level" below is a character-encoding
   corruption -- the sequence "&curren" was replaced by the currency-sign
   character (HTML entity &curren;).  It should read
   "&current_binding_level"; as written this does not compile.  */
170 ggc_add_root (¤t_binding_level, 1, sizeof current_binding_level,
174 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
175 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
176 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
178 If GNU_DECL is zero, a previous association is to be reset. */
181 save_gnu_tree (gnat_entity, gnu_decl, no_check)
182 Entity_Id gnat_entity;
/* It is an error to overwrite an existing association, or (when checking
   is enabled) to associate something that is not a ..._DECL node.  */
187 && (associate_gnat_to_gnu [gnat_entity]
188 || (! no_check && ! DECL_P (gnu_decl))))
191 associate_gnat_to_gnu [gnat_entity] = gnu_decl;
194 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
195 Return the ..._DECL node that was associated with it. If there is no tree
196 node associated with GNAT_ENTITY, abort.
198 In some cases, such as delayed elaboration or expressions that need to
199 be elaborated only once, GNAT_ENTITY is really not an entity. */
202 get_gnu_tree (gnat_entity)
203 Entity_Id gnat_entity;
/* A missing association is a front-end bug; see the abort mentioned above.  */
205 if (! associate_gnat_to_gnu [gnat_entity])
208 return associate_gnat_to_gnu [gnat_entity];
211 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
214 present_gnu_tree (gnat_entity)
215 Entity_Id gnat_entity;
217 return (associate_gnat_to_gnu [gnat_entity] != NULL_TREE);
221 /* Return non-zero if we are currently in the global binding level. */
/* force_global lets callers pretend to be at global level; see its
   declaration near the top of this file.  */
226 return (force_global != 0 || current_binding_level == global_binding_level
230 /* Return the list of declarations in the current level. Note that this list
231 is in reverse order (it has to be so for back-end compatibility). */
236 return current_binding_level->names;
239 /* Nonzero if the current level needs to have a BLOCK made. */
244 return (current_binding_level->names != 0);
247 /* Enter a new binding level. The input parameter is ignored, but has to be
248 specified for back-end compatibility. */
252 int ignore ATTRIBUTE_UNUSED;
254 struct binding_level *newlevel = NULL;
256 /* Reuse a struct for this binding level, if there is one. */
257 if (free_binding_level)
259 newlevel = free_binding_level;
260 free_binding_level = free_binding_level->level_chain;
264 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
/* Start the level out empty by copying the all-NULL template.  */
266 *newlevel = clear_binding_level;
268 /* Add this level to the front of the chain (stack) of levels that are
active. */
270 newlevel->level_chain = current_binding_level;
271 current_binding_level = newlevel;
274 /* Exit a binding level.
275 Pop the level off, and restore the state of the identifier-decl mappings
276 that were in effect when this level was entered.
278 If KEEP is nonzero, this level had explicit declarations, so
279 create a "block" (a BLOCK node) for the level
280 to record its declarations and subblocks for symbol table output.
282 If FUNCTIONBODY is nonzero, this level is the body of a function,
283 so create a block as if KEEP were set and also clear out all
286 If REVERSE is nonzero, reverse the order of decls before putting
287 them into the BLOCK. */
290 poplevel (keep, reverse, functionbody)
295 /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for the
296 binding level that we are about to exit and which is returned by this
298 tree block = NULL_TREE;
301 tree subblock_chain = current_binding_level->blocks;
303 int block_previously_created;
305 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
306 nodes chained through the `names' field of current_binding_level are in
307 reverse order except for PARM_DECL node, which are explicitly stored in
309 current_binding_level->names
310 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
311 : current_binding_level->names;
313 /* Output any nested inline functions within this block which must be
314 compiled because their address is needed. */
315 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
316 if (TREE_CODE (decl_node) == FUNCTION_DECL
317 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
318 && DECL_INITIAL (decl_node) != 0)
320 push_function_context ();
321 output_inline_function (decl_node);
322 pop_function_context ();
/* Reuse a preallocated BLOCK (set via set_block) if one exists; otherwise
   make one only when this level needs to appear in the symbol table.  */
326 block_previously_created = (current_binding_level->this_block != 0);
327 if (block_previously_created)
328 block = current_binding_level->this_block;
329 else if (keep || functionbody)
330 block = make_node (BLOCK);
333 BLOCK_VARS (block) = keep ? decl_chain : 0;
334 BLOCK_SUBBLOCKS (block) = subblock_chain;
337 /* Record the BLOCK node just built as a subblock of its enclosing scope. */
338 for (subblock_node = subblock_chain; subblock_node;
339 subblock_node = TREE_CHAIN (subblock_node))
340 BLOCK_SUPERCONTEXT (subblock_node) = block;
342 /* Clear out the meanings of the local variables of this level. */
344 for (subblock_node = decl_chain; subblock_node;
345 subblock_node = TREE_CHAIN (subblock_node))
346 if (DECL_NAME (subblock_node) != 0)
347 /* If the identifier was used or addressed via a local extern decl,
348 don't forget that fact. */
349 if (DECL_EXTERNAL (subblock_node))
351 if (TREE_USED (subblock_node))
352 TREE_USED (DECL_NAME (subblock_node)) = 1;
353 if (TREE_ADDRESSABLE (subblock_node))
354 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
358 /* Pop the current level, and free the structure for reuse. */
359 struct binding_level *level = current_binding_level;
360 current_binding_level = current_binding_level->level_chain;
361 level->level_chain = free_binding_level;
362 free_binding_level = level;
367 /* This is the top level block of a function. The ..._DECL chain stored
368 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
369 leave them in the BLOCK because they are found in the FUNCTION_DECL
371 DECL_INITIAL (current_function_decl) = block;
372 BLOCK_VARS (block) = 0;
376 if (!block_previously_created)
377 current_binding_level->blocks
378 = chainon (current_binding_level->blocks, block);
381 /* If we did not make a block for the level just exited, any blocks made for
382 inner levels (since they cannot be recorded as subblocks in that level)
383 must be carried forward so they will later become subblocks of something
385 else if (subblock_chain)
386 current_binding_level->blocks
387 = chainon (current_binding_level->blocks, subblock_chain);
389 TREE_USED (block) = 1;
394 /* Insert BLOCK at the end of the list of subblocks of the
395 current binding level. This is used when a BIND_EXPR is expanded,
396 to handle the BLOCK node inside the BIND_EXPR. */
402 TREE_USED (block) = 1;
403 current_binding_level->blocks
404 = chainon (current_binding_level->blocks, block);
407 /* Set the BLOCK node for the innermost scope
408 (the one we are currently in). */
/* Record BLOCK as the preallocated block for this level and absorb the
   names and subblocks it already carries into the level's own chains.  */
414 current_binding_level->this_block = block;
415 current_binding_level->names = chainon (current_binding_level->names,
417 current_binding_level->blocks = chainon (current_binding_level->blocks,
418 BLOCK_SUBBLOCKS (block));
421 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
422 Returns the ..._DECL node. */
428 struct binding_level *b;
430 /* If at top level, there is no context. But PARM_DECLs always go in the
431 level of its function. */
432 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
434 b = global_binding_level;
435 DECL_CONTEXT (decl) = 0;
439 b = current_binding_level;
440 DECL_CONTEXT (decl) = current_function_decl;
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later if necessary. This needs to be
445 this way for compatibility with the back-end.
447 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
448 will cause trouble with the debugger and aren't needed anyway. */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
452 TREE_CHAIN (decl) = b->names;
456 /* For the declaration of a type, set its name if it either is not already
457 set, was set to an IDENTIFIER_NODE, indicating an internal name,
458 or if the previous type name was not derived from a source name.
459 We'd rather have the type named with a real name and all the pointer
460 types to the same object have the same POINTER_TYPE node. Code in this
461 function in c-decl.c makes a copy of the type node here, but that may
462 cause us trouble with incomplete types, so let's not try it (at least
465 if (TREE_CODE (decl) == TYPE_DECL
466 && DECL_NAME (decl) != 0
467 && (TYPE_NAME (TREE_TYPE (decl)) == 0
468 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
469 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
470 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
471 && ! DECL_ARTIFICIAL (decl))))
472 TYPE_NAME (TREE_TYPE (decl)) = decl;
477 /* Do little here. Set up the standard declarations later after the
478 front end has been run. */
481 gnat_init_decl_processing ()
485 /* incomplete_decl_finalize_hook is defined in toplev.c. It needs to be set
486 by each front end to the appropriate routine that handles incomplete
487 VAR_DECL nodes. This routine will be invoked by compile_file when a
488 VAR_DECL node of DECL_SIZE zero is encountered. */
489 incomplete_decl_finalize_hook = finish_incomplete_decl;
491 /* Make the binding_level structure for global names. */
492 current_function_decl = 0;
493 current_binding_level = 0;
494 free_binding_level = 0;
/* NOTE(review): as written this records the null pointer just stored
   above; presumably a pushlevel call precedes this assignment -- verify.  */
496 global_binding_level = current_binding_level;
498 build_common_tree_nodes (0);
500 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
501 corresponding to the size of ptr_mode. Make this here since we need
502 this before we can expand the GNAT types. */
503 set_sizetype (type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
504 build_common_tree_nodes_2 (0);
506 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
508 /* We need to make the integer type before doing anything else.
509 We stitch this in to the appropriate GNAT type later. */
510 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
512 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
/* Generic "void *" used throughout gigi for raw addresses.  */
515 ptr_void_type_node = build_pointer_type (void_type_node);
519 /* Create the predefined scalar types such as `integer_type_node' needed
520 in the gcc back-end and initialize the global binding level. */
523 init_gigi_decls (long_long_float_type, exception_type)
524 tree long_long_float_type, exception_type;
528 /* Set the types that GCC and Gigi use from the front end. We would like
529 to do this for char_type_node, but it needs to correspond to the C
531 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
533 /* In this case, the builtin floating point types are VAX float,
534 so make up a type for use. */
535 longest_float_type_node = make_node (REAL_TYPE);
536 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
537 layout_type (longest_float_type_node);
538 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
539 longest_float_type_node));
542 longest_float_type_node = TREE_TYPE (long_long_float_type);
544 except_type_node = TREE_TYPE (exception_type);
546 unsigned_type_node = type_for_size (INT_TYPE_SIZE, 1);
547 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
548 unsigned_type_node));
551 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
554 void_ftype = build_function_type (void_type_node, NULL_TREE);
555 ptr_void_ftype = build_pointer_type (void_ftype);
557 /* Now declare runtime functions. */
558 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
560 /* malloc is a function declaration tree for a function to allocate
562 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
564 build_function_type (ptr_void_type_node,
565 tree_cons (NULL_TREE,
568 NULL_TREE, 0, 1, 1, 0);
570 /* free is a function declaration tree for a function to free memory. */
573 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
574 build_function_type (void_type_node,
575 tree_cons (NULL_TREE,
578 NULL_TREE, 0, 1, 1, 0);
580 /* Make the types and functions used for exception processing. */
582 = build_array_type (type_for_mode (Pmode, 0),
583 build_index_type (build_int_2 (5, 0)));
584 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
585 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
587 /* Functions to get and set the jumpbuf pointer for the current thread. */
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
591 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
592 NULL_TREE, 0, 1, 1, 0);
595 = create_subprog_decl
596 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
600 NULL_TREE, 0, 1, 1, 0);
602 /* Function to get the current exception. */
604 = create_subprog_decl
605 (get_identifier ("system__soft_links__get_gnat_exception"),
607 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
608 NULL_TREE, 0, 1, 1, 0);
610 /* Function that raises exceptions. */
612 = create_subprog_decl
613 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
614 build_function_type (void_type_node,
615 tree_cons (NULL_TREE,
616 build_pointer_type (except_type_node),
618 NULL_TREE, 0, 1, 1, 0);
621 /* __gnat_raise_constraint_error takes a string, an integer and never
623 raise_constraint_error_decl
624 = create_subprog_decl
625 (get_identifier ("__gnat_raise_constraint_error"), NULL_TREE,
626 build_function_type (void_type_node,
627 tree_cons (NULL_TREE,
628 build_pointer_type (char_type_node),
629 tree_cons (NULL_TREE,
632 NULL_TREE, 0, 1, 1, 0);
634 /* Likewise for __gnat_raise_program_error. */
635 raise_program_error_decl
636 = create_subprog_decl
637 (get_identifier ("__gnat_raise_program_error"), NULL_TREE,
638 build_function_type (void_type_node,
639 tree_cons (NULL_TREE,
640 build_pointer_type (char_type_node),
641 tree_cons (NULL_TREE,
644 NULL_TREE, 0, 1, 1, 0);
646 /* Likewise for __gnat_raise_storage_error. */
647 raise_storage_error_decl
648 = create_subprog_decl
649 (get_identifier ("__gnat_raise_storage_error"), NULL_TREE,
650 build_function_type (void_type_node,
651 tree_cons (NULL_TREE,
652 build_pointer_type (char_type_node),
653 tree_cons (NULL_TREE,
656 NULL_TREE, 0, 1, 1, 0);
658 /* Indicate that these never return. */
660 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
661 TREE_THIS_VOLATILE (raise_constraint_error_decl) = 1;
662 TREE_THIS_VOLATILE (raise_program_error_decl) = 1;
663 TREE_THIS_VOLATILE (raise_storage_error_decl) = 1;
665 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
666 TREE_SIDE_EFFECTS (raise_constraint_error_decl) = 1;
667 TREE_SIDE_EFFECTS (raise_program_error_decl) = 1;
668 TREE_SIDE_EFFECTS (raise_storage_error_decl) = 1;
670 TREE_TYPE (raise_nodefer_decl)
671 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
673 TREE_TYPE (raise_constraint_error_decl)
674 = build_qualified_type (TREE_TYPE (raise_constraint_error_decl),
676 TREE_TYPE (raise_program_error_decl)
677 = build_qualified_type (TREE_TYPE (raise_program_error_decl),
679 TREE_TYPE (raise_storage_error_decl)
680 = build_qualified_type (TREE_TYPE (raise_storage_error_decl),
683 /* setjmp returns an integer and has one operand, which is a pointer to
686 = create_subprog_decl
687 (get_identifier ("setjmp"), NULL_TREE,
688 build_function_type (integer_type_node,
689 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
690 NULL_TREE, 0, 1, 1, 0);
/* Mark setjmp as the built-in so the back end expands it inline.  */
692 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
693 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
/* Keep the standard decls alive across garbage collections.  */
695 ggc_add_tree_root (gnat_std_decls,
696 sizeof gnat_std_decls / sizeof gnat_std_decls[0]);
699 /* This routine is called in tree.c to print an error message for invalid use
700 of an incomplete type. */
703 incomplete_type_error (dont_care_1, dont_care_2)
704 tree dont_care_1 ATTRIBUTE_UNUSED;
705 tree dont_care_2 ATTRIBUTE_UNUSED;
710 /* This function is called indirectly from toplev.c to handle incomplete
711 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
712 compile_file in toplev.c makes an indirect call through the function pointer
713 incomplete_decl_finalize_hook which is initialized to this routine in
714 init_decl_processing. */
717 finish_incomplete_decl (dont_care)
718 tree dont_care ATTRIBUTE_UNUSED;
723 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
724 nodes (FIELDLIST), finish constructing the record or union type.
725 If HAS_REP is nonzero, this record has a rep clause; don't call
726 layout_type but merely set the size and alignment ourselves.
727 If DEFER_DEBUG is nonzero, do not call the debugging routines
728 on this type; it will be done later. */
731 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
737 enum tree_code code = TREE_CODE (record_type);
738 tree ada_size = bitsize_zero_node;
739 tree size = bitsize_zero_node;
740 tree size_unit = size_zero_node;
743 TYPE_FIELDS (record_type) = fieldlist;
745 if (TYPE_NAME (record_type) != 0
746 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
747 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
749 TYPE_STUB_DECL (record_type)
750 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
753 /* We don't need both the typedef name and the record name output in
754 the debugging information, since they are the same. */
755 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
757 /* Globally initialize the record first. If this is a rep'ed record,
758 that just means some initializations; otherwise, layout the record. */
762 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
763 TYPE_MODE (record_type) = BLKmode;
764 if (TYPE_SIZE (record_type) == 0)
766 TYPE_SIZE (record_type) = bitsize_zero_node;
767 TYPE_SIZE_UNIT (record_type) = size_zero_node;
772 /* Ensure there isn't a size already set. There can be in an error
773 case where there is a rep clause but all fields have errors and
774 no longer have a position. */
775 TYPE_SIZE (record_type) = 0;
776 layout_type (record_type);
779 /* At this point, the position and size of each field is known. It was
780 either set before entry by a rep clause, or by laying out the type
781 above. We now make a pass through the fields (in reverse order for
782 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
783 (for rep'ed records that are not padding types); and the mode (for
786 if (code == QUAL_UNION_TYPE)
787 fieldlist = nreverse (fieldlist);
789 for (field = fieldlist; field; field = TREE_CHAIN (field))
791 tree type = TREE_TYPE (field);
792 tree this_size = DECL_SIZE (field);
793 tree this_size_unit = DECL_SIZE_UNIT (field);
794 tree this_ada_size = DECL_SIZE (field);
/* Nested records carry their own Ada size; prefer it over the GCC size.  */
796 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
797 || TREE_CODE (type) == QUAL_UNION_TYPE)
798 && ! TYPE_IS_FAT_POINTER_P (type)
799 && ! TYPE_CONTAINS_TEMPLATE_P (type)
800 && TYPE_ADA_SIZE (type) != 0)
801 this_ada_size = TYPE_ADA_SIZE (type);
803 if (has_rep && ! DECL_BIT_FIELD (field))
804 TYPE_ALIGN (record_type)
805 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Plain unions: the size is the maximum over all members.  */
810 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
811 size = size_binop (MAX_EXPR, size, this_size);
812 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
815 case QUAL_UNION_TYPE:
817 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
818 this_ada_size, ada_size));
819 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
821 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
822 this_size_unit, size_unit));
826 /* Since we know here that all fields are sorted in order of
827 increasing bit position, the size of the record is one
828 higher than the ending bit of the last field processed
829 unless we have a rep clause, since in that case we might
830 have a field outside a QUAL_UNION_TYPE that has a higher ending
831 position. So use a MAX in that case. Also, if this field is a
832 QUAL_UNION_TYPE, we need to take into account the previous size in
833 the case of empty variants. */
835 = merge_sizes (ada_size, bit_position (field), this_ada_size,
836 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
837 size = merge_sizes (size, bit_position (field), this_size,
838 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
840 = merge_sizes (size_unit, byte_position (field), this_size_unit,
841 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
849 if (code == QUAL_UNION_TYPE)
850 nreverse (fieldlist);
852 /* If this is a padding record, we never want to make the size smaller than
853 what was specified in it, if any. */
854 if (TREE_CODE (record_type) == RECORD_TYPE
855 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
857 size = TYPE_SIZE (record_type);
858 size_unit = TYPE_SIZE_UNIT (record_type);
861 /* Now set any of the values we've just computed that apply. */
862 if (! TYPE_IS_FAT_POINTER_P (record_type)
863 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
864 TYPE_ADA_SIZE (record_type) = ada_size;
866 #ifdef ROUND_TYPE_SIZE
867 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
/* NOTE(review): "record_size" below is undeclared anywhere in this
   function -- it looks like a typo for "record_type" and will fail to
   compile whenever ROUND_TYPE_SIZE is defined for the target.  */
868 size_unit = ROUND_TYPE_SIZE_UNIT (record_size, size_unit,
869 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
871 size = round_up (size, TYPE_ALIGN (record_type));
872 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
876 && ! (TREE_CODE (record_type) == RECORD_TYPE
877 && TYPE_IS_PADDING_P (record_type)
878 && TREE_CODE (size) != INTEGER_CST
879 && contains_placeholder_p (size)))
881 TYPE_SIZE (record_type) = size;
882 TYPE_SIZE_UNIT (record_type) = size_unit;
886 compute_record_mode (record_type);
890 /* If this record is of variable size, rename it so that the
891 debugger knows it is and make a new, parallel, record
892 that tells the debugger how the record is laid out. See
894 if (TREE_CODE (TYPE_SIZE (record_type)) != INTEGER_CST)
897 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
898 ? UNION_TYPE : TREE_CODE (record_type))
899 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
901 = concat_id_with_name (orig_id,
902 TREE_CODE (record_type) == QUAL_UNION_TYPE
904 tree last_pos = bitsize_zero_node;
907 TYPE_NAME (new_record_type) = new_id;
908 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
909 TYPE_STUB_DECL (new_record_type)
910 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
911 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
912 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
913 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
914 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
916 /* Now scan all the fields, replacing each field with a new
917 field corresponding to the new encoding. */
918 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
919 old_field = TREE_CHAIN (old_field))
921 tree field_type = TREE_TYPE (old_field);
922 tree field_name = DECL_NAME (old_field);
924 tree curpos = bit_position (old_field);
926 unsigned int align = 0;
929 /* See how the position was modified from the last position.
931 There are two basic cases we support: a value was added
932 to the last position or the last position was rounded to
933 a boundary and then something was added. Check for the
934 first case first. If not, see if there is any evidence
935 of rounding. If so, round the last position and try
938 If this is a union, the position can be taken as zero. */
940 if (TREE_CODE (new_record_type) == UNION_TYPE)
941 pos = bitsize_zero_node, align = 0;
943 pos = compute_related_constant (curpos, last_pos);
945 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
946 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
948 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
949 pos = compute_related_constant (curpos,
950 round_up (last_pos, align));
952 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
953 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
954 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
955 && host_integerp (TREE_OPERAND
956 (TREE_OPERAND (curpos, 0), 1),
961 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
962 pos = compute_related_constant (curpos,
963 round_up (last_pos, align));
966 /* If we can't compute a position, set it to zero.
968 ??? We really should abort here, but it's too much work
969 to get this correct for all cases. */
972 pos = bitsize_zero_node;
974 /* See if this type is variable-size and make a new type
975 and indicate the indirection if so. */
976 if (TREE_CODE (TYPE_SIZE (field_type)) != INTEGER_CST)
978 field_type = build_pointer_type (field_type);
982 /* Make a new field name, if necessary. */
983 if (var || align != 0)
988 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
989 align / BITS_PER_UNIT);
991 strcpy (suffix, "XVL");
993 field_name = concat_id_with_name (field_name, suffix);
996 new_field = create_field_decl (field_name, field_type,
998 TYPE_SIZE (field_type), pos, 0);
999 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1000 TYPE_FIELDS (new_record_type) = new_field;
1002 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1003 zero. The only time it's not the last field of the record
1004 is when there are other components at fixed positions after
1005 it (meaning there was a rep clause for every field) and we
1006 want to be able to encode them. */
1007 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1008 (TREE_CODE (TREE_TYPE (old_field))
1011 : TYPE_SIZE (TREE_TYPE (old_field)));
1014 TYPE_FIELDS (new_record_type)
1015 = nreverse (TYPE_FIELDS (new_record_type));
1017 rest_of_type_compilation (new_record_type, global_bindings_p ());
1020 rest_of_type_compilation (record_type, global_bindings_p ());
1024 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1025 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1026 if this represents a QUAL_UNION_TYPE in which case we must look for
1027 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1028 is nonzero, we must take the MAX of the end position of this field
1029 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1031 We return an expression for the size. */
1034 merge_sizes (last_size, first_bit, size, special, has_rep)
1036 tree first_bit, size;
1040 tree type = TREE_TYPE (last_size);
/* Common case: the field's end position, MAXed with LAST_SIZE for reps.  */
1042 if (! special || TREE_CODE (size) != COND_EXPR)
1044 tree new = size_binop (PLUS_EXPR, first_bit, size);
1047 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION variant: recurse into each arm of the COND_EXPR, keeping
   LAST_SIZE for arms whose size is zero (empty variants).  */
1052 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1053 integer_zerop (TREE_OPERAND (size, 1))
1054 ? last_size : merge_sizes (last_size, first_bit,
1055 TREE_OPERAND (size, 1),
1057 integer_zerop (TREE_OPERAND (size, 2))
1058 ? last_size : merge_sizes (last_size, first_bit,
1059 TREE_OPERAND (size, 2),
1063 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1064 related by the addition of a constant. Return that constant if so. */
1067 compute_related_constant (op0, op1)
1070 tree op0_var, op1_var;
/* Decompose each operand into (constant part, variable part) and take the
   difference of the constant parts as the candidate offset.  */
1071 tree op0_con = split_plus (op0, &op0_var);
1072 tree op1_con = split_plus (op1, &op1_var);
1073 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* The candidate is valid either when the variable parts are structurally
   equal, or when OP0 equals OP1's variable part plus the candidate.  The
   success/failure return values are on elided lines.  */
1075 if (operand_equal_p (op0_var, op1_var, 0))
1077 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1083 /* Utility function of above to split a tree OP which may be a sum, into a
1084 constant part, which is returned, and a variable part, which is stored
1085 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1089 split_plus (in, pvar)
1093 tree result = bitsize_zero_node;
/* Strip any NON_LVALUE_EXPR wrappers before classifying the node.  */
1095 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1096 in = TREE_OPERAND (in, 0);
/* A bare constant: the whole of IN is the constant part and the variable
   part is zero (the return of IN itself is on an elided line).  */
1099 if (TREE_CODE (in) == INTEGER_CST)
1101 *pvar = bitsize_zero_node;
1104 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1106 tree lhs_var, rhs_var;
/* Recurse on both operands, then combine the constant parts using the
   same PLUS/MINUS code as IN itself.  */
1107 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1108 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1110 result = size_binop (PLUS_EXPR, result, lhs_con);
1111 result = size_binop (TREE_CODE (in), result, rhs_con);
/* If neither operand had a constant part, report a zero constant; the
   assignment of IN itself to *PVAR is presumably on an elided line.  */
1113 if (lhs_var == TREE_OPERAND (in, 0)
1114 && rhs_var == TREE_OPERAND (in, 1))
1115 return bitsize_zero_node;
1117 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
/* Fallback for any other tree code: IN is entirely variable.  */
1121 return bitsize_zero_node;
1124 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1125 subprogram. If it is void_type_node, then we are dealing with a procedure,
1126 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1127 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1128 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1129 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1130 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1131 RETURNS_WITH_DSP is nonzero if the function is to return with a
1132 depressed stack pointer. */
1135 create_subprog_type (return_type, param_decl_list, cico_list,
1136 returns_unconstrained, returns_by_ref, returns_with_dsp)
1138 tree param_decl_list;
1140 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1142 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1143 the subprogram formal parameters. This list is generated by traversing the
1144 input list of PARM_DECL nodes. */
1145 tree param_type_list = NULL;
/* Collect the type of each PARM_DECL; tree_cons prepends, so the list is
   built in reverse order here.  */
1149 for (param_decl = param_decl_list; param_decl;
1150 param_decl = TREE_CHAIN (param_decl))
1151 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1154 /* The list of the function parameter types has to be terminated by the void
1155 type to signal to the back-end that we are not dealing with a variable
1156 parameter subprogram, but that the subprogram has a fixed number of
1158 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1160 /* The list of argument types has been created in reverse
1162 param_type_list = nreverse (param_type_list);
1164 type = build_function_type (return_type, param_type_list);
1166 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1167 or the new type should, make a copy of TYPE. Likewise for
1168 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1169 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1170 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1171 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1172 type = copy_type (type);
/* Record the Ada-specific attributes on the (possibly copied) type.  */
1174 TYPE_CI_CO_LIST (type) = cico_list;
1175 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1176 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1177 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1181 /* Return a copy of TYPE but safe to modify in any way. */
1187 tree new = copy_node (type);
1189 /* copy_node clears this field instead of copying it, because it is
1190 aliased with TREE_CHAIN. */
1191 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Sever the copy from the original's pointer/variant webs so mutating it
   cannot affect types derived from the original.  */
1193 TYPE_POINTER_TO (new) = 0;
1194 TYPE_REFERENCE_TO (new) = 0;
1195 TYPE_MAIN_VARIANT (new) = new;
1196 TYPE_NEXT_VARIANT (new) = 0;
1201 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1202 TYPE_INDEX_TYPE is INDEX. */
1205 create_index_type (min, max, index)
1209 /* First build a type for the desired range. */
1210 tree type = build_index_2_type (min, max);
1212 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1213 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1214 is set, but not to INDEX, make a copy of this type with the requested
1215 index type. Note that we have no way of sharing these types, but that's
1216 only a small hole. */
1217 if (TYPE_INDEX_TYPE (type) == index)
1219 else if (TYPE_INDEX_TYPE (type) != 0)
1220 type = copy_type (type);
/* At this point TYPE is either fresh or a private copy; stamp INDEX on it.  */
1222 TYPE_INDEX_TYPE (type) = index;
1226 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1227 string) and TYPE is a ..._TYPE node giving its data type.
1228 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1229 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1230 information about this type. */
1233 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1236 struct attrib *attr_list;
1240 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1241 enum tree_code code = TREE_CODE (type);
/* Mark compiler-generated declarations and make the decl visible in the
   current binding level before processing attributes.  */
1243 DECL_ARTIFICIAL (type_decl) = artificial_p;
1244 pushdecl (type_decl);
1245 process_attributes (type_decl, attr_list);
1247 /* Pass type declaration information to the debugger unless this is an
1248 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1249 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1250 a dummy type, which will be completed later, or a type for which
1251 debugging information was not requested. */
1252 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1254 DECL_IGNORED_P (type_decl) = 1;
1255 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1256 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1257 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1258 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1263 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1264 ASM_NAME is its assembler name (if provided). TYPE is its data type
1265 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1266 expression; NULL_TREE if none.
1268 CONST_FLAG is nonzero if this variable is constant.
1270 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1271 the current compilation unit. This flag should be set when processing the
1272 variable definitions in a package specification. EXTERN_FLAG is nonzero
1273 when processing an external variable declaration (as opposed to a
1274 definition: no storage is to be allocated for the variable here).
1276 STATIC_FLAG is only relevant when not at top level. In that case
1277 it indicates whether to always allocate storage to the variable. */
1280 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1281 extern_flag, static_flag, attr_list)
1290 struct attrib *attr_list;
/* Determine whether the initializer counts as constant: for static/global
   storage it must be a valid static initializer; otherwise TREE_CONSTANT
   suffices.  (The start of this expression is on elided lines.)  */
1295 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1296 && (global_bindings_p () || static_flag
1297 ? 0 != initializer_constant_valid_p (var_init,
1298 TREE_TYPE (var_init))
1299 : TREE_CONSTANT (var_init))));
1301 = build_decl ((const_flag && init_const
1302 /* Only make a CONST_DECL for sufficiently-small objects.
1303 We consider complex double "sufficiently-small" */
1304 && TYPE_SIZE (type) != 0
1305 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1306 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1307 GET_MODE_SIZE (DCmode)))
1308 ? CONST_DECL : VAR_DECL, var_name, type);
1309 tree assign_init = 0;
1311 /* If this is external, throw away any initializations unless this is a
1312 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1313 we are defining a global here, leave a constant initialization and save
1314 any variable elaborations for the elaboration routine. Otherwise, if
1315 the initializing expression is not the same as TYPE, generate the
1316 initialization with an assignment statement, since it knows how
1317 to do the required adjustents. */
1319 if (extern_flag && TREE_CODE (var_decl) != CONST_DECL)
/* Global with a non-constant initializer: defer it to the elaboration
   routine rather than emitting it here.  */
1322 if (global_bindings_p () && var_init != 0 && ! init_const)
1324 add_pending_elaborations (var_decl, var_init);
/* Type mismatch or non-constant static init: do it via an explicit
   assignment statement after the decl is expanded (see ASSIGN_INIT below).  */
1328 else if (var_init != 0
1329 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1330 != TYPE_MAIN_VARIANT (type))
1331 || (static_flag && ! init_const)))
1332 assign_init = var_init, var_init = 0;
1334 DECL_COMMON (var_decl) = !flag_no_common;
1335 DECL_INITIAL (var_decl) = var_init;
1336 TREE_READONLY (var_decl) = const_flag;
1337 DECL_EXTERNAL (var_decl) = extern_flag;
1338 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1339 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1340 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1341 = TYPE_VOLATILE (type);
1343 /* At the global binding level we need to allocate static storage for the
1344 variable if and only if its not external. If we are not at the top level
1345 we allocate automatic storage unless requested not to. */
1346 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1349 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1351 process_attributes (var_decl, attr_list);
1353 /* Add this decl to the current binding level and generate any
1354 needed code and RTL. */
1355 var_decl = pushdecl (var_decl);
1356 expand_decl (var_decl);
/* Only expand the initializer here for local (function-context) decls.  */
1358 if (DECL_CONTEXT (var_decl) != 0)
1359 expand_decl_init (var_decl);
1361 /* If this is volatile, force it into memory. */
1362 if (TREE_SIDE_EFFECTS (var_decl))
1363 mark_addressable (var_decl);
1365 if (TREE_CODE (var_decl) != CONST_DECL)
1366 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1368 if (assign_init != 0)
1370 /* If VAR_DECL has a padded type, convert it to the unpadded
1371 type so the assignment is done properly. */
1372 tree lhs = var_decl;
1374 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1375 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1376 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1378 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1385 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1386 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1387 this field is in a record type with a "pragma pack". If SIZE is nonzero
1388 it is the specified size for this field. If POS is nonzero, it is the bit
1389 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1390 the address of this field for aliasing purposes. */
1393 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1402 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1404 DECL_CONTEXT (field_decl) = record_type;
1405 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1407 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1408 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1409 If it is a padding type where the inner field is of variable size, it
1410 must be at its natural alignment. Just handle the packed case here; we
1411 will disallow non-aligned rep clauses elsewhere. */
1412 if (packed && TYPE_MODE (field_type) == BLKmode)
1413 DECL_ALIGN (field_decl)
1414 = ((TREE_CODE (field_type) == RECORD_TYPE
1415 && TYPE_IS_PADDING_P (field_type)
1416 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1417 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1419 /* If a size is specified, use it. Otherwise, see if we have a size
1420 to use that may differ from the natural size of the object. */
1422 size = convert (bitsizetype, size);
/* Under pragma Pack (PACKED == 1), shrink the field to its RM size when
   that differs from its natural GCC size.  */
1425 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1426 TYPE_SIZE (field_type), 0))
1427 size = rm_size (field_type);
1429 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1431 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1432 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1433 size = round_up (size, BITS_PER_UNIT);
1436 /* Make a bitfield if a size is specified for two reasons: first if the size
1437 differs from the natural size. Second, if the alignment is insufficient.
1438 There are a number of ways the latter can be true. But never make a
1439 bitfield if the type of the field has a nonconstant size. */
1441 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1442 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1443 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1445 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1446 bitsize_int (TYPE_ALIGN
1449 || (TYPE_ALIGN (record_type) != 0
1450 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1452 DECL_BIT_FIELD (field_decl) = 1;
1453 DECL_SIZE (field_decl) = size;
/* An unpacked, unpositioned bitfield can still keep a useful alignment,
   capped by the record's alignment when one is set.  */
1454 if (! packed && pos == 0)
1455 DECL_ALIGN (field_decl)
1456 = (TYPE_ALIGN (record_type) != 0
1457 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1458 : TYPE_ALIGN (field_type));
1461 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1462 DECL_ALIGN (field_decl)
1463 = MAX (DECL_ALIGN (field_decl),
1464 DECL_BIT_FIELD (field_decl) ? 1
1465 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1466 : TYPE_ALIGN (field_type));
1470 /* We need to pass in the alignment the DECL is known to have.
1471 This is the lowest-order bit set in POS, but no more than
1472 the alignment of the record, if one is specified. Note
1473 that an alignment of 0 is taken as infinite. */
1474 unsigned int known_align;
/* x & -x isolates the lowest set bit of POS, i.e. the largest power-of-2
   alignment the position guarantees.  */
1476 if (host_integerp (pos, 1))
1477 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1479 known_align = BITS_PER_UNIT;
1481 if (TYPE_ALIGN (record_type)
1482 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1483 known_align = TYPE_ALIGN (record_type);
/* Lay the field out at the rep-clause position and record that it has
   an explicit representation.  */
1485 layout_decl (field_decl, known_align);
1486 SET_DECL_OFFSET_ALIGN (field_decl, BIGGEST_ALIGNMENT);
1487 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1488 &DECL_FIELD_BIT_OFFSET (field_decl),
1489 BIGGEST_ALIGNMENT, pos);
1491 DECL_HAS_REP_P (field_decl) = 1;
1494 /* Mark the decl as nonaddressable if it either is indicated so semantically
1495 or if it is a bit field. */
1496 DECL_NONADDRESSABLE_P (field_decl)
1497 = ! addressable || DECL_BIT_FIELD (field_decl);
1502 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1503 effects, has the value of zero. */
/* For a COMPOUND_EXPR only the second operand carries the value.  */
1509 if (TREE_CODE (exp) == COMPOUND_EXPR)
1510 return value_zerop (TREE_OPERAND (exp, 1));
1512 return integer_zerop (exp);
1515 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1516 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1517 readonly (either an IN parameter or an address of a pass-by-ref
1521 create_param_decl (param_name, param_type, readonly)
1526 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
/* The parameter is passed with exactly its declared type, both as seen
   by the back end and as written.  */
1528 DECL_ARG_TYPE (param_decl) = param_type;
1529 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1530 TREE_READONLY (param_decl) = readonly;
1534 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1537 process_attributes (decl, attr_list)
1539 struct attrib *attr_list;
1541 for (; attr_list; attr_list = attr_list->next)
1542 switch (attr_list->type)
/* Generic machine attribute: hand off to GCC's attribute machinery.  */
1544 case ATTR_MACHINE_ATTRIBUTE:
1545 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1547 ATTR_FLAG_TYPE_IN_PLACE);
/* Link alias: emit an assembler alias; the decl must be static for that.  */
1550 case ATTR_LINK_ALIAS:
1551 TREE_STATIC (decl) = 1;
1552 assemble_alias (decl, attr_list->name);
/* Weak external: use weak linkage when the target supports it, otherwise
   report it (the #ifdef guard appears to be on elided lines).  */
1555 case ATTR_WEAK_EXTERNAL:
1557 declare_weak (decl);
1559 post_error ("?weak declarations not supported on this target",
1560 attr_list->error_point);
1563 case ATTR_LINK_SECTION:
1564 #ifdef ASM_OUTPUT_SECTION_NAME
1565 DECL_SECTION_NAME (decl)
1566 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1567 IDENTIFIER_POINTER (attr_list->name));
/* A decl placed in a named section cannot be a common symbol.  */
1568 DECL_COMMON (decl) = 0;
1570 post_error ("?section attributes are not supported for this target",
1571 attr_list->error_point);
1577 /* Add some pending elaborations on the list. */
1580 add_pending_elaborations (var_decl, var_init)
/* Front-end check that elaboration code is legal at this point.  */
1585 Check_Elaboration_Code_Allowed (error_gnat_node);
/* Append (VAR_DECL . VAR_INIT) so elaborations run in declaration order.  */
1587 pending_elaborations
1588 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1591 /* Obtain any pending elaborations and clear the old list. */
1594 get_pending_elaborations ()
1596 /* Each thing added to the list went on the end; we want it on the
/* The listhead itself is a dummy node; the real entries start at its
   TREE_CHAIN, which we detach and hand back.  */
1598 tree result = TREE_CHAIN (pending_elaborations);
1600 TREE_CHAIN (pending_elaborations) = 0;
1604 /* Mark the binding level stack. */
1607 mark_binding_level (arg)
1610 struct binding_level *level = *(struct binding_level **) arg;
/* GC-mark every tree reachable from each level in the chain so the
   collector does not free them.  */
1612 for (; level != 0; level = level->level_chain)
1614 ggc_mark_tree (level->names);
1615 ggc_mark_tree (level->blocks);
1616 ggc_mark_tree (level->this_block);
1620 /* Mark the pending elaboration list. */
1626 struct e_stack *p = *((struct e_stack **) data);
/* Mark this frame's elaboration list and recurse down the stack.  */
1630 ggc_mark_tree (p->elab_list);
1631 mark_e_stack (&p->next);
1635 /* Return nonzero if there are pending elaborations. */
1638 pending_elaborations_p ()
/* Entries live after the dummy listhead, so a nonzero chain means work.  */
1640 return TREE_CHAIN (pending_elaborations) != 0;
1643 /* Save a copy of the current pending elaboration list and make a new
1647 push_pending_elaborations ()
1649 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
/* Push the current list onto the stack and start a fresh (dummy-headed)
   list for the new context.  */
1651 p->next = elist_stack;
1652 p->elab_list = pending_elaborations;
1654 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1657 /* Pop the stack of pending elaborations. */
1660 pop_pending_elaborations ()
1662 struct e_stack *p = elist_stack;
/* Restore the saved list and unlink the stack frame; the xmalloc'd frame
   is presumably freed on an elided line.  */
1664 pending_elaborations = p->elab_list;
1665 elist_stack = p->next;
1669 /* Return the current position in pending_elaborations so we can insert
1670 elaborations after that point. */
1673 get_elaboration_location ()
/* The insertion point is the last node of the current list.  */
1675 return tree_last (pending_elaborations);
1678 /* Insert the current elaborations after ELAB, which is in some elaboration
1682 insert_elaboration_list (elab)
1685 tree next = TREE_CHAIN (elab);
1687 if (TREE_CHAIN (pending_elaborations))
/* Splice the current list (minus its dummy head) in after ELAB, reattach
   ELAB's old successors after it, then reset the current list to empty.  */
1689 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1690 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1691 TREE_CHAIN (pending_elaborations) = 0;
1695 /* Returns a LABEL_DECL node for LABEL_NAME. */
1698 create_label_decl (label_name)
1701 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* A label belongs to the current function and carries no value (VOIDmode);
   record the source position for debugging.  */
1703 DECL_CONTEXT (label_decl) = current_function_decl;
1704 DECL_MODE (label_decl) = VOIDmode;
1705 DECL_SOURCE_LINE (label_decl) = lineno;
1706 DECL_SOURCE_FILE (label_decl) = input_filename;
1711 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1712 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1713 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1714 PARM_DECL nodes chained through the TREE_CHAIN field).
1716 INLINE_FLAG, PUBLIC_FLAG, and EXTERN_FLAG are used to set the appropriate
1717 fields in the FUNCTION_DECL. */
1720 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1721 inline_flag, public_flag, extern_flag, attr_list)
1725 tree param_decl_list;
1729 struct attrib *attr_list;
1731 tree return_type = TREE_TYPE (subprog_type);
1732 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1734 /* If this is a function nested inside an inlined external function, it
1735 means we aren't going to compile the outer function unless it is
1736 actually inlined, so do the same for us. */
1737 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1738 && DECL_EXTERNAL (current_function_decl))
/* Copy the caller-visible flags and type-derived qualifiers onto the decl.  */
1741 DECL_EXTERNAL (subprog_decl) = extern_flag;
1742 TREE_PUBLIC (subprog_decl) = public_flag;
1743 DECL_INLINE (subprog_decl) = inline_flag;
1744 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1745 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1746 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1747 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1748 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1751 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1753 process_attributes (subprog_decl, attr_list);
1755 /* Add this decl to the current binding level. */
1756 subprog_decl = pushdecl (subprog_decl);
1758 /* Output the assembler code and/or RTL for the declaration. */
1759 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1761 return subprog_decl;
1764 /* Count how deep we are into nested functions. This is because
1765 we shouldn't call the backend function context routines unless we
1766 are in a nested function. */
1768 static int function_nesting_depth;
1770 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1771 body. This routine needs to be invoked before processing the declarations
1772 appearing in the subprogram. */
1775 begin_subprog_body (subprog_decl)
1778 tree param_decl_list;
/* Entering a nested function: save the back end's per-function state.  */
1782 if (function_nesting_depth++ != 0)
1783 push_function_context ();
1785 announce_function (subprog_decl);
1787 /* Make this field nonzero so further routines know that this is not
1788 tentative. error_mark_node is replaced below (in poplevel) with the
1790 DECL_INITIAL (subprog_decl) = error_mark_node;
1792 /* This function exists in static storage. This does not mean `static' in
1794 TREE_STATIC (subprog_decl) = 1;
1796 /* Enter a new binding level. */
1797 current_function_decl = subprog_decl;
1800 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1801 subprogram body) so that they can be recognized as local variables in the
1804 The list of PARM_DECL nodes is stored in the right order in
1805 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1806 which they are transmitted to `pushdecl' we need to reverse the list of
1807 PARM_DECLs if we want it to be stored in the right order. The reason why
1808 we want to make sure the PARM_DECLs are stored in the correct order is
1809 that this list will be retrieved in a few lines with a call to `getdecl'
1810 to store it back into the DECL_ARGUMENTS field. */
1811 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
/* Push each PARM_DECL individually; break the chain first since pushdecl
   rebuilds it within the binding level.  */
1813 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1815 next_param = TREE_CHAIN (param_decl);
1816 TREE_CHAIN (param_decl) = NULL;
1817 pushdecl (param_decl);
1820 /* Store back the PARM_DECL nodes. They appear in the right order. */
1821 DECL_ARGUMENTS (subprog_decl) = getdecls ();
/* Begin RTL generation for the body.  */
1823 init_function_start (subprog_decl, input_filename, lineno);
1824 expand_function_start (subprog_decl, 0);
1828 /* Finish the definition of the current subprogram and compile it all the way
1829 to assembler language output. */
1832 end_subprog_body (void)
/* Hook the outermost BLOCK back to the function decl it belongs to.  */
1838 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1839 = current_function_decl;
1841 /* Mark the RESULT_DECL as being in this subprogram. */
1842 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
/* Finish RTL generation and run the rest of the compilation pipeline.  */
1844 expand_function_end (input_filename, lineno, 0);
1845 rest_of_compilation (current_function_decl);
1848 /* If we're sure this function is defined in this file then mark it
1850 if (TREE_ASM_WRITTEN (current_function_decl))
1851 mark_fn_defined_in_this_file (current_function_decl);
1854 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1855 not be seen when we call this function and will be in
1856 unallocated memory anyway. */
1857 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1858 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1859 TREE_VALUE (cico_list) = 0;
1861 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1863 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1864 was saved for inline, in which case the DECL_RTLs are in
1865 preserved memory. */
1866 for (decl = DECL_ARGUMENTS (current_function_decl);
1867 decl != 0; decl = TREE_CHAIN (decl))
1869 SET_DECL_RTL (decl, 0);
1870 DECL_INCOMING_RTL (decl) = 0;
1873 /* Similarly, discard DECL_RTL of the return value. */
1874 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1876 /* But DECL_INITIAL must remain nonzero so we know this
1877 was an actual function definition unless toplev.c decided not
1879 if (DECL_INITIAL (current_function_decl) != 0)
1880 DECL_INITIAL (current_function_decl) = error_mark_node;
1882 DECL_ARGUMENTS (current_function_decl) = 0;
1885 /* If we are not at the bottom of the function nesting stack, pop up to
1886 the containing function. Otherwise show we aren't in any function. */
1887 if (--function_nesting_depth != 0)
1888 pop_function_context ();
1890 current_function_decl = 0;
1893 /* Return a definition for a builtin function named NAME and whose data type
1894 is TYPE. TYPE should be a function type with argument types.
1895 FUNCTION_CODE tells later passes how to compile calls to this function.
1896 See tree.h for its possible values.
1898 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1899 the name to be called if we can't opencode the function. */
1902 builtin_function (name, type, function_code, class, library_name)
1906 enum built_in_class class;
1907 const char *library_name;
1909 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
/* Builtins are externally-defined, globally-visible functions.  */
1911 DECL_EXTERNAL (decl) = 1;
1912 TREE_PUBLIC (decl) = 1;
/* Fallback assembler name for when the call cannot be open-coded;
   presumably guarded by LIBRARY_NAME != 0 on an elided line.  */
1914 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
1917 DECL_BUILT_IN_CLASS (decl) = class;
1918 DECL_FUNCTION_CODE (decl) = function_code;
1922 /* Return an integer type with the number of bits of precision given by
1923 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1924 it is a signed type. */
1927 type_for_size (precision, unsignedp)
/* Fast path: reuse a previously created type from the cache, indexed by
   precision and signedness.  */
1934 if (precision <= 2 * MAX_BITS_PER_WORD
1935 && signed_and_unsigned_types[precision][unsignedp] != 0)
1936 return signed_and_unsigned_types[precision][unsignedp];
1939 t = make_unsigned_type (precision);
1941 t = make_signed_type (precision);
/* Cache the new type when the precision is small enough to index.  */
1943 if (precision <= 2 * MAX_BITS_PER_WORD)
1944 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give anonymous types a synthetic name, e.g. "UNSIGNED_32"/"SIGNED_32".  */
1946 if (TYPE_NAME (t) == 0)
1948 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1949 TYPE_NAME (t) = get_identifier (type_name);
1955 /* Likewise for floating-point types. */
1958 float_type_for_size (precision, mode)
1960 enum machine_mode mode;
/* Cache lookup keyed by machine mode.  */
1965 if (float_types[(int) mode] != 0)
1966 return float_types[(int) mode];
1968 float_types[(int) mode] = t = make_node (REAL_TYPE);
1969 TYPE_PRECISION (t) = precision;
/* Sanity check: layout must have produced the requested mode (the action
   taken on mismatch is on an elided line).  */
1972 if (TYPE_MODE (t) != mode)
/* Name anonymous types "FLOAT_<precision>".  */
1975 if (TYPE_NAME (t) == 0)
1977 sprintf (type_name, "FLOAT_%d", precision);
1978 TYPE_NAME (t) = get_identifier (type_name);
1984 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1985 an unsigned type; otherwise a signed type is returned. */
1988 type_for_mode (mode, unsignedp)
1989 enum machine_mode mode;
/* Dispatch on mode class: float modes to the float-type cache, everything
   else to the integer-type cache.  */
1992 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1993 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1995 return type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1998 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2001 unsigned_type (type_node)
2004 tree type = type_for_size (TYPE_PRECISION (type_node), 1);
/* For Ada modular types, keep a link to the original (or its base) in
   TREE_TYPE of a private copy so the modular semantics are not lost.  */
2006 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2008 type = copy_node (type);
2009 TREE_TYPE (type) = type_node;
2011 else if (TREE_TYPE (type_node) != 0
2012 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2013 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2015 type = copy_node (type);
2016 TREE_TYPE (type) = TREE_TYPE (type_node);
2022 /* Return the signed version of a TYPE_NODE, a scalar type. */
2025 signed_type (type_node)
2028 tree type = type_for_size (TYPE_PRECISION (type_node), 0);
/* Mirror of unsigned_type: preserve Ada modular-type information via
   TREE_TYPE on a private copy.  */
2030 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2032 type = copy_node (type);
2033 TREE_TYPE (type) = type_node;
2035 else if (TREE_TYPE (type_node) != 0
2036 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2037 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2039 type = copy_node (type);
2040 TREE_TYPE (type) = TREE_TYPE (type_node);
2046 /* Return a type the same as TYPE except unsigned or signed according to
2050 signed_or_unsigned_type (unsignedp, type)
/* Non-integral types, or types already of the requested signedness, are
   returned unchanged (the return of TYPE itself is on an elided line).  */
2054 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2057 return type_for_size (TYPE_PRECISION (type), unsignedp);
2060 /* EXP is an expression for the size of an object. If this size contains
2061 discriminant references, replace them with the maximum (if MAX_P) or
2062 minimum (if ! MAX_P) possible value of the discriminant. */
2065 max_size (exp, max_p)
2069 enum tree_code code = TREE_CODE (exp);
2070 tree type = TREE_TYPE (exp);
2072 switch (TREE_CODE_CLASS (code))
/* TREE_LIST: rebuild the list, recursing on each value and the chain.  */
2079 if (code == TREE_LIST)
2080 return tree_cons (TREE_PURPOSE (exp),
2081 max_size (TREE_VALUE (exp), max_p),
2082 TREE_CHAIN (exp) != 0
2083 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2087 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2088 modify. Otherwise, we abort since it is something we can't
2090 if (! contains_placeholder_p (exp))
/* Replace the discriminant reference with the extreme bound of its type.  */
2093 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2095 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2098 return max_p ? size_one_node : size_zero_node;
/* Expressions: dispatch on arity.  */
2103 switch (TREE_CODE_LENGTH (code))
2106 if (code == NON_LVALUE_EXPR)
2107 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary op: negation flips which extreme we are chasing.  */
2110 fold (build1 (code, type,
2111 max_size (TREE_OPERAND (exp, 0),
2112 code == NEGATE_EXPR ? ! max_p : max_p)));
2115 if (code == RTL_EXPR)
2117 else if (code == COMPOUND_EXPR)
2118 return max_size (TREE_OPERAND (exp, 1), max_p);
2119 else if (code == WITH_RECORD_EXPR)
/* Binary op: the subtrahend of a MINUS is minimized when we maximize.  */
2123 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2124 tree rhs = max_size (TREE_OPERAND (exp, 1),
2125 code == MINUS_EXPR ? ! max_p : max_p);
2127 /* Special-case wanting the maximum value of a MIN_EXPR.
2128 In that case, if one side overflows, return the other.
2129 sizetype is signed, but we know sizes are non-negative.
2130 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2131 overflowing or the maximum possible value and the RHS
2133 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2135 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2137 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2138 && (TREE_OVERFLOW (lhs)
2139 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2140 && ! TREE_CONSTANT (rhs))
2143 return fold (build (code, type, lhs, rhs));
2147 if (code == SAVE_EXPR)
/* A COND_EXPR's size bound is the MAX of its two arms.  */
2149 else if (code == COND_EXPR)
2150 return fold (build (MAX_EXPR, type,
2151 max_size (TREE_OPERAND (exp, 1), max_p),
2152 max_size (TREE_OPERAND (exp, 2), max_p)));
2153 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2154 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2155 max_size (TREE_OPERAND (exp, 1), max_p));
2162 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2163 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2164 Return a constructor for the template. */
/* NOTE(review): this is a numbered listing of the original source; some
   original lines (return type, parameter declarations, braces) are elided,
   so the leading numbers are original-file line numbers.  */
2167 build_template (template_type, array_type, expr)
2172 tree template_elts = NULL_TREE;
2173 tree bound_list = NULL_TREE;
/* Look through a padding record or a left-justified modular record to get
   at the actual array type underneath.  */
2176 if (TREE_CODE (array_type) == RECORD_TYPE
2177 && (TYPE_IS_PADDING_P (array_type)
2178 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2179 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* A type with TYPE_HAS_ACTUAL_BOUNDS_P carries its bounds in
   TYPE_ACTUAL_BOUNDS; prefer those when available.  */
2181 if (TREE_CODE (array_type) == ARRAY_TYPE
2182 || (TREE_CODE (array_type) == INTEGER_TYPE
2183 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2184 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2186 /* First make the list for a CONSTRUCTOR for the template. Go down the
2187 field list of the template instead of the type chain because this
2188 array might be an Ada array of arrays and we can't tell where the
2189 nested arrays stop being the underlying object. */
/* The template carries two FIELD_DECLs per dimension (min then max),
   hence the double TREE_CHAIN step each iteration.  */
2191 for (field = TYPE_FIELDS (template_type); field;
2193 ? (bound_list = TREE_CHAIN (bound_list))
2194 : (array_type = TREE_TYPE (array_type))),
2195 field = TREE_CHAIN (TREE_CHAIN (field)))
2197 tree bounds, min, max;
2199 /* If we have a bound list, get the bounds from there. Likewise
2200 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2201 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2202 This will give us a maximum range. */
2203 if (bound_list != 0)
2204 bounds = TREE_VALUE (bound_list);
2205 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2206 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2207 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2208 && DECL_BY_COMPONENT_PTR_P (expr))
2209 bounds = TREE_TYPE (field);
2213 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2214 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2216 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2217 surround them with a WITH_RECORD_EXPR giving EXPR as the
2219 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2220 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2221 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2222 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
/* Accumulate (field, min) then (chain, max); the list is built in reverse
   and straightened out by nreverse below.  */
2224 template_elts = tree_cons (TREE_CHAIN (field), max,
2225 tree_cons (field, min, template_elts))
2228 return build_constructor (template_type, nreverse (template_elts));
2231 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2232 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2233 in the type contains in its DECL_INITIAL the expression to use when
2234 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2235 to print out an error message if the mechanism cannot be applied to
2236 an object of that type and also for the name. */
/* NOTE(review): numbered listing; several original lines (declarations,
   braces, case labels of the DTYPE/CLASS switches) are elided here.  */
2239 build_vms_descriptor (type, mech, gnat_entity)
2241 Mechanism_Type mech;
2242 Entity_Id gnat_entity;
2244 tree record_type = make_node (RECORD_TYPE);
2245 tree field_list = 0;
2254 /* If TYPE is an unconstrained array, use the underlying array type. */
2255 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2256 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2258 /* If this is an array, compute the number of dimensions in the array,
2259 get the index types, and point to the inner type. */
2260 if (TREE_CODE (type) != ARRAY_TYPE)
2263 for (ndim = 1, inner_type = type;
2264 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2265 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2266 ndim++, inner_type = TREE_TYPE (inner_type))
2269 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* For Fortran-convention arrays (except NCA descriptors) the dimensions
   are recorded in reverse order.  */
2271 if (mech != By_Descriptor_NCA
2272 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2273 for (i = ndim - 1, inner_type = type;
2275 i--, inner_type = TREE_TYPE (inner_type))
2276 idx_arr[i] = TYPE_DOMAIN (inner_type);
2278 for (i = 0, inner_type = type;
2280 i++, inner_type = TREE_TYPE (inner_type))
2281 idx_arr[i] = TYPE_DOMAIN (inner_type);
2283 /* Now get the DTYPE value. */
/* The small integers assigned below are VMS descriptor DSC$B_DTYPE
   codes — presumably per the OpenVMS calling standard; verify against
   that document when modifying.  */
2284 switch (TREE_CODE (type))
2288 if (TYPE_VAX_FLOATING_POINT_P (type))
2289 switch ((int) TYPE_DIGITS_VALUE (type))
2302 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2305 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2308 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2311 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2314 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2317 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2323 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2327 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2328 && TYPE_VAX_FLOATING_POINT_P (type))
2329 switch ((int) TYPE_DIGITS_VALUE (type))
2341 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2352 /* Get the CLASS value. */
2355 case By_Descriptor_A:
2358 case By_Descriptor_NCA:
2361 case By_Descriptor_SB:
2368 /* Make the type for a descriptor for VMS. The first four fields
2369 are the same for all types. */
2372 = chainon (field_list,
2373 make_descriptor_field
2374 ("LENGTH", type_for_size (16, 1), record_type,
2375 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2377 field_list = chainon (field_list,
2378 make_descriptor_field ("DTYPE", type_for_size (8, 1),
2379 record_type, size_int (dtype)));
2380 field_list = chainon (field_list,
2381 make_descriptor_field ("CLASS", type_for_size (8, 1),
2382 record_type, size_int (class)));
2385 = chainon (field_list,
2386 make_descriptor_field ("POINTER",
2387 build_pointer_type (type),
2390 build_pointer_type (type),
2391 build (PLACEHOLDER_EXPR,
/* Remaining fields depend on the descriptor class requested.  */
2397 case By_Descriptor_S:
2400 case By_Descriptor_SB:
2402 = chainon (field_list,
2403 make_descriptor_field
2404 ("SB_L1", type_for_size (32, 1), record_type,
2405 TREE_CODE (type) == ARRAY_TYPE
2406 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2408 = chainon (field_list,
2409 make_descriptor_field
2410 ("SB_L2", type_for_size (32, 1), record_type,
2411 TREE_CODE (type) == ARRAY_TYPE
2412 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2415 case By_Descriptor_A:
2416 case By_Descriptor_NCA:
2417 field_list = chainon (field_list,
2418 make_descriptor_field ("SCALE",
2419 type_for_size (8, 1),
2423 field_list = chainon (field_list,
2424 make_descriptor_field ("DIGITS",
2425 type_for_size (8, 1),
2430 = chainon (field_list,
2431 make_descriptor_field
2432 ("AFLAGS", type_for_size (8, 1), record_type,
2433 size_int (mech == By_Descriptor_NCA
2435 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2436 : (TREE_CODE (type) == ARRAY_TYPE
2437 && TYPE_CONVENTION_FORTRAN_P (type)
2440 field_list = chainon (field_list,
2441 make_descriptor_field ("DIMCT",
2442 type_for_size (8, 1),
2446 field_list = chainon (field_list,
2447 make_descriptor_field ("ARSIZE",
2448 type_for_size (32, 1),
2450 size_in_bytes (type)));
2452 /* Now build a pointer to the 0,0,0... element. */
2453 tem = build (PLACEHOLDER_EXPR, type);
2454 for (i = 0, inner_type = type; i < ndim;
2455 i++, inner_type = TREE_TYPE (inner_type))
2456 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2457 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2460 = chainon (field_list,
2461 make_descriptor_field
2462 ("A0", build_pointer_type (inner_type), record_type,
2463 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2465 /* Next come the addressing coefficients. */
/* Per dimension: coefficient = product of extents of the inner
   dimensions (extent = max - min + 1, built with size_binop).  */
2467 for (i = 0; i < ndim; i++)
2471 = size_binop (MULT_EXPR, tem,
2472 size_binop (PLUS_EXPR,
2473 size_binop (MINUS_EXPR,
2474 TYPE_MAX_VALUE (idx_arr[i]),
2475 TYPE_MIN_VALUE (idx_arr[i])),
/* Field name is S<i> for NCA descriptors, M<i> otherwise.  */
2478 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2479 fname[1] = '0' + i, fname[2] = 0;
2480 field_list = chainon (field_list,
2481 make_descriptor_field (fname,
2482 type_for_size (32, 1),
2486 if (mech == By_Descriptor_NCA)
2490 /* Finally here are the bounds. */
/* Lower bound in L<i>, upper bound in the following field.  */
2491 for (i = 0; i < ndim; i++)
2495 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2497 = chainon (field_list,
2498 make_descriptor_field
2499 (fname, type_for_size (32, 1), record_type,
2500 TYPE_MIN_VALUE (idx_arr[i])));
2504 = chainon (field_list,
2505 make_descriptor_field
2506 (fname, type_for_size (32, 1), record_type,
2507 TYPE_MAX_VALUE (idx_arr[i])));
2512 post_error ("unsupported descriptor type for &", gnat_entity);
2515 finish_record_type (record_type, field_list, 0, 1);
2516 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2522 /* Utility routine for above code to make a field. */
/* Creates a FIELD_DECL named NAME of TYPE in REC_TYPE and stashes INITIAL
   in its DECL_INITIAL, which build_vms_descriptor later uses when making
   a constructor for the descriptor record.  (Listing elides the variable
   declaration and the return of the field.)  */
2525 make_descriptor_field (name, type, rec_type, initial)
2532 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2534 DECL_INITIAL (field) = initial;
2538 /* Build a type to be used to represent an aliased object whose nominal
2539 type is an unconstrained array. This consists of a RECORD_TYPE containing
2540 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2541 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2542 is used to represent an arbitrary unconstrained object. Use NAME
2543 as the name of the record. */
/* NOTE(review): numbered listing; some lines of the original definition
   are elided here.  */
2546 build_unc_object_type (template_type, object_type, name)
2551 tree type = make_node (RECORD_TYPE);
/* "BOUNDS" holds the template (bounds), "ARRAY" the data.  */
2552 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2553 template_type, type, 0, 0, 0, 1);
2554 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2557 TYPE_NAME (type) = name;
2558 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2559 finish_record_type (type,
2560 chainon (chainon (NULL_TREE, template_field),
2567 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2568 the normal case this is just two adjustments, but we have more to do
2569 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): numbered listing; declarations, braces and some statements
   of the original are elided — the mutation order below matters, so do not
   reorder when restoring.  */
2572 update_pointer_to (old_type, new_type)
2576 tree ptr = TYPE_POINTER_TO (old_type);
2577 tree ref = TYPE_REFERENCE_TO (old_type);
2580 /* If this is the main variant, process all the other variants first. */
2581 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2582 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2583 type = TYPE_NEXT_VARIANT (type))
2584 update_pointer_to (type, new_type);
2586 /* If no pointer or reference, we are done. Otherwise, get the new type with
2587 the same qualifiers as the old type and see if it is the same as the old
2589 if (ptr == 0 && ref == 0)
2592 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2593 if (old_type == new_type)
2596 /* First handle the simple case. */
/* Simple case: retarget the existing POINTER_TYPE/REFERENCE_TYPE nodes at
   NEW_TYPE and re-emit debug info for their TYPE_DECLs if any.  */
2597 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2600 TREE_TYPE (ptr) = new_type;
2601 TYPE_POINTER_TO (new_type) = ptr;
2604 TREE_TYPE (ref) = new_type;
2605 TYPE_REFERENCE_TO (new_type) = ref;
2607 if (ptr != 0 && TYPE_NAME (ptr) != 0
2608 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2609 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2610 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2611 global_bindings_p (), 0);
2612 if (ref != 0 && TYPE_NAME (ref) != 0
2613 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2614 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2615 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2616 global_bindings_p (), 0);
2619 /* Now deal with the unconstrained array case. In this case the "pointer"
2620 is actually a RECORD_TYPE where the types of both fields are
2621 pointers to void. In that case, copy the field list from the
2622 old type to the new one and update the fields' context. */
2623 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2628 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
/* Splice the new fat-pointer fields into the old record and reparent
   them (both fields: array pointer and template pointer).  */
2633 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2634 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2635 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2637 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2640 ??? This is now the only use of gnat_substitute_in_type, which
2641 is now a very "heavy" routine to do this, so it should be replaced
2643 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2644 new_ref = build (COMPONENT_REF, ptr_temp_type,
2645 build (PLACEHOLDER_EXPR, ptr),
2646 TREE_CHAIN (TYPE_FIELDS (ptr)));
2649 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2650 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2651 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
/* All variants of the fat pointer now designate NEW_TYPE.  */
2653 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2654 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2656 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2657 = TREE_TYPE (new_type) = ptr;
2659 /* Now handle updating the allocation record, what the thin pointer
2660 points to. Update all pointers from the old record into the new
2661 one, update the types of the fields, and recompute the size. */
2663 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2665 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2666 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2667 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2668 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2669 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2670 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2671 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
/* Object record size = template field size + array field size, in both
   bit (TYPE_SIZE) and byte (TYPE_SIZE_UNIT) forms.  */
2673 TYPE_SIZE (new_obj_rec)
2674 = size_binop (PLUS_EXPR,
2675 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2676 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2677 TYPE_SIZE_UNIT (new_obj_rec)
2678 = size_binop (PLUS_EXPR,
2679 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2680 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2681 rest_of_type_compilation (ptr, global_bindings_p ());
2685 /* Convert a pointer to a constrained array into a pointer to a fat
2686 pointer. This involves making or finding a template. */
/* NOTE(review): numbered listing; some original lines are elided.  The
   result is a CONSTRUCTOR with two elements: the data pointer (first
   field of TYPE) and the address of the template (second field).  */
2689 convert_to_fat_pointer (type, expr)
2693 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2694 tree template, template_addr;
2695 tree etype = TREE_TYPE (expr);
2697 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2698 pointer to the template and array. */
2699 if (integer_zerop (expr))
2703 tree_cons (TYPE_FIELDS (type),
2704 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2705 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2706 convert (build_pointer_type (template_type),
2710 /* If EXPR is a thin pointer, make the template and data from the record. */
2712 else if (TYPE_THIN_POINTER_P (etype))
2714 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2716 expr = save_expr (expr);
/* Avoid an INDIRECT_REF of an ADDR_EXPR: strip the ADDR_EXPR instead.  */
2717 if (TREE_CODE (expr) == ADDR_EXPR)
2718 expr = TREE_OPERAND (expr, 0);
2720 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2722 template = build_component_ref (expr, NULL_TREE, fields);
2723 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2724 build_component_ref (expr, NULL_TREE,
2725 TREE_CHAIN (fields)));
2728 /* Otherwise, build the constructor for the template. */
2729 template = build_template (template_type, TREE_TYPE (etype), expr);
2731 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2733 /* The result is a CONSTRUCTOR for the fat pointer. */
2735 build_constructor (type,
2736 tree_cons (TYPE_FIELDS (type), expr,
2737 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2738 template_addr, NULL_TREE)));
2741 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2742 is something that is a fat pointer, so convert to it first if EXPR
2743 is not already a fat pointer. */
/* NOTE(review): numbered listing; some original lines (declarations,
   braces, final return) are elided.  */
2746 convert_to_thin_pointer (type, expr)
2750 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2752 = convert_to_fat_pointer
2753 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2755 /* We get the pointer to the data and use a NOP_EXPR to make it the
2757 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2758 expr = build1 (NOP_EXPR, type, expr);
2763 /* Create an expression whose value is that of EXPR,
2764 converted to type TYPE. The TREE_TYPE of the value
2765 is always TYPE. This function implements all reasonable
2766 conversions; callers should filter out those that are
2767 not permitted by the language being compiled. */
/* NOTE(review): numbered listing; many original lines (declarations,
   braces, some case labels) are elided.  Structure: a series of special
   cases on the types involved, then a switch on TREE_CODE (expr), then a
   switch on the target type code (the case labels below).  */
2770 convert (type, expr)
2773 enum tree_code code = TREE_CODE (type);
2774 tree etype = TREE_TYPE (expr);
2775 enum tree_code ecode = TREE_CODE (etype);
2778 /* If EXPR is already the right type, we are done. */
2782 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2784 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2785 return build (WITH_RECORD_EXPR, type,
2786 convert (type, TREE_OPERAND (expr, 0)),
2787 TREE_OPERAND (expr, 1));
2789 /* If the input type has padding, remove it by doing a component reference
2790 to the field. If the output type has padding, make a constructor
2791 to build the record. If both input and output have padding and are
2792 of variable size, do this as an unchecked conversion. */
2793 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2794 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2795 && (! TREE_CONSTANT (TYPE_SIZE (type))
2796 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2798 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2800 /* If we have just converted to this padded type, just get
2801 the inner expression. */
2802 if (TREE_CODE (expr) == CONSTRUCTOR
2803 && CONSTRUCTOR_ELTS (expr) != 0
2804 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2805 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2807 return convert (type, build_component_ref (expr, NULL_TREE,
2808 TYPE_FIELDS (etype)));
2810 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2812 /* If we previously converted from another type and our type is
2813 of variable size, remove the conversion to avoid the need for
2814 variable-size temporaries. */
2815 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
2816 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2817 expr = TREE_OPERAND (expr, 0);
2819 /* If we are just removing the padding from expr, convert the original
2820 object if we have variable size. That will avoid the need
2821 for some variable-size temporaries. */
2822 if (TREE_CODE (expr) == COMPONENT_REF
2823 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2824 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2825 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2826 return convert (type, TREE_OPERAND (expr, 0));
2828 /* If the result type is a padded type with a self-referentially-sized
2829 field and the expression type is a record, do this as an
2830 unchecked conversion. */
2831 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2832 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2833 && TREE_CODE (etype) == RECORD_TYPE)
2834 return unchecked_convert (type, expr);
2838 build_constructor (type,
2839 tree_cons (TYPE_FIELDS (type),
2841 (TYPE_FIELDS (type)),
2846 /* If the input is a biased type, adjust first. */
/* A biased integer holds value - TYPE_MIN_VALUE; add the bias back
   before converting.  */
2847 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2848 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2849 fold (build1 (GNAT_NOP_EXPR,
2850 TREE_TYPE (etype), expr)),
2851 TYPE_MIN_VALUE (etype))));
2853 /* If the input is a left-justified modular type, we need to extract
2854 the actual object before converting it to any other type with the
2855 exception of an unconstrained array. */
2856 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2857 && code != UNCONSTRAINED_ARRAY_TYPE)
2858 return convert (type, build_component_ref (expr, NULL_TREE,
2859 TYPE_FIELDS (etype)));
2861 /* If converting a type that does not contain a template into one
2862 that does, convert to the data type and then build the template. */
2863 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2864 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2866 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2871 tree_cons (TYPE_FIELDS (type),
2872 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2873 obj_type, NULL_TREE),
2874 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2875 convert (obj_type, expr), NULL_TREE)));
2878 /* There are some special cases of expressions that we process
2880 switch (TREE_CODE (expr))
2885 case TRANSFORM_EXPR:
2887 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2888 conversion in gnat_expand_expr. NULL_EXPR does not represent
2889 an actual value, so no conversion is needed. */
2890 TREE_TYPE (expr) = type;
2895 /* If we are converting a STRING_CST to another constrained array type,
2896 just make a new one in the proper type. Likewise for a
2897 CONSTRUCTOR. But if the mode of the type is different, we must
2898 ensure a new RTL is made for the constant. */
2899 if (code == ecode && AGGREGATE_TYPE_P (etype)
2900 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2901 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2903 expr = copy_node (expr);
2904 TREE_TYPE (expr) = type;
2906 if (TYPE_MODE (type) != TYPE_MODE (etype))
2907 TREE_CST_RTL (expr) = 0;
2914 /* If we are converting between two aggregate types of the same
2915 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2916 This avoids unneeded conversions which makes reference computations
2918 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2919 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2920 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2921 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2922 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2923 TREE_OPERAND (expr, 1));
2927 case UNCONSTRAINED_ARRAY_REF:
2928 /* Convert this to the type of the inner array by getting the address of
2929 the array from the template. */
2930 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2931 build_component_ref (TREE_OPERAND (expr, 0),
2932 get_identifier ("P_ARRAY"),
2934 etype = TREE_TYPE (expr);
2935 ecode = TREE_CODE (etype);
2938 case UNCHECKED_CONVERT_EXPR:
2939 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2940 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2941 return convert (type, TREE_OPERAND (expr, 0));
2945 /* If both types are record types, just convert the pointer and
2946 make a new INDIRECT_REF.
2948 ??? Disable this for now since it causes problems with the
2949 code in build_binary_op for MODIFY_EXPR which wants to
2950 strip off conversions. But that code really is a mess and
2951 we need to do this a much better way some time. */
2953 && (TREE_CODE (type) == RECORD_TYPE
2954 || TREE_CODE (type) == UNION_TYPE)
2955 && (TREE_CODE (etype) == RECORD_TYPE
2956 || TREE_CODE (etype) == UNION_TYPE)
2957 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2958 return build_unary_op (INDIRECT_REF, NULL_TREE,
2959 convert (build_pointer_type (type),
2960 TREE_OPERAND (expr, 0)));
2967 /* Check for converting to a pointer to an unconstrained array. */
2968 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2969 return convert_to_fat_pointer (type, expr);
2971 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2972 || (code == INTEGER_CST && ecode == INTEGER_CST
2973 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2974 return fold (build1 (NOP_EXPR, type, expr));
2979 return build1 (CONVERT_EXPR, type, expr);
/* Dispatch on the target type code (case labels partly elided here).  */
2982 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2983 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2984 return unchecked_convert (type, expr);
/* Converting TO a biased type: subtract the bias after converting to
   the base type.  */
2985 else if (TYPE_BIASED_REPRESENTATION_P (type))
2986 return fold (build1 (CONVERT_EXPR, type,
2987 fold (build (MINUS_EXPR, TREE_TYPE (type),
2988 convert (TREE_TYPE (type), expr),
2989 TYPE_MIN_VALUE (type)))));
2991 /* ... fall through ... */
2994 return fold (convert_to_integer (type, expr));
2997 case REFERENCE_TYPE:
2998 /* If converting between two pointers to records denoting
2999 both a template and type, adjust if needed to account
3000 for any differing offsets, since one might be negative. */
3001 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3004 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3005 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3006 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3007 sbitsize_int (BITS_PER_UNIT));
3009 expr = build1 (NOP_EXPR, type, expr);
3010 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3011 if (integer_zerop (byte_diff))
3014 return build_binary_op (PLUS_EXPR, type, expr,
3015 fold (convert_to_pointer (type, byte_diff)));
3018 /* If converting to a thin pointer, handle specially. */
3019 if (TYPE_THIN_POINTER_P (type)
3020 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3021 return convert_to_thin_pointer (type, expr);
3023 /* If converting fat pointer to normal pointer, get the pointer to the
3024 array and then convert it. */
3025 else if (TYPE_FAT_POINTER_P (etype))
3026 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3029 return fold (convert_to_pointer (type, expr));
3032 return fold (convert_to_real (type, expr));
3035 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3038 (type, tree_cons (TYPE_FIELDS (type),
3039 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3042 /* ... fall through ... */
3045 /* In these cases, assume the front-end has validated the conversion.
3046 If the conversion is valid, it will be a bit-wise conversion, so
3047 it can be viewed as an unchecked conversion. */
3048 return unchecked_convert (type, expr);
3051 /* Just validate that the type is indeed that of a field
3052 of the type. Then make the simple conversion. */
3053 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3054 if (TREE_TYPE (tem) == etype)
3055 return build1 (CONVERT_EXPR, type, expr);
3059 case UNCONSTRAINED_ARRAY_TYPE:
3060 /* If EXPR is a constrained array, take its address, convert it to a
3061 fat pointer, and then dereference it. Likewise if EXPR is a
3062 record containing both a template and a constrained array.
3063 Note that a record representing a left justified modular type
3064 always represents a packed constrained array. */
3065 if (ecode == ARRAY_TYPE
3066 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3067 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3068 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3071 (INDIRECT_REF, NULL_TREE,
3072 convert_to_fat_pointer (TREE_TYPE (type),
3073 build_unary_op (ADDR_EXPR,
3076 /* Do something very similar for converting one unconstrained
3077 array to another. */
3078 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3080 build_unary_op (INDIRECT_REF, NULL_TREE,
3081 convert (TREE_TYPE (type),
3082 build_unary_op (ADDR_EXPR,
3088 return fold (convert_to_complex (type, expr));
3095 /* Remove all conversions that are done in EXP. This includes converting
3096 from a padded type or converting to a left-justified modular type. */
/* NOTE(review): numbered listing; the case labels of the switch (e.g. for
   the CONSTRUCTOR and COMPONENT_REF arms) are elided in this extract.
   Recurses until the outermost node is not a conversion.  */
3099 remove_conversions (exp)
3102 switch (TREE_CODE (exp))
/* (elided label) CONSTRUCTOR of a left-justified modular record: look
   through to its single element's value.  */
3105 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3106 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3107 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)));
/* (elided label) COMPONENT_REF pulling a field out of a padding record:
   look through to the padded object.  */
3111 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3112 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3113 return remove_conversions (TREE_OPERAND (exp, 0));
3116 case UNCHECKED_CONVERT_EXPR:
3117 case NOP_EXPR: case CONVERT_EXPR:
3118 return remove_conversions (TREE_OPERAND (exp, 0));
3127 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3128 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3129 likewise return an expression pointing to the underlying array. */
/* NOTE(review): numbered listing; declarations, braces and some case
   labels are elided.  "new" here is a local tree variable (this is C,
   pre-C++ keyword concerns).  */
3132 maybe_unconstrained_array (exp)
3135 enum tree_code code = TREE_CODE (exp);
3138 switch (TREE_CODE (TREE_TYPE (exp)))
3140 case UNCONSTRAINED_ARRAY_TYPE:
/* Dereference the P_ARRAY pointer held in the fat-pointer operand.  */
3141 if (code == UNCONSTRAINED_ARRAY_REF)
3144 = build_unary_op (INDIRECT_REF, NULL_TREE,
3145 build_component_ref (TREE_OPERAND (exp, 0),
3146 get_identifier ("P_ARRAY"),
3148 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3152 else if (code == NULL_EXPR)
3153 return build1 (NULL_EXPR,
3154 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3155 (TREE_TYPE (TREE_TYPE (exp))))),
3156 TREE_OPERAND (exp, 0));
/* Rebuild the WITH_RECORD_EXPR only if the inner expression changed.  */
3158 else if (code == WITH_RECORD_EXPR
3159 && (TREE_OPERAND (exp, 0)
3160 != (new = maybe_unconstrained_array
3161 (TREE_OPERAND (exp, 0)))))
3162 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3163 TREE_OPERAND (exp, 1));
/* (elided label, presumably RECORD_TYPE) template-bearing record: take the
   second field (the array), stripping any padding record around it.  */
3166 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3169 = build_component_ref (exp, NULL_TREE,
3170 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3171 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3172 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3173 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3186 /* Return an expression that does an unchecked converstion of EXPR to TYPE. */
3189 unchecked_convert (type, expr)
3193 tree etype = TREE_TYPE (expr);
3195 /* If the expression is already the right type, we are done. */
3199 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3201 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3202 return build (WITH_RECORD_EXPR, type,
3203 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3204 TREE_OPERAND (expr, 1));
3206 /* If both types types are integral just do a normal conversion.
3207 Likewise for a conversion to an unconstrained array. */
3208 if ((((INTEGRAL_TYPE_P (type)
3209 && ! (TREE_CODE (type) == INTEGER_TYPE
3210 && TYPE_VAX_FLOATING_POINT_P (type)))
3211 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3212 || (TREE_CODE (type) == RECORD_TYPE
3213 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3214 && ((INTEGRAL_TYPE_P (etype)
3215 && ! (TREE_CODE (etype) == INTEGER_TYPE
3216 && TYPE_VAX_FLOATING_POINT_P (etype)))
3217 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3218 || (TREE_CODE (etype) == RECORD_TYPE
3219 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3220 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3224 if (TREE_CODE (etype) == INTEGER_TYPE
3225 && TYPE_BIASED_REPRESENTATION_P (etype))
3227 tree ntype = copy_type (etype);
3229 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3230 TYPE_MAIN_VARIANT (ntype) = ntype;
3231 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3234 if (TREE_CODE (type) == INTEGER_TYPE
3235 && TYPE_BIASED_REPRESENTATION_P (type))
3237 rtype = copy_type (type);
3238 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3239 TYPE_MAIN_VARIANT (rtype) = rtype;
3242 expr = convert (rtype, expr);
3244 expr = build1 (GNAT_NOP_EXPR, type, expr);
/* NOTE(review): this span is the interior tail of unchecked_convert ()
   (GNAT gigi).  The numeric prefix fused onto each line below (e.g. "3247")
   looks like an original source line number left over from text extraction,
   and several short lines appear to have been dropped -- opening/closing
   braces, a "tree field" declarator before the "= create_field_decl" line,
   and the "expr));" continuation of the ADDR_EXPR call.  The text is not
   compilable as-is; restore from a pristine copy of ada/utils.c before
   making code changes.  Only comments are added here.  */
3247 /* If we are converting TO an integral type whose precision is not the
3248 same as its size, first unchecked convert to a record that contains
3249 an object of the output type. Then extract the field. */
3250 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3251 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3252 GET_MODE_BITSIZE (TYPE_MODE (type))))
3254 tree rec_type = make_node (RECORD_TYPE);
3255 tree field = create_field_decl (get_identifier ("OBJ"), type,
3256 rec_type, 1, 0, 0, 0);
3258 TYPE_FIELDS (rec_type) = field;
3259 layout_type (rec_type);
/* Wrap the source in a one-field record of the target type, convert the
   record as a whole, then pull the target value back out of the field.  */
3261 expr = unchecked_convert (rec_type, expr);
3262 expr = build_component_ref (expr, NULL_TREE, field);
3265 /* Similarly for integral input type whose precision is not equal to its
3267 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3268 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3269 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3271 tree rec_type = make_node (RECORD_TYPE);
/* NOTE(review): a "tree field" declarator line appears to be missing
   immediately above the "=" on the next line.  */
3273 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3276 TYPE_FIELDS (rec_type) = field;
3277 layout_type (rec_type);
/* Mirror of the previous arm: build a record value holding the input,
   then unchecked-convert that record to the target type.  */
3279 expr = build_constructor (rec_type, build_tree_list (field, expr));
3280 expr = unchecked_convert (type, expr);
3283 /* We have a special case when we are converting between two
3284 unconstrained array types. In that case, take the address,
3285 convert the fat pointer types, and dereference. */
3286 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3287 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3288 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3289 build1 (UNCHECKED_CONVERT_EXPR, TREE_TYPE (type),
/* NOTE(review): the "expr));" continuation closing this ADDR_EXPR call
   appears to have been lost in extraction.  */
3290 build_unary_op (ADDR_EXPR, NULL_TREE,
3293 /* If both types are aggregates with the same mode and alignment (except
3294 if the result is a UNION_TYPE), we can do this as a normal conversion. */
3295 else if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3296 && TREE_CODE (type) != UNION_TYPE
3297 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3298 && TYPE_MODE (type) == TYPE_MODE (etype))
3299 expr = build1 (CONVERT_EXPR, type, expr);
/* Fallback arm: peel any unconstrained-array wrapper, then emit a raw
   UNCHECKED_CONVERT_EXPR to the target type.  */
3303 expr = maybe_unconstrained_array (expr);
3304 etype = TREE_TYPE (expr);
3305 expr = build1 (UNCHECKED_CONVERT_EXPR, type, expr);
3309 /* If the result is an integral type whose size is not equal to
3310 the size of the underlying machine type, sign- or zero-extend
3311 the result. We need not do this in the case where the input is
3312 an integral type of the same precision and signedness or if the output
3313 is a biased type or if both the input and output are unsigned. */
3314 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3315 && ! (TREE_CODE (type) == INTEGER_TYPE
3316 && TYPE_BIASED_REPRESENTATION_P (type))
3317 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3318 GET_MODE_BITSIZE (TYPE_MODE (type)))
3319 && ! (INTEGRAL_TYPE_P (etype)
3320 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3321 && operand_equal_p (TYPE_RM_SIZE (type),
3322 (TYPE_RM_SIZE (etype) != 0
3323 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3325 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
/* Extend by shifting left then right in a full-machine-width base type;
   the shift count is (mode bitsize - RM size), i.e. the unused high bits.
   Whether the right shift sign- or zero-extends follows the signedness
   passed to type_for_mode.  NOTE(review): the lines computing/naming the
   shift count and the trailing operands of the shifts appear truncated
   here (orig. 3328-3341).  */
3327 tree base_type = type_for_mode (TYPE_MODE (type), TREE_UNSIGNED (type));
3329 = convert (base_type,
3330 size_binop (MINUS_EXPR,
3332 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3333 TYPE_RM_SIZE (type)));
3336 build_binary_op (RSHIFT_EXPR, base_type,
3337 build_binary_op (LSHIFT_EXPR, base_type,
3338 convert (base_type, expr),
3343 /* An unchecked conversion should never raise Constraint_Error. The code
3344 below assumes that GCC's conversion routines overflow the same
3345 way that the underlying hardware does. This is probably true. In
3346 the rare case when it isn't, we can rely on the fact that such
3347 conversions are erroneous anyway. */
3348 if (TREE_CODE (expr) == INTEGER_CST)
3349 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3351 /* If the sizes of the types differ and this is an UNCHECKED_CONVERT_EXPR,
3352 show no longer constant. */
3353 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
3354 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3355 TREE_CONSTANT (expr) = 0;