1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
26 ****************************************************************************/
/* NOTE(review): this file is a line-numbered, partially elided listing; the
   matching #endif for each guard below falls in an elided gap (lines 54-55,
   58-59 of the original).  */
/* Widest size (in bits) for a fixed (non-BLKmode) mode; default to the
   width of DImode when the target does not override it.  */
52 #ifndef MAX_FIXED_MODE_SIZE
53 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
/* Upper bound used to dimension the per-precision type cache below;
   default to the target word size.  */
56 #ifndef MAX_BITS_PER_WORD
57 #define MAX_BITS_PER_WORD BITS_PER_WORD
22 60 /* If nonzero, pretend we are allocating at global level. */
/* NOTE(review): the declaration this comment documents (an int named
   force_global, per its use at original line 225) is elided here.  */
63 /* Tree nodes for the various types and decls we create. */
64 tree gnat_std_decls[(int) ADT_LAST];
66 /* Functions to call for each of the possible raise reasons. */
67 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
69 /* Associates a GNAT tree node to a GCC tree node. It is used in
70 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
71 of `save_gnu_tree' for more info. */
72 static tree *associate_gnat_to_gnu;
74 /* This listhead is used to record any global objects that need elaboration.
75 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
76 initial value to assign. */
78 static tree pending_elaborations;
80 /* This stack allows us to momentarily switch to generating elaboration
81 lists for an inner context. */
83 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
85 /* This variable keeps a table for types for each precision so that we only
86 allocate each of them once. Signed and unsigned types are kept separate.
88 Note that these types are only used when fold-const requests something
89 special. Perhaps we should NOT share these types; we'll see how it
/* NOTE(review): the end of the sentence above (original line 90) is elided.  */
91 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
93 /* Likewise for float types, but record these by mode. */
94 static tree float_types[NUM_MACHINE_MODES];
45 96 /* For each binding contour we allocate a binding_level structure which records
97 the entities defined or declared in that contour. Contours include:
100 one for each subprogram definition
101 one for each compound statement (declare block)
103 Binding contours are used to create GCC tree BLOCK nodes. */
/* NOTE(review): the `struct binding_level {' header itself (circa original
   line 105) is elided; the commented members below belong to it.  Its four
   fields — presumably names, blocks, this_block, level_chain, matching the
   four-NULL initializer at line 134 — TODO confirm against full source.  */
107 /* A chain of ..._DECL nodes for all variables, constants, functions,
108 parameters and type declarations. These ..._DECL nodes are chained
109 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
110 in the reverse of the order supplied to be compatible with the
113 /* For each level (except the global one), a chain of BLOCK nodes for all
114 the levels that were entered and exited one level down from this one. */
116 /* The BLOCK node for this level, if one has been preallocated.
117 If 0, the BLOCK is allocated (if needed) when the level is popped. */
119 /* The binding level containing this one (the enclosing binding level). */
120 struct binding_level *level_chain;
123 /* The binding level currently in effect. */
124 static struct binding_level *current_binding_level = NULL;
126 /* A chain of binding_level structures awaiting reuse. */
127 static struct binding_level *free_binding_level = NULL;
129 /* The outermost binding level. This binding level is created when the
130 compiler is started and it will exist through the entire compilation. */
131 static struct binding_level *global_binding_level;
133 /* Binding level structures are initialized by copying this one. */
134 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
69 /* Forward declarations for this file's static helpers (K&R-era PARAMS
   macro; pre-ISO prototypes).  */
136 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
137 static tree compute_related_constant PARAMS ((tree, tree));
138 static tree split_plus PARAMS ((tree, tree *));
139 static int value_zerop PARAMS ((tree));
140 static tree float_type_for_size PARAMS ((int, enum machine_mode));
141 static tree convert_to_fat_pointer PARAMS ((tree, tree));
142 static tree convert_to_thin_pointer PARAMS ((tree, tree));
/* NOTE(review): the continuation of this prototype (original line 144) is
   elided by the listing.  */
143 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
145 static void mark_binding_level PARAMS ((PTR));
146 static void mark_e_stack PARAMS ((PTR));
79 148 /* Initialize the association of GNAT nodes to GCC trees. */
/* NOTE(review): the function header for init_gnat_to_gnu (original lines
   150-154) is elided; what follows is its body.  It allocates the
   GNAT-entity -> GCC-tree map and registers the file's GC roots.  */
155 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
156 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
/* Start with every entity unassociated.  */
158 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
159 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
161 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
162 ggc_add_tree_root (&pending_elaborations, 1);
163 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
164 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
165 (sizeof signed_and_unsigned_types
166 / sizeof signed_and_unsigned_types[0][0]));
167 ggc_add_tree_root (float_types, ARRAY_SIZE (float_types));
/* NOTE(review): "¤t_binding_level" is mojibake — an HTML-entity
   mangling of "&current_binding_level" ("&curren" became the currency
   sign).  The original line reads:
   ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,  */
169 ggc_add_root (¤t_binding_level, 1, sizeof current_binding_level,
92 173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
174 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
177 If GNU_DECL is zero, a previous association is to be reset. */
/* NOTE(review): return type, the gnu_decl/no_check parameter declarations
   and the sanity-check head (original lines 178-185) are elided.  */
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
/* Refuse to overwrite an existing association, and (unless no_check)
   refuse a non-..._DECL value.  */
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
101 193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
/* NOTE(review): the return type, opening brace and the abort call taken
   when no association exists (original lines 199-200, 203, 205-206) are
   elided from this listing.  */
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
110 210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* NOTE(review): return type and braces (original lines 211-212, 215, 217)
   are elided.  */
213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
114 220 /* Return non-zero if we are currently in the global binding level. */
/* NOTE(review): fragments of three tiny accessors follow; each function
   header and most braces are elided.  global_bindings_p also honors the
   force_global override.  */
225 return (force_global != 0 || current_binding_level == global_binding_level
229 /* Return the list of declarations in the current level. Note that this list
230 is in reverse order (it has to be so for back-end compatibility). */
/* getdecls: */
235 return current_binding_level->names;
238 /* Nonzero if the current level needs to have a BLOCK made. */
/* kept_level_p: a BLOCK is only needed when the level declared names.  */
243 return (current_binding_level->names != 0);
121 246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
/* NOTE(review): the pushlevel function header (circa original lines
   248-250) is elided.  */
251 int ignore ATTRIBUTE_UNUSED;
253 struct binding_level *newlevel = NULL;
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
/* Otherwise allocate a fresh one (the `else' head, original lines
   261-262, is elided).  */
263 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
265 *newlevel = clear_binding_level;
267 /* Add this level to the front of the chain (stack) of levels that are
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
134 273 /* Exit a binding level.
274 Pop the level off, and restore the state of the identifier-decl mappings
275 that were in effect when this level was entered.
277 If KEEP is nonzero, this level had explicit declarations, so
278 and create a "block" (a BLOCK node) for the level
279 to record its declarations and subblocks for symbol table output.
281 If FUNCTIONBODY is nonzero, this level is the body of a function,
282 so create a block as if KEEP were set and also clear out all
285 If REVERSE is nonzero, reverse the order of decls before putting
286 them into the BLOCK. */
/* NOTE(review): this is the largest elided fragment; return type, several
   local declarations, braces and if/else heads are missing throughout.  */
289 poplevel (keep, reverse, functionbody)
294 /* Points to a GCC BLOCK tree node. This is the BLOCK node construted for the
295 binding level that we are about to exit and which is returned by this
297 tree block = NULL_TREE;
300 tree subblock_chain = current_binding_level->blocks;
302 int block_previously_created;
304 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
305 nodes chained through the `names' field of current_binding_level are in
306 reverse order except for PARM_DECL node, which are explicitly stored in
308 current_binding_level->names
309 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
310 : current_binding_level->names;
312 /* Output any nested inline functions within this block which must be
313 compiled because their address is needed. */
314 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
315 if (TREE_CODE (decl_node) == FUNCTION_DECL
316 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
317 && DECL_INITIAL (decl_node) != 0)
319 push_function_context ();
320 output_inline_function (decl_node);
321 pop_function_context ();
/* Reuse a preallocated BLOCK if set_block provided one; otherwise make
   one only when this level declared something or ends a function.  */
325 block_previously_created = (current_binding_level->this_block != 0);
326 if (block_previously_created)
327 block = current_binding_level->this_block;
328 else if (keep || functionbody)
329 block = make_node (BLOCK);
332 BLOCK_VARS (block) = keep ? decl_chain : 0;
333 BLOCK_SUBBLOCKS (block) = subblock_chain;
336 /* Record the BLOCK node just built as the subblock its enclosing scope. */
337 for (subblock_node = subblock_chain; subblock_node;
338 subblock_node = TREE_CHAIN (subblock_node))
339 BLOCK_SUPERCONTEXT (subblock_node) = block;
341 /* Clear out the meanings of the local variables of this level. */
343 for (subblock_node = decl_chain; subblock_node;
344 subblock_node = TREE_CHAIN (subblock_node))
345 if (DECL_NAME (subblock_node) != 0)
346 /* If the identifier was used or addressed via a local extern decl,
347 don't forget that fact. */
348 if (DECL_EXTERNAL (subblock_node))
350 if (TREE_USED (subblock_node))
351 TREE_USED (DECL_NAME (subblock_node)) = 1;
352 if (TREE_ADDRESSABLE (subblock_node))
353 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
357 /* Pop the current level, and free the structure for reuse. */
358 struct binding_level *level = current_binding_level;
359 current_binding_level = current_binding_level->level_chain;
360 level->level_chain = free_binding_level;
361 free_binding_level = level;
/* NOTE(review): the `if (functionbody)' head (original lines 363-365)
   is elided.  */
366 /* This is the top level block of a function. The ..._DECL chain stored
367 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
368 leave them in the BLOCK because they are found in the FUNCTION_DECL
370 DECL_INITIAL (current_function_decl) = block;
371 BLOCK_VARS (block) = 0;
375 if (!block_previously_created)
376 current_binding_level->blocks
377 = chainon (current_binding_level->blocks, block);
380 /* If we did not make a block for the level just exited, any blocks made for
381 inner levels (since they cannot be recorded as subblocks in that level)
382 must be carried forward so they will later become subblocks of something
384 else if (subblock_chain)
385 current_binding_level->blocks
386 = chainon (current_binding_level->blocks, subblock_chain);
388 TREE_USED (block) = 1;
207 393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
/* NOTE(review): insert_block's header (original lines 397-400) is
   elided.  */
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
/* NOTE(review): set_block's header (original lines 409-412) is elided;
   it also absorbs the BLOCK's vars and subblocks into the level.  */
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
219 420 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
421 Returns the ..._DECL node. */
/* NOTE(review): pushdecl's header (original lines 422-426) is elided.  */
427 struct binding_level *b;
429 /* If at top level, there is no context. But PARM_DECLs always go in the
430 level of its function. */
431 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
433 b = global_binding_level;
434 DECL_CONTEXT (decl) = 0;
/* Otherwise file in the current level, owned by the current function
   (the `else' head, original lines 436-437, is elided).  */
438 b = current_binding_level;
439 DECL_CONTEXT (decl) = current_function_decl;
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later if necessary. This needs to be
444 this way for compatibility with the back-end.
446 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
447 will cause trouble with the debugger and aren't needed anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 TREE_CHAIN (decl) = b->names;
455 /* For the declaration of a type, set its name if it either is not already
456 set, was set to an IDENTIFIER_NODE, indicating an internal name,
457 or if the previous type name was not derived from a source name.
458 We'd rather have the type named with a real name and all the pointer
459 types to the same object have the same POINTER_TYPE node. Code in this
460 function in c-decl.c makes a copy of the type node here, but that may
461 cause us trouble with incomplete types, so let's not try it (at least
464 if (TREE_CODE (decl) == TYPE_DECL
465 && DECL_NAME (decl) != 0
466 && (TYPE_NAME (TREE_TYPE (decl)) == 0
467 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
468 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
469 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
470 && ! DECL_ARTIFICIAL (decl))))
471 TYPE_NAME (TREE_TYPE (decl)) = decl;
252 476 /* Do little here. Set up the standard declarations later after the
477 front end has been run. */
480 gnat_init_decl_processing ()
/* NOTE(review): return type and opening brace (original lines 478-479,
   481-483) are elided.  */
484 /* Make the binding_level structure for global names. */
485 current_function_decl = 0;
486 current_binding_level = 0;
487 free_binding_level = 0;
/* NOTE(review): the pushlevel call that creates the global level
   (original line 488) is elided; afterwards the current level *is* the
   global level.  */
489 global_binding_level = current_binding_level;
491 build_common_tree_nodes (0);
493 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
494 corresponding to the size of ptr_mode. Make this here since we need
495 this before we can expand the GNAT types. */
496 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
497 build_common_tree_nodes_2 (0);
499 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
501 /* We need to make the integer type before doing anything else.
502 We stitch this in to the appropriate GNAT type later. */
503 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
505 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
508 ptr_void_type_node = build_pointer_type (void_type_node);
272 512 /* Create the predefined scalar types such as `integer_type_node' needed
513 in the gcc back-end and initialize the global binding level. */
/* NOTE(review): return type, opening brace and several locals
   (endlink, i, decl, name buffer, ...) are elided from this listing.  */
516 init_gigi_decls (long_long_float_type, exception_type)
517 tree long_long_float_type, exception_type;
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
533 longest_float_type_node));
/* Ordinary targets: reuse the front end's long-long-float type (the
   `else' head, original lines 534-535, is elided).  */
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
542 unsigned_type_node));
545 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Now declare runtime functions. */
552 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
554 /* malloc is a function declaration tree for a function to allocate
556 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
558 build_function_type (ptr_void_type_node,
559 tree_cons (NULL_TREE,
562 NULL_TREE, 0, 1, 1, 0);
564 /* free is a function declaration tree for a function to free memory. */
567 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
568 build_function_type (void_type_node,
569 tree_cons (NULL_TREE,
572 NULL_TREE, 0, 1, 1, 0);
574 /* Make the types and functions used for exception processing. */
/* jmpbuf_type: an array of 6 pointer-sized slots (index type 0..5).  */
576 = build_array_type (gnat_type_for_mode (Pmode, 0),
577 build_index_type (build_int_2 (5, 0)));
578 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
579 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
581 /* Functions to get and set the jumpbuf pointer for the current thread. */
583 = create_subprog_decl
584 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
585 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
586 NULL_TREE, 0, 1, 1, 0);
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
592 build_function_type (void_type_node,
593 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
594 NULL_TREE, 0, 1, 1, 0);
596 /* Function to get the current exception. */
598 = create_subprog_decl
599 (get_identifier ("system__soft_links__get_gnat_exception"),
601 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
602 NULL_TREE, 0, 1, 1, 0);
604 /* Functions that raise exceptions. */
606 = create_subprog_decl
607 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
608 build_function_type (void_type_node,
609 tree_cons (NULL_TREE,
610 build_pointer_type (except_type_node),
612 NULL_TREE, 0, 1, 1, 0);
614 /* If in no exception handlers mode, all raise statements are redirected to
615 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
616 this procedure will never be called in this mode. */
617 if (No_Exception_Handlers_Set ())
620 = create_subprog_decl
621 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
622 build_function_type (void_type_node,
623 tree_cons (NULL_TREE,
624 build_pointer_type (char_type_node),
625 tree_cons (NULL_TREE,
628 NULL_TREE, 0, 1, 1, 0);
/* Point every raise-reason slot at the single last-chance handler.  */
630 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
632 gnat_raise_decls[i] = decl;
635 /* Otherwise, make one decl for each exception reason. */
636 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
/* Each runtime check routine is named __gnat_rcheck_NN.  */
640 sprintf (name, "__gnat_rcheck_%.2d", i);
642 = create_subprog_decl
643 (get_identifier (name), NULL_TREE,
644 build_function_type (void_type_node,
645 tree_cons (NULL_TREE,
648 tree_cons (NULL_TREE,
651 NULL_TREE, 0, 1, 1, 0);
654 /* Indicate that these never return. */
655 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
656 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
657 TREE_TYPE (raise_nodefer_decl)
658 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
661 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
663 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
664 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
665 TREE_TYPE (gnat_raise_decls[i])
666 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
670 /* setjmp returns an integer and has one operand, which is a pointer to
673 = create_subprog_decl
674 (get_identifier ("setjmp"), NULL_TREE,
675 build_function_type (integer_type_node,
676 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
677 NULL_TREE, 0, 1, 1, 0);
/* Mark setjmp as the builtin so GCC expands it specially.  */
679 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
680 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
682 main_identifier_node = get_identifier ("main");
/* Register the decl tables as GC roots.  */
684 ggc_add_tree_root (gnat_std_decls, ARRAY_SIZE (gnat_std_decls));
685 ggc_add_tree_root (gnat_raise_decls, ARRAY_SIZE (gnat_raise_decls));
376 688 /* This routine is called in tree.c to print an error message for invalid use
689 of an incomplete type. */
/* NOTE(review): both stubs below deliberately do nothing; their return
   types, braces and any abort call (elided lines) are not visible here.  */
692 incomplete_type_error (dont_care_1, dont_care_2)
693 tree dont_care_1 ATTRIBUTE_UNUSED;
694 tree dont_care_2 ATTRIBUTE_UNUSED;
699 /* This function is called indirectly from toplev.c to handle incomplete
700 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
701 compile_file in toplev.c makes an indirect call through the function pointer
702 incomplete_decl_finalize_hook which is initialized to this routine in
703 init_decl_processing. */
706 gnat_finish_incomplete_decl (dont_care)
707 tree dont_care ATTRIBUTE_UNUSED;
388 712 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
713 nodes (FIELDLIST), finish constructing the record or union type.
714 If HAS_REP is nonzero, this record has a rep clause; don't call
715 layout_type but merely set the size and alignment ourselves.
716 If DEFER_DEBUG is nonzero, do not call the debugging routines
717 on this type; it will be done later. */
/* NOTE(review): return type, parameter declarations and numerous braces /
   if-else heads are elided throughout this long function.  */
720 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
726 enum tree_code code = TREE_CODE (record_type);
727 tree ada_size = bitsize_zero_node;
728 tree size = bitsize_zero_node;
729 tree size_unit = size_zero_node;
733 TYPE_FIELDS (record_type) = fieldlist;
/* Give the record a stub TYPE_DECL for the debugger, reusing an
   existing TYPE_DECL name when there is one.  */
735 if (TYPE_NAME (record_type) != 0
736 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL
737 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
739 TYPE_STUB_DECL (record_type)
740 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
743 /* We don't need both the typedef name and the record name output in
744 the debugging information, since they are the same. */
745 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
747 /* Globally initialize the record first. If this is a rep'ed record,
748 that just means some initializations; otherwise, layout the record. */
752 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
753 TYPE_MODE (record_type) = BLKmode;
754 if (TYPE_SIZE (record_type) == 0)
756 TYPE_SIZE (record_type) = bitsize_zero_node;
757 TYPE_SIZE_UNIT (record_type) = size_zero_node;
762 /* Ensure there isn't a size already set. There can be in an error
763 case where there is a rep clause but all fields have errors and
764 no longer have a position. */
765 TYPE_SIZE (record_type) = 0;
766 layout_type (record_type);
769 /* At this point, the position and size of each field is known. It was
770 either set before entry by a rep clause, or by laying out the type
771 above. We now make a pass through the fields (in reverse order for
772 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
773 (for rep'ed records that are not padding types); and the mode (for
776 if (code == QUAL_UNION_TYPE)
777 fieldlist = nreverse (fieldlist);
779 for (field = fieldlist; field; field = TREE_CHAIN (field))
781 tree type = TREE_TYPE (field);
782 tree this_size = DECL_SIZE (field);
783 tree this_size_unit = DECL_SIZE_UNIT (field);
784 tree this_ada_size = DECL_SIZE (field);
786 /* We need to make an XVE/XVU record if any field has variable size,
787 whether or not the record does. For example, if we have an union,
788 it may be that all fields, rounded up to the alignment, have the
789 same size, in which case we'll use that size. But the debug
790 output routines (except Dwarf2) won't be able to output the fields,
791 so we need to make the special record. */
792 if (TREE_CODE (this_size) != INTEGER_CST)
795 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
796 || TREE_CODE (type) == QUAL_UNION_TYPE)
797 && ! TYPE_IS_FAT_POINTER_P (type)
798 && ! TYPE_CONTAINS_TEMPLATE_P (type)
799 && TYPE_ADA_SIZE (type) != 0)
800 this_ada_size = TYPE_ADA_SIZE (type);
802 if (has_rep && ! DECL_BIT_FIELD (field))
803 TYPE_ALIGN (record_type)
804 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* NOTE(review): the `switch (code)' head and the UNION_TYPE case label
   (elided lines) precede the MAX-merging below.  */
809 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
810 size = size_binop (MAX_EXPR, size, this_size);
811 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
814 case QUAL_UNION_TYPE:
816 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
817 this_ada_size, ada_size));
818 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
820 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
821 this_size_unit, size_unit));
825 /* Since we know here that all fields are sorted in order of
826 increasing bit position, the size of the record is one
827 higher than the ending bit of the last field processed
828 unless we have a rep clause, since in that case we might
829 have a field outside a QUAL_UNION_TYPE that has a higher ending
830 position. So use a MAX in that case. Also, if this field is a
831 QUAL_UNION_TYPE, we need to take into account the previous size in
832 the case of empty variants. */
834 = merge_sizes (ada_size, bit_position (field), this_ada_size,
835 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
836 size = merge_sizes (size, bit_position (field), this_size,
837 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
839 = merge_sizes (size_unit, byte_position (field), this_size_unit,
840 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
848 if (code == QUAL_UNION_TYPE)
849 nreverse (fieldlist);
851 /* If this is a padding record, we never want to make the size smaller than
852 what was specified in it, if any. */
853 if (TREE_CODE (record_type) == RECORD_TYPE
854 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
856 size = TYPE_SIZE (record_type);
857 size_unit = TYPE_SIZE_UNIT (record_type);
860 /* Now set any of the values we've just computed that apply. */
861 if (! TYPE_IS_FAT_POINTER_P (record_type)
862 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
863 TYPE_ADA_SIZE (record_type) = ada_size;
865 #ifdef ROUND_TYPE_SIZE
866 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
/* NOTE(review): "record_size" below looks like a typo for "record_type"
   (there is no record_size in scope); later GCC revisions spell it
   record_type — TODO confirm against the repository history.  */
867 size_unit = ROUND_TYPE_SIZE_UNIT (record_size, size_unit,
868 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
870 size = round_up (size, TYPE_ALIGN (record_type));
871 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
/* NOTE(review): the head of this condition (original lines 873-874,
   presumably testing has_rep) is elided.  */
875 && ! (TREE_CODE (record_type) == RECORD_TYPE
876 && TYPE_IS_PADDING_P (record_type)
877 && TREE_CODE (size) != INTEGER_CST
878 && contains_placeholder_p (size)))
880 TYPE_SIZE (record_type) = size;
881 TYPE_SIZE_UNIT (record_type) = size_unit;
885 compute_record_mode (record_type);
889 /* If this record is of variable size, rename it so that the
890 debugger knows it is and make a new, parallel, record
891 that tells the debugger how the record is laid out. See
896 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
897 ? UNION_TYPE : TREE_CODE (record_type));
898 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
900 = concat_id_with_name (orig_id,
901 TREE_CODE (record_type) == QUAL_UNION_TYPE
903 tree last_pos = bitsize_zero_node;
906 TYPE_NAME (new_record_type) = new_id;
907 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
908 TYPE_STUB_DECL (new_record_type)
909 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
910 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
911 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
912 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
913 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
915 /* Now scan all the fields, replacing each field with a new
916 field corresponding to the new encoding. */
917 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
918 old_field = TREE_CHAIN (old_field))
920 tree field_type = TREE_TYPE (old_field);
921 tree field_name = DECL_NAME (old_field);
923 tree curpos = bit_position (old_field);
925 unsigned int align = 0;
928 /* See how the position was modified from the last position.
930 There are two basic cases we support: a value was added
931 to the last position or the last position was rounded to
932 a boundary and they something was added. Check for the
933 first case first. If not, see if there is any evidence
934 of rounding. If so, round the last position and try
937 If this is a union, the position can be taken as zero. */
939 if (TREE_CODE (new_record_type) == UNION_TYPE)
940 pos = bitsize_zero_node, align = 0;
942 pos = compute_related_constant (curpos, last_pos);
944 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
945 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
947 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
948 pos = compute_related_constant (curpos,
949 round_up (last_pos, align));
951 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
952 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
953 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
954 && host_integerp (TREE_OPERAND
955 (TREE_OPERAND (curpos, 0), 1),
960 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
961 pos = compute_related_constant (curpos,
962 round_up (last_pos, align));
965 /* If we can't compute a position, set it to zero.
967 ??? We really should abort here, but it's too much work
968 to get this correct for all cases. */
971 pos = bitsize_zero_node;
973 /* See if this type is variable-size and make a new type
974 and indicate the indirection if so. */
975 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
977 field_type = build_pointer_type (field_type);
981 /* Make a new field name, if necessary. */
982 if (var || align != 0)
/* Suffix encodes variability (XVL) and/or alignment (XVAnn).  */
987 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
988 align / BITS_PER_UNIT);
990 strcpy (suffix, "XVL");
992 field_name = concat_id_with_name (field_name, suffix);
995 new_field = create_field_decl (field_name, field_type,
997 DECL_SIZE (old_field), pos, 0);
998 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
999 TYPE_FIELDS (new_record_type) = new_field;
1001 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1002 zero. The only time it's not the last field of the record
1003 is when there are other components at fixed positions after
1004 it (meaning there was a rep clause for every field) and we
1005 want to be able to encode them. */
1006 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1007 (TREE_CODE (TREE_TYPE (old_field))
1010 : DECL_SIZE (old_field));
1013 TYPE_FIELDS (new_record_type)
1014 = nreverse (TYPE_FIELDS (new_record_type));
1016 rest_of_type_compilation (new_record_type, global_bindings_p ());
1019 rest_of_type_compilation (record_type, global_bindings_p ());
574 1023 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1024 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1025 if this represents a QUAL_UNION_TYPE in which case we must look for
1026 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1027 is nonzero, we must take the MAX of the end position of this field
1028 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1030 We return an expression for the size. */
/* NOTE(review): some parameter declarations, braces and a return
   statement are elided between the lines below.  */
1033 merge_sizes (last_size, first_bit, size, special, has_rep)
1035 tree first_bit, size;
1039 tree type = TREE_TYPE (last_size);
/* Simple case: field end is first_bit + size, MAXed with the running
   size when a rep clause may place fields out of order.  */
1041 if (! special || TREE_CODE (size) != COND_EXPR)
1043 tree new = size_binop (PLUS_EXPR, first_bit, size);
1046 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION variant sizes: recurse into each COND_EXPR arm, keeping
   last_size for zero-sized (absent) variants.  */
1051 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1052 integer_zerop (TREE_OPERAND (size, 1))
1053 ? last_size : merge_sizes (last_size, first_bit,
1054 TREE_OPERAND (size, 1),
1056 integer_zerop (TREE_OPERAND (size, 2))
1057 ? last_size : merge_sizes (last_size, first_bit,
1058 TREE_OPERAND (size, 2),
1062 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1063 related by the addition of a constant. Return that constant if so. */
/* NOTE (review): excerpt — declarations and the failure-return path are
   elided here (see gaps in embedded numbering).  */
1066 compute_related_constant (op0, op1)
1069 tree op0_var, op1_var;
/* Split each operand into constant + variable parts, then subtract the
   constants; if the variable parts match, that difference is the answer.  */
1070 tree op0_con = split_plus (op0, &op0_var);
1071 tree op1_con = split_plus (op1, &op1_var);
1072 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1074 if (operand_equal_p (op0_var, op1_var, 0))
/* Fallback: verify algebraically that OP0 == OP1 + RESULT.  */
1076 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1082 /* Utility function of above to split a tree OP which may be a sum, into a
1083 constant part, which is returned, and a variable part, which is stored
1084 in *PVAR. *PVAR may be size_zero_node. All operations must be of
/* NOTE (review): excerpt — return type, braces, and some return statements
   are elided (see gaps in embedded numbering).  */
1088 split_plus (in, pvar)
1092 tree result = bitsize_zero_node;
/* Strip any NON_LVALUE_EXPR wrappers before classifying IN.  */
1094 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1095 in = TREE_OPERAND (in, 0);
/* A bare constant: the whole of IN is the constant part.  */
1098 if (TREE_CODE (in) == INTEGER_CST)
1100 *pvar = bitsize_zero_node;
/* A sum or difference: recurse on both operands and recombine using the
   same tree code, so MINUS_EXPR negates the RHS contributions.  */
1103 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1105 tree lhs_var, rhs_var;
1106 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1107 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1109 result = size_binop (PLUS_EXPR, result, lhs_con);
1110 result = size_binop (TREE_CODE (in), result, rhs_con);
/* Nothing constant was extracted from either side: report a zero
   constant part (and, per the elided code, IN itself as the variable).  */
1112 if (lhs_var == TREE_OPERAND (in, 0)
1113 && rhs_var == TREE_OPERAND (in, 1))
1114 return bitsize_zero_node;
1116 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
/* Anything else is treated as purely variable.  */
1120 return bitsize_zero_node;
1123 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1124 subprogram. If it is void_type_node, then we are dealing with a procedure,
1125 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1126 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1127 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1128 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1129 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1130 RETURNS_WITH_DSP is nonzero if the function is to return with a
1131 depressed stack pointer. */
/* NOTE (review): excerpt — return type, some declarations, braces, and the
   final return are elided (see gaps in embedded numbering).  */
1134 create_subprog_type (return_type, param_decl_list, cico_list,
1135 returns_unconstrained, returns_by_ref, returns_with_dsp)
1137 tree param_decl_list;
1139 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1141 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1142 the subprogram formal parameters. This list is generated by traversing the
1143 input list of PARM_DECL nodes. */
1144 tree param_type_list = NULL;
1148 for (param_decl = param_decl_list; param_decl;
1149 param_decl = TREE_CHAIN (param_decl))
1150 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1153 /* The list of the function parameter types has to be terminated by the void
1154 type to signal to the back-end that we are not dealing with a variable
1155 parameter subprogram, but that the subprogram has a fixed number of
1157 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1159 /* The list of argument types has been created in reverse
1161 param_type_list = nreverse (param_type_list);
1163 type = build_function_type (return_type, param_type_list);
1165 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1166 or the new type should, make a copy of TYPE. Likewise for
1167 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1168 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1169 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1170 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1171 type = copy_type (type);
/* Record the GNAT-specific properties on the (possibly copied) type.  */
1173 TYPE_CI_CO_LIST (type) = cico_list;
1174 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1175 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1176 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1180 /* Return a copy of TYPE but safe to modify in any way. */
/* NOTE (review): excerpt — the function header (presumably `copy_type') and
   the final return are elided (see gaps in embedded numbering).  */
1186 tree new = copy_node (type);
1188 /* copy_node clears this field instead of copying it, because it is
1189 aliased with TREE_CHAIN. */
1190 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from caches and the variant chain so modifying it
   cannot affect the original type or anything that points at it.  */
1192 TYPE_POINTER_TO (new) = 0;
1193 TYPE_REFERENCE_TO (new) = 0;
1194 TYPE_MAIN_VARIANT (new) = new;
1195 TYPE_NEXT_VARIANT (new) = 0;
1200 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1201 TYPE_INDEX_TYPE is INDEX. */
/* NOTE (review): excerpt — return type, parameter declarations, braces, and
   the early/final returns are elided (see gaps in embedded numbering).  */
1204 create_index_type (min, max, index)
1208 /* First build a type for the desired range. */
1209 tree type = build_index_2_type (min, max);
1211 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1212 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1213 is set, but not to INDEX, make a copy of this type with the requested
1214 index type. Note that we have no way of sharing these types, but that's
1215 only a small hole. */
1216 if (TYPE_INDEX_TYPE (type) == index)
1218 else if (TYPE_INDEX_TYPE (type) != 0)
1219 type = copy_type (type);
1221 TYPE_INDEX_TYPE (type) = index;
1225 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1226 string) and TYPE is a ..._TYPE node giving its data type.
1227 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1228 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1229 information about this type. */
/* NOTE (review): excerpt — return type, some declarations, braces, and the
   final return are elided (see gaps in embedded numbering).  */
1232 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1235 struct attrib *attr_list;
1239 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1240 enum tree_code code = TREE_CODE (type);
1242 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Enter the decl in the current scope and apply any user attributes.  */
1243 pushdecl (type_decl);
1244 process_attributes (type_decl, attr_list);
1246 /* Pass type declaration information to the debugger unless this is an
1247 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1248 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1249 a dummy type, which will be completed later, or a type for which
1250 debugging information was not requested. */
1251 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1253 DECL_IGNORED_P (type_decl) = 1;
1254 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1255 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1256 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1257 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1262 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1263 ASM_NAME is its assembler name (if provided). TYPE is its data type
1264 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1265 expression; NULL_TREE if none.
1267 CONST_FLAG is nonzero if this variable is constant.
1269 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1270 the current compilation unit. This flag should be set when processing the
1271 variable definitions in a package specification. EXTERN_FLAG is nonzero
1272 when processing an external variable declaration (as opposed to a
1273 definition: no storage is to be allocated for the variable here).
1275 STATIC_FLAG is only relevant when not at top level. In that case
1276 it indicates whether to always allocate storage to the variable. */
/* NOTE (review): excerpt — return type, most parameter declarations, braces,
   the start of the INIT_CONST computation, and the final return are elided
   (see gaps in embedded numbering).  */
1279 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1280 extern_flag, static_flag, attr_list)
1289 struct attrib *attr_list;
/* INIT_CONST (first lines elided): nonzero when VAR_INIT is a constant
   usable as a static initializer, per the test visible below.  */
1294 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1295 && (global_bindings_p () || static_flag
1296 ? 0 != initializer_constant_valid_p (var_init,
1297 TREE_TYPE (var_init))
1298 : TREE_CONSTANT (var_init))));
1300 = build_decl ((const_flag && init_const
1301 /* Only make a CONST_DECL for sufficiently-small objects.
1302 We consider complex double "sufficiently-small" */
1303 && TYPE_SIZE (type) != 0
1304 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1305 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1306 GET_MODE_SIZE (DCmode)))
1307 ? CONST_DECL : VAR_DECL, var_name, type);
1308 tree assign_init = 0;
1310 /* If this is external, throw away any initializations unless this is a
1311 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1312 we are defining a global here, leave a constant initialization and save
1313 any variable elaborations for the elaboration routine. Otherwise, if
1314 the initializing expression is not the same as TYPE, generate the
1315 initialization with an assignment statement, since it knows how
1316 to do the required adjustents. If we are just annotating types,
1317 throw away the initialization if it isn't a constant. */
1319 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1320 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
/* Non-constant global initialization: defer to the elaboration routine.  */
1323 if (global_bindings_p () && var_init != 0 && ! init_const)
1325 add_pending_elaborations (var_decl, var_init);
/* Type mismatch or forced-static non-constant init: convert the
   initialization into an explicit assignment emitted below.  */
1329 else if (var_init != 0
1330 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1331 != TYPE_MAIN_VARIANT (type))
1332 || (static_flag && ! init_const)))
1333 assign_init = var_init, var_init = 0;
1335 DECL_COMMON (var_decl) = !flag_no_common;
1336 DECL_INITIAL (var_decl) = var_init;
1337 TREE_READONLY (var_decl) = const_flag;
1338 DECL_EXTERNAL (var_decl) = extern_flag;
1339 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1340 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1341 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1342 = TYPE_VOLATILE (type);
1344 /* At the global binding level we need to allocate static storage for the
1345 variable if and only if its not external. If we are not at the top level
1346 we allocate automatic storage unless requested not to. */
1347 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1350 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1352 process_attributes (var_decl, attr_list);
1354 /* Add this decl to the current binding level and generate any
1355 needed code and RTL. */
1356 var_decl = pushdecl (var_decl);
1357 expand_decl (var_decl);
1359 if (DECL_CONTEXT (var_decl) != 0)
1360 expand_decl_init (var_decl);
1362 /* If this is volatile, force it into memory. */
1363 if (TREE_SIDE_EFFECTS (var_decl))
1364 mark_addressable (var_decl);
1366 if (TREE_CODE (var_decl) != CONST_DECL)
1367 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1369 if (assign_init != 0)
1371 /* If VAR_DECL has a padded type, convert it to the unpadded
1372 type so the assignment is done properly. */
1373 tree lhs = var_decl;
1375 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1376 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1377 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1379 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1386 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1387 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1388 this field is in a record type with a "pragma pack". If SIZE is nonzero
1389 it is the specified size for this field. If POS is nonzero, it is the bit
1390 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1391 the address of this field for aliasing purposes. */
/* NOTE (review): excerpt — return type, parameter declarations, braces, and
   the final return are elided (see gaps in embedded numbering).  */
1394 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1403 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1405 DECL_CONTEXT (field_decl) = record_type;
1406 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1408 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1409 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1410 If it is a padding type where the inner field is of variable size, it
1411 must be at its natural alignment. Just handle the packed case here; we
1412 will disallow non-aligned rep clauses elsewhere. */
1413 if (packed && TYPE_MODE (field_type) == BLKmode)
1414 DECL_ALIGN (field_decl)
1415 = ((TREE_CODE (field_type) == RECORD_TYPE
1416 && TYPE_IS_PADDING_P (field_type)
1417 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1418 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1420 /* If a size is specified, use it. Otherwise, see if we have a size
1421 to use that may differ from the natural size of the object. */
1423 size = convert (bitsizetype, size);
/* PACKED == 1: use the RM (specified) size if it differs from the
   GCC natural size.  */
1426 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1427 TYPE_SIZE (field_type), 0))
1428 size = rm_size (field_type);
1430 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1432 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1433 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1434 size = round_up (size, BITS_PER_UNIT);
1437 /* Make a bitfield if a size is specified for two reasons: first if the size
1438 differs from the natural size. Second, if the alignment is insufficient.
1439 There are a number of ways the latter can be true. But never make a
1440 bitfield if the type of the field has a nonconstant size. */
1442 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1443 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1444 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1446 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1447 bitsize_int (TYPE_ALIGN
1450 || (TYPE_ALIGN (record_type) != 0
1451 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1453 DECL_BIT_FIELD (field_decl) = 1;
1454 DECL_SIZE (field_decl) = size;
1455 if (! packed && pos == 0)
1456 DECL_ALIGN (field_decl)
1457 = (TYPE_ALIGN (record_type) != 0
1458 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1459 : TYPE_ALIGN (field_type));
/* A field at an explicit position is packed exactly when it became a
   bitfield above; otherwise inherit the caller's PACKED setting.  */
1462 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1463 DECL_ALIGN (field_decl)
1464 = MAX (DECL_ALIGN (field_decl),
1465 DECL_BIT_FIELD (field_decl) ? 1
1466 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1467 : TYPE_ALIGN (field_type));
1471 /* We need to pass in the alignment the DECL is known to have.
1472 This is the lowest-order bit set in POS, but no more than
1473 the alignment of the record, if one is specified. Note
1474 that an alignment of 0 is taken as infinite. */
1475 unsigned int known_align;
1477 if (host_integerp (pos, 1))
1478 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1480 known_align = BITS_PER_UNIT;
1482 if (TYPE_ALIGN (record_type)
1483 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1484 known_align = TYPE_ALIGN (record_type);
1486 layout_decl (field_decl, known_align);
1487 SET_DECL_OFFSET_ALIGN (field_decl,
1488 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1490 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1491 &DECL_FIELD_BIT_OFFSET (field_decl),
1492 DECL_OFFSET_ALIGN (field_decl), pos);
1494 DECL_HAS_REP_P (field_decl) = 1;
1497 /* If the field type is passed by reference, we will have pointers to the
1498 field, so it is addressable. */
1499 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1502 /* Mark the decl as nonaddressable if it either is indicated so semantically
1503 or if it is a bit field. */
1504 DECL_NONADDRESSABLE_P (field_decl)
1505 = ! addressable || DECL_BIT_FIELD (field_decl);
1510 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1511 effects, has the value of zero. */
/* NOTE (review): excerpt — the function header (`value_zerop') and braces
   are elided.  Recurses past COMPOUND_EXPRs to test the final value.  */
1517 if (TREE_CODE (exp) == COMPOUND_EXPR)
1518 return value_zerop (TREE_OPERAND (exp, 1));
1520 return integer_zerop (exp);
1523 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1524 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1525 readonly (either an IN parameter or an address of a pass-by-ref
/* NOTE (review): excerpt — return type, parameter declarations, braces, and
   the final return are elided (see gaps in embedded numbering).  */
1529 create_param_decl (param_name, param_type, readonly)
1534 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
/* The type used when the argument is actually passed; here it is the
   declared type itself (no promotion is applied).  */
1536 DECL_ARG_TYPE (param_decl) = param_type;
1537 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1538 TREE_READONLY (param_decl) = readonly;
1542 /* Given a DECL and ATTR_LIST, process the listed attributes. */
/* NOTE (review): excerpt — return type, braces, `break' statements, and the
   #else/#endif lines of the conditional sections are elided.  */
1545 process_attributes (decl, attr_list)
1547 struct attrib *attr_list;
1549 for (; attr_list; attr_list = attr_list->next)
1550 switch (attr_list->type)
1552 case ATTR_MACHINE_ATTRIBUTE:
/* Hand a machine attribute straight to the common attribute handler.  */
1553 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1555 ATTR_FLAG_TYPE_IN_PLACE);
1558 case ATTR_LINK_ALIAS:
1559 TREE_STATIC (decl) = 1;
1560 assemble_alias (decl, attr_list->name);
1563 case ATTR_WEAK_EXTERNAL:
/* declare_weak when the target supports it; otherwise warn (the
   preprocessor conditional lines are elided in this excerpt).  */
1565 declare_weak (decl);
1567 post_error ("?weak declarations not supported on this target",
1568 attr_list->error_point);
1571 case ATTR_LINK_SECTION:
1572 #ifdef ASM_OUTPUT_SECTION_NAME
1573 DECL_SECTION_NAME (decl)
1574 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1575 IDENTIFIER_POINTER (attr_list->name));
/* A decl with an explicit section cannot go in a common block.  */
1576 DECL_COMMON (decl) = 0;
1578 post_error ("?section attributes are not supported for this target",
1579 attr_list->error_point);
1585 /* Add some pending elaborations on the list. */
/* NOTE (review): excerpt — return type, parameter declarations, and braces
   are elided.  Appends (VAR_DECL . VAR_INIT) to the global pending list.  */
1588 add_pending_elaborations (var_decl, var_init)
/* Let the front end diagnose elaboration code where it is forbidden.  */
1593 Check_Elaboration_Code_Allowed (error_gnat_node);
1595 pending_elaborations
1596 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1599 /* Obtain any pending elaborations and clear the old list. */
/* NOTE (review): excerpt — return type, braces, and the final return are
   elided.  The list head itself is a dummy node, hence TREE_CHAIN.  */
1602 get_pending_elaborations ()
1604 /* Each thing added to the list went on the end; we want it on the
1606 tree result = TREE_CHAIN (pending_elaborations);
/* Detach the entries so the global list is empty again.  */
1608 TREE_CHAIN (pending_elaborations) = 0;
1612 /* Mark the binding level stack. */
/* NOTE (review): excerpt — return type, parameter declaration, and braces
   are elided.  GC-marks every level in the binding-level chain.  */
1615 mark_binding_level (arg)
1618 struct binding_level *level = *(struct binding_level **) arg;
1620 for (; level != 0; level = level->level_chain)
1622 ggc_mark_tree (level->names);
1623 ggc_mark_tree (level->blocks);
1624 ggc_mark_tree (level->this_block);
1628 /* Mark the pending elaboration list. */
/* NOTE (review): excerpt — the function header (`mark_e_stack'), braces, and
   the null check on P are elided.  Marks each stacked elaboration list and
   recurses down the stack for the garbage collector.  */
1634 struct e_stack *p = *((struct e_stack **) data);
1638 ggc_mark_tree (p->elab_list);
1639 mark_e_stack (&p->next);
1643 /* Return nonzero if there are pending elaborations. */
/* NOTE (review): excerpt — return type and braces are elided.  Nonzero when
   the pending list has entries past its dummy head node.  */
1646 pending_elaborations_p ()
1648 return TREE_CHAIN (pending_elaborations) != 0;
1651 /* Save a copy of the current pending elaboration list and make a new
/* NOTE (review): excerpt — return type and braces are elided.  Saves the
   current list on ELIST_STACK and starts a fresh one.  */
1655 push_pending_elaborations ()
1657 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1659 p->next = elist_stack;
1660 p->elab_list = pending_elaborations;
/* Fresh list with the usual dummy head node.  */
1662 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1665 /* Pop the stack of pending elaborations. */
/* NOTE (review): excerpt — return type, braces, and (presumably) the free of
   P are elided.  Restores the saved elaboration list from ELIST_STACK.  */
1668 pop_pending_elaborations ()
1670 struct e_stack *p = elist_stack;
1672 pending_elaborations = p->elab_list;
1673 elist_stack = p->next;
1677 /* Return the current position in pending_elaborations so we can insert
1678 elaborations after that point. */
/* NOTE (review): excerpt — return type and braces are elided.  Returns the
   last node of the pending list as an insertion cursor.  */
1681 get_elaboration_location ()
1683 return tree_last (pending_elaborations);
1686 /* Insert the current elaborations after ELAB, which is in some elaboration
/* NOTE (review): excerpt — return type, parameter declaration, and braces
   are elided.  Splices the current pending elaborations into another list
   immediately after ELAB, then empties the current list.  */
1690 insert_elaboration_list (elab)
1693 tree next = TREE_CHAIN (elab);
1695 if (TREE_CHAIN (pending_elaborations))
/* Link: ELAB -> current entries -> original successor of ELAB.  */
1697 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1698 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1699 TREE_CHAIN (pending_elaborations) = 0;
1703 /* Returns a LABEL_DECL node for LABEL_NAME. */
/* NOTE (review): excerpt — return type, parameter declaration, braces, and
   the final return are elided (see gaps in embedded numbering).  */
1706 create_label_decl (label_name)
1709 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1711 DECL_CONTEXT (label_decl) = current_function_decl;
1712 DECL_MODE (label_decl) = VOIDmode;
/* Record the current source position for debugging output.  */
1713 DECL_SOURCE_LINE (label_decl) = lineno;
1714 DECL_SOURCE_FILE (label_decl) = input_filename;
1719 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1720 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1721 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1722 PARM_DECL nodes chained through the TREE_CHAIN field).
1724 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1725 appropriate fields in the FUNCTION_DECL. */
/* NOTE (review): excerpt — return type, several parameter declarations,
   braces, and the body of the nested-inline case are elided.  */
1728 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1729 inline_flag, public_flag, extern_flag, attr_list)
1733 tree param_decl_list;
1737 struct attrib *attr_list;
1739 tree return_type = TREE_TYPE (subprog_type);
1740 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1742 /* If this is a function nested inside an inlined external function, it
1743 means we aren't going to compile the outer function unless it is
1744 actually inlined, so do the same for us. */
1745 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1746 && DECL_EXTERNAL (current_function_decl))
1749 DECL_EXTERNAL (subprog_decl) = extern_flag;
1750 TREE_PUBLIC (subprog_decl) = public_flag;
1751 DECL_INLINE (subprog_decl) = inline_flag;
/* Propagate pure/volatile properties of the function type to the decl.  */
1752 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1753 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1754 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1755 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1756 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1759 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1761 process_attributes (subprog_decl, attr_list);
1763 /* Add this decl to the current binding level. */
1764 subprog_decl = pushdecl (subprog_decl);
1766 /* Output the assembler code and/or RTL for the declaration. */
1767 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1769 return subprog_decl;
1772 /* Count how deep we are into nested functions. This is because
1773 we shouldn't call the backend function context routines unless we
1774 are in a nested function. */
1776 static int function_nesting_depth;
1778 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1779 body. This routine needs to be invoked before processing the declarations
1780 appearing in the subprogram. */
/* NOTE (review): excerpt — return type, parameter declaration, braces, and
   the gnat_pushlevel/enter-scope call are elided (see numbering gaps).  */
1783 begin_subprog_body (subprog_decl)
1786 tree param_decl_list;
/* Save the enclosing function's compilation state when nesting.  */
1790 if (function_nesting_depth++ != 0)
1791 push_function_context ();
1793 announce_function (subprog_decl);
1795 /* Make this field nonzero so further routines know that this is not
1796 tentative. error_mark_node is replaced below (in poplevel) with the
1798 DECL_INITIAL (subprog_decl) = error_mark_node;
1800 /* This function exists in static storage. This does not mean `static' in
1802 TREE_STATIC (subprog_decl) = 1;
1804 /* Enter a new binding level. */
1805 current_function_decl = subprog_decl;
1808 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1809 subprogram body) so that they can be recognized as local variables in the
1812 The list of PARM_DECL nodes is stored in the right order in
1813 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1814 which they are transmitted to `pushdecl' we need to reverse the list of
1815 PARM_DECLs if we want it to be stored in the right order. The reason why
1816 we want to make sure the PARM_DECLs are stored in the correct order is
1817 that this list will be retrieved in a few lines with a call to `getdecl'
1818 to store it back into the DECL_ARGUMENTS field. */
1819 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1821 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
/* Unchain each PARM_DECL before pushdecl, which rebuilds the chain.  */
1823 next_param = TREE_CHAIN (param_decl);
1824 TREE_CHAIN (param_decl) = NULL;
1825 pushdecl (param_decl);
1828 /* Store back the PARM_DECL nodes. They appear in the right order. */
1829 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1831 init_function_start (subprog_decl, input_filename, lineno);
1832 expand_function_start (subprog_decl, 0);
1834 /* If this function is `main', emit a call to `__main'
1835 to run global initializers, etc. */
1836 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1837 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1838 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1839 expand_main_function ();
1842 /* Finish the definition of the current subprogram and compile it all the way
1843 to assembler language output. */
/* NOTE (review): excerpt — the function header (presumably the routine that
   ends a subprogram body, per the comment above), braces, local decls
   (cico_list, decl), and several conditionals are elided.  */
1852 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1853 = current_function_decl;
1855 /* Mark the RESULT_DECL as being in this subprogram. */
1856 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1858 expand_function_end (input_filename, lineno, 0);
1860 /* If this is a nested function, push a new GC context. That will keep
1861 local variables on the stack from being collected while we're doing
1862 the compilation of this function. */
1863 if (function_nesting_depth > 1)
1864 ggc_push_context ();
1866 rest_of_compilation (current_function_decl);
/* Matching pop of the GC context pushed above (pop call elided here).  */
1868 if (function_nesting_depth > 1)
1872 /* If we're sure this function is defined in this file then mark it
1874 if (TREE_ASM_WRITTEN (current_function_decl))
1875 mark_fn_defined_in_this_file (current_function_decl);
1878 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1879 not be seen when we call this function and will be in
1880 unallocated memory anyway. */
1881 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1882 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1883 TREE_VALUE (cico_list) = 0;
/* SAVED_INSNS nonzero would mean the body is preserved for inlining.  */
1885 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1887 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1888 was saved for inline, in which case the DECL_RTLs are in
1889 preserved memory. */
1890 for (decl = DECL_ARGUMENTS (current_function_decl);
1891 decl != 0; decl = TREE_CHAIN (decl))
1893 SET_DECL_RTL (decl, 0);
1894 DECL_INCOMING_RTL (decl) = 0;
1897 /* Similarly, discard DECL_RTL of the return value. */
1898 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1900 /* But DECL_INITIAL must remain nonzero so we know this
1901 was an actual function definition unless toplev.c decided not
1903 if (DECL_INITIAL (current_function_decl) != 0)
1904 DECL_INITIAL (current_function_decl) = error_mark_node;
1906 DECL_ARGUMENTS (current_function_decl) = 0;
1909 /* If we are not at the bottom of the function nesting stack, pop up to
1910 the containing function. Otherwise show we aren't in any function. */
1911 if (--function_nesting_depth != 0)
1912 pop_function_context ();
1914 current_function_decl = 0;
1917 /* Return a definition for a builtin function named NAME and whose data type
1918 is TYPE. TYPE should be a function type with argument types.
1919 FUNCTION_CODE tells later passes how to compile calls to this function.
1920 See tree.h for its possible values.
1922 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1923 the name to be called if we can't opencode the function. */
/* NOTE (review): excerpt — return type, some parameter declarations, braces,
   the make_decl_rtl call, and the final return are elided.  */
1926 builtin_function (name, type, function_code, class, library_name)
1930 enum built_in_class class;
1931 const char *library_name;
1933 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
/* Builtins are externally-defined and globally visible.  */
1935 DECL_EXTERNAL (decl) = 1;
1936 TREE_PUBLIC (decl) = 1;
/* Guarded (elided) on LIBRARY_NAME being nonzero.  */
1938 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
1941 DECL_BUILT_IN_CLASS (decl) = class;
1942 DECL_FUNCTION_CODE (decl) = function_code;
1946 /* Return an integer type with the number of bits of precision given by
1947 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1948 it is a signed type. */
/* NOTE (review): excerpt — return type, parameter declarations, braces,
   the TYPE_NAME buffer declaration, and the final return are elided.  */
1951 gnat_type_for_size (precision, unsignedp)
/* Return a cached type if we already made one for this precision.  */
1958 if (precision <= 2 * MAX_BITS_PER_WORD
1959 && signed_and_unsigned_types[precision][unsignedp] != 0)
1960 return signed_and_unsigned_types[precision][unsignedp];
1963 t = make_unsigned_type (precision);
1965 t = make_signed_type (precision);
/* Cache the new type for reuse (cache is bounded at 2*MAX_BITS_PER_WORD).  */
1967 if (precision <= 2 * MAX_BITS_PER_WORD)
1968 signed_and_unsigned_types[precision][unsignedp] = t;
1970 if (TYPE_NAME (t) == 0)
1972 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1973 TYPE_NAME (t) = get_identifier (type_name);
1979 /* Likewise for floating-point types. */
/* NOTE (review): excerpt — return type, braces, the layout_type call, the
   abort on mode mismatch, and the final return are elided.  */
1982 float_type_for_size (precision, mode)
1984 enum machine_mode mode;
/* One cached REAL_TYPE per machine mode.  */
1989 if (float_types[(int) mode] != 0)
1990 return float_types[(int) mode];
1992 float_types[(int) mode] = t = make_node (REAL_TYPE);
1993 TYPE_PRECISION (t) = precision;
/* Sanity check: the laid-out type must land in the requested mode
   (the consequence of failure is elided in this excerpt).  */
1996 if (TYPE_MODE (t) != mode)
1999 if (TYPE_NAME (t) == 0)
2001 sprintf (type_name, "FLOAT_%d", precision);
2002 TYPE_NAME (t) = get_identifier (type_name);
2008 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2009 an unsigned type; otherwise a signed type is returned. */
/* NOTE (review): excerpt — return type, braces, and the fallthrough for
   non-float/non-scalar modes are elided.  */
2012 gnat_type_for_mode (mode, unsignedp)
2013 enum machine_mode mode;
/* Dispatch on mode class: floats get a REAL_TYPE, the rest an integer
   type of the mode's bit size.  */
2016 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2017 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2019 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2022 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
/* NOTE (review): excerpt — return type, parameter declaration, braces, and
   the final return are elided.  */
2025 gnat_unsigned_type (type_node)
2028 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For modular types, keep a link back to the original so modular
   semantics can be recovered; same for a type whose base is modular.  */
2030 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2032 type = copy_node (type);
2033 TREE_TYPE (type) = type_node;
2035 else if (TREE_TYPE (type_node) != 0
2036 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2037 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2039 type = copy_node (type);
2040 TREE_TYPE (type) = TREE_TYPE (type_node);
2046 /* Return the signed version of a TYPE_NODE, a scalar type. */
/* NOTE (review): excerpt — return type, parameter declaration, braces, and
   the final return are elided.  Mirror of gnat_unsigned_type with
   UNSIGNEDP == 0.  */
2049 gnat_signed_type (type_node)
2052 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Preserve a link to the modular original, as in gnat_unsigned_type.  */
2054 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2056 type = copy_node (type);
2057 TREE_TYPE (type) = type_node;
2059 else if (TREE_TYPE (type_node) != 0
2060 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2061 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2063 type = copy_node (type);
2064 TREE_TYPE (type) = TREE_TYPE (type_node);
2070 /* Return a type the same as TYPE except unsigned or signed according to
/* NOTE (review): excerpt — return type, parameter declarations, braces, and
   the "return type unchanged" line after the early-exit test are elided.  */
2074 gnat_signed_or_unsigned_type (unsignedp, type)
/* Non-integral types, or a type already of the requested signedness,
   are returned unchanged (return elided in this excerpt).  */
2078 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2081 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2084 /* EXP is an expression for the size of an object. If this size contains
2085 discriminant references, replace them with the maximum (if MAX_P) or
2086 minimum (if ! MAX_P) possible value of the discriminant. */
/* NOTE (review): excerpt — return type, parameter declarations, braces,
   case labels, aborts, and several return statements are elided (see the
   gaps in the embedded line numbers).  */
2089 max_size (exp, max_p)
2093 enum tree_code code = TREE_CODE (exp);
2094 tree type = TREE_TYPE (exp);
2096 switch (TREE_CODE_CLASS (code))
/* Constants and lists: a TREE_LIST is rewritten element-wise.  */
2103 if (code == TREE_LIST)
2104 return tree_cons (TREE_PURPOSE (exp),
2105 max_size (TREE_VALUE (exp), max_p),
2106 TREE_CHAIN (exp) != 0
2107 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2111 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2112 modify. Otherwise, we abort since it is something we can't
2114 if (! contains_placeholder_p (exp))
/* Discriminant reference: substitute the extreme value of its type.  */
2117 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2119 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
/* Comparison class: result is boolean, so bound it by 1 or 0.  */
2122 return max_p ? size_one_node : size_zero_node;
2127 switch (TREE_CODE_LENGTH (code))
/* Unary: NEGATE flips the direction of the bound we seek.  */
2130 if (code == NON_LVALUE_EXPR)
2131 return max_size (TREE_OPERAND (exp, 0), max_p);
2134 fold (build1 (code, type,
2135 max_size (TREE_OPERAND (exp, 0),
2136 code == NEGATE_EXPR ? ! max_p : max_p)));
/* Binary: MINUS likewise flips direction for its right operand.  */
2139 if (code == RTL_EXPR)
2141 else if (code == COMPOUND_EXPR)
2142 return max_size (TREE_OPERAND (exp, 1), max_p);
2143 else if (code == WITH_RECORD_EXPR)
2147 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2148 tree rhs = max_size (TREE_OPERAND (exp, 1),
2149 code == MINUS_EXPR ? ! max_p : max_p);
2151 /* Special-case wanting the maximum value of a MIN_EXPR.
2152 In that case, if one side overflows, return the other.
2153 sizetype is signed, but we know sizes are non-negative.
2154 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2155 overflowing or the maximum possible value and the RHS
2157 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2159 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2161 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2162 && (TREE_OVERFLOW (lhs)
2163 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2164 && ! TREE_CONSTANT (rhs))
2167 return fold (build (code, type, lhs, rhs));
/* Ternary: a COND_EXPR's bound is the MAX of both arms.  */
2171 if (code == SAVE_EXPR)
2173 else if (code == COND_EXPR)
2174 return fold (build (MAX_EXPR, type,
2175 max_size (TREE_OPERAND (exp, 1), max_p),
2176 max_size (TREE_OPERAND (exp, 2), max_p)));
2177 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2178 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2179 max_size (TREE_OPERAND (exp, 1), max_p));
2186 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2187 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2188 Return a constructor for the template. */
2191 build_template (template_type, array_type, expr)
2196 tree template_elts = NULL_TREE;
2197 tree bound_list = NULL_TREE;
/* Strip a padding record or a left-justified-modular wrapper to reach
   the underlying array type.  */
2200 if (TREE_CODE (array_type) == RECORD_TYPE
2201 && (TYPE_IS_PADDING_P (array_type)
2202 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2203 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* Packed-array implementation types carry their logical bounds in
   TYPE_ACTUAL_BOUNDS; prefer those when available.  */
2205 if (TREE_CODE (array_type) == ARRAY_TYPE
2206 || (TREE_CODE (array_type) == INTEGER_TYPE
2207 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2208 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2210 /* First make the list for a CONSTRUCTOR for the template. Go down the
2211 field list of the template instead of the type chain because this
2212 array might be an Ada array of arrays and we can't tell where the
2213 nested arrays stop being the underlying object. */
/* Template fields come in (min, max) pairs, hence the double
   TREE_CHAIN step per iteration.  */
2215 for (field = TYPE_FIELDS (template_type); field;
2217 ? (bound_list = TREE_CHAIN (bound_list))
2218 : (array_type = TREE_TYPE (array_type))),
2219 field = TREE_CHAIN (TREE_CHAIN (field)))
2221 tree bounds, min, max;
2223 /* If we have a bound list, get the bounds from there. Likewise
2224 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2225 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2226 This will give us a maximum range. */
2227 if (bound_list != 0)
2228 bounds = TREE_VALUE (bound_list);
2229 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2230 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2231 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2232 && DECL_BY_COMPONENT_PTR_P (expr))
2233 bounds = TREE_TYPE (field);
2237 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2238 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2240 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2241 surround them with a WITH_RECORD_EXPR giving EXPR as the
2243 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2244 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2245 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2246 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
/* Accumulate in reverse; nreverse below restores declaration order.  */
2248 template_elts = tree_cons (TREE_CHAIN (field), max,
2249 tree_cons (field, min, template_elts));
2252 return build_constructor (template_type, nreverse (template_elts));
2255 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2256 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2257 in the type contains in its DECL_INITIAL the expression to use when
2258 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2259 to print out an error message if the mechanism cannot be applied to
2260 an object of that type and also for the name. */
/* NOTE(review): this excerpt elides many original lines (switch case
   labels, break statements, closing braces); comments are hedged where
   the elided context would be needed to be certain.  */
2263 build_vms_descriptor (type, mech, gnat_entity)
2265 Mechanism_Type mech;
2266 Entity_Id gnat_entity;
2268 tree record_type = make_node (RECORD_TYPE);
2269 tree field_list = 0;
2278 /* If TYPE is an unconstrained array, use the underlying array type. */
2279 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2280 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2282 /* If this is an array, compute the number of dimensions in the array,
2283 get the index types, and point to the inner type. */
2284 if (TREE_CODE (type) != ARRAY_TYPE)
/* Multi-dimensional Ada arrays are nested ARRAY_TYPEs flagged with
   TYPE_MULTI_ARRAY_P; count them to get NDIM.  */
2287 for (ndim = 1, inner_type = type;
2288 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2289 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2290 ndim++, inner_type = TREE_TYPE (inner_type))
2293 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* For Fortran-convention arrays (except NCA descriptors) the index
   types are collected in reverse dimension order.  */
2295 if (mech != By_Descriptor_NCA
2296 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2297 for (i = ndim - 1, inner_type = type;
2299 i--, inner_type = TREE_TYPE (inner_type))
2300 idx_arr[i] = TYPE_DOMAIN (inner_type);
2302 for (i = 0, inner_type = type;
2304 i++, inner_type = TREE_TYPE (inner_type))
2305 idx_arr[i] = TYPE_DOMAIN (inner_type);
2307 /* Now get the DTYPE value. */
/* DTYPE is the VMS descriptor atomic data-type code; the numeric
   values below are the DSC$K_DTYPE_* codes (see the OpenVMS calling
   standard — TODO confirm against that reference).  */
2308 switch (TREE_CODE (type))
2312 if (TYPE_VAX_FLOATING_POINT_P (type))
2313 switch ((int) TYPE_DIGITS_VALUE (type))
2326 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2329 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2332 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2335 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2338 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2341 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2347 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2351 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2352 && TYPE_VAX_FLOATING_POINT_P (type))
2353 switch ((int) TYPE_DIGITS_VALUE (type))
2365 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2376 /* Get the CLASS value. */
2379 case By_Descriptor_A:
2382 case By_Descriptor_NCA:
2385 case By_Descriptor_SB:
2392 /* Make the type for a descriptor for VMS. The first four fields
2393 are the same for all types. */
2396 = chainon (field_list,
2397 make_descriptor_field
2398 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2399 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2401 field_list = chainon (field_list,
2402 make_descriptor_field ("DTYPE",
2403 gnat_type_for_size (8, 1),
2404 record_type, size_int (dtype)));
2405 field_list = chainon (field_list,
2406 make_descriptor_field ("CLASS",
2407 gnat_type_for_size (8, 1),
2408 record_type, size_int (class)));
2411 = chainon (field_list,
2412 make_descriptor_field ("POINTER",
2413 build_pointer_type (type),
/* The PLACEHOLDER_EXPR stands for the described object; it is
   resolved when a constructor for the descriptor is built.  */
2416 build_pointer_type (type),
2417 build (PLACEHOLDER_EXPR,
/* Mechanism-specific trailing fields follow.  */
2423 case By_Descriptor_S:
2426 case By_Descriptor_SB:
/* SB descriptors for arrays carry the (one-dimensional) bounds.  */
2428 = chainon (field_list,
2429 make_descriptor_field
2430 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2431 TREE_CODE (type) == ARRAY_TYPE
2432 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2434 = chainon (field_list,
2435 make_descriptor_field
2436 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2437 TREE_CODE (type) == ARRAY_TYPE
2438 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2441 case By_Descriptor_A:
2442 case By_Descriptor_NCA:
2443 field_list = chainon (field_list,
2444 make_descriptor_field ("SCALE",
2445 gnat_type_for_size (8, 1),
2449 field_list = chainon (field_list,
2450 make_descriptor_field ("DIGITS",
2451 gnat_type_for_size (8, 1),
2456 = chainon (field_list,
2457 make_descriptor_field
2458 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2459 size_int (mech == By_Descriptor_NCA
2461 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2462 : (TREE_CODE (type) == ARRAY_TYPE
2463 && TYPE_CONVENTION_FORTRAN_P (type)
2466 field_list = chainon (field_list,
2467 make_descriptor_field ("DIMCT",
2468 gnat_type_for_size (8, 1),
2472 field_list = chainon (field_list,
2473 make_descriptor_field ("ARSIZE",
2474 gnat_type_for_size (32, 1),
2476 size_in_bytes (type)));
2478 /* Now build a pointer to the 0,0,0... element. */
2479 tem = build (PLACEHOLDER_EXPR, type);
2480 for (i = 0, inner_type = type; i < ndim;
2481 i++, inner_type = TREE_TYPE (inner_type))
2482 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2483 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2486 = chainon (field_list,
2487 make_descriptor_field
2488 ("A0", build_pointer_type (inner_type), record_type,
2489 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2491 /* Next come the addressing coefficients. */
/* Each coefficient is the running product of (max - min + 1)
   extents — the stride multiplier for that dimension.  */
2493 for (i = 0; i < ndim; i++)
2497 = size_binop (MULT_EXPR, tem,
2498 size_binop (PLUS_EXPR,
2499 size_binop (MINUS_EXPR,
2500 TYPE_MAX_VALUE (idx_arr[i]),
2501 TYPE_MIN_VALUE (idx_arr[i])),
/* Coefficient fields are "S0".."Sn" for NCA, "M0".."Mn" otherwise.  */
2504 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2505 fname[1] = '0' + i, fname[2] = 0;
2507 = chainon (field_list,
2508 make_descriptor_field (fname,
2509 gnat_type_for_size (32, 1),
2510 record_type, idx_length));
2512 if (mech == By_Descriptor_NCA)
2516 /* Finally here are the bounds. */
2517 for (i = 0; i < ndim; i++)
2521 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2523 = chainon (field_list,
2524 make_descriptor_field
2525 (fname, gnat_type_for_size (32, 1), record_type,
2526 TYPE_MIN_VALUE (idx_arr[i])));
2530 = chainon (field_list,
2531 make_descriptor_field
2532 (fname, gnat_type_for_size (32, 1), record_type,
2533 TYPE_MAX_VALUE (idx_arr[i])));
2538 post_error ("unsupported descriptor type for &", gnat_entity);
2541 finish_record_type (record_type, field_list, 0, 1);
2542 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2548 /* Utility routine for above code to make a field. */
/* Create a FIELD_DECL named NAME of TYPE in REC_TYPE.  INITIAL is stored
   in DECL_INITIAL, where build_vms_descriptor's machinery expects to find
   the expression used when a descriptor constructor is built.  */
2551 make_descriptor_field (name, type, rec_type, initial)
2558 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2560 DECL_INITIAL (field) = initial;
2564 /* Build a type to be used to represent an aliased object whose nominal
2565 type is an unconstrained array. This consists of a RECORD_TYPE containing
2566 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2567 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2568 is used to represent an arbitrary unconstrained object. Use NAME
2569 as the name of the record. */
2572 build_unc_object_type (template_type, object_type, name)
2577 tree type = make_node (RECORD_TYPE);
/* "BOUNDS" holds the template (the array bounds); "ARRAY" holds the data.
   The template field comes first so the bounds precede the elements.  */
2578 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2579 template_type, type, 0, 0, 0, 1);
2580 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2583 TYPE_NAME (type) = name;
2584 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2585 finish_record_type (type,
2586 chainon (chainon (NULL_TREE, template_field),
2593 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2594 the normal case this is just two adjustments, but we have more to do
2595 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): this routine mutates existing type nodes in place; the
   exact statement order matters.  Several original lines are elided from
   this excerpt.  */
2598 update_pointer_to (old_type, new_type)
2602 tree ptr = TYPE_POINTER_TO (old_type);
2603 tree ref = TYPE_REFERENCE_TO (old_type);
2606 /* If this is the main variant, process all the other variants first. */
2607 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2608 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2609 type = TYPE_NEXT_VARIANT (type))
2610 update_pointer_to (type, new_type);
2612 /* If no pointer or reference, we are done. Otherwise, get the new type with
2613 the same qualifiers as the old type and see if it is the same as the old
2615 if (ptr == 0 && ref == 0)
2618 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2619 if (old_type == new_type)
2622 /* First handle the simple case. */
/* Redirect the existing POINTER_TYPE/REFERENCE_TYPE nodes at NEW_TYPE
   rather than creating new ones, so all existing users follow along.  */
2623 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2626 TREE_TYPE (ptr) = new_type;
2627 TYPE_POINTER_TO (new_type) = ptr;
2630 TREE_TYPE (ref) = new_type;
2631 TYPE_REFERENCE_TO (new_type) = ref;
/* Re-emit debug info for named pointer/reference TYPE_DECLs, since
   their pointed-to type just changed.  */
2633 if (ptr != 0 && TYPE_NAME (ptr) != 0
2634 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2635 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2636 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2637 global_bindings_p (), 0);
2638 if (ref != 0 && TYPE_NAME (ref) != 0
2639 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2640 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2641 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2642 global_bindings_p (), 0);
2645 /* Now deal with the unconstrained array case. In this case the "pointer"
2646 is actually a RECORD_TYPE where the types of both fields are
2647 pointers to void. In that case, copy the field list from the
2648 old type to the new one and update the fields' context. */
2649 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2654 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
/* Graft the new fat-pointer field list onto the old record so existing
   references to PTR see the updated fields.  */
2659 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2660 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2661 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2663 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2666 ??? This is now the only use of gnat_substitute_in_type, which
2667 is now a very "heavy" routine to do this, so it should be replaced
2669 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2670 new_ref = build (COMPONENT_REF, ptr_temp_type,
2671 build (PLACEHOLDER_EXPR, ptr),
2672 TREE_CHAIN (TYPE_FIELDS (ptr)));
2675 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2676 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2677 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
/* Point every variant of the fat pointer at the new array type.  */
2679 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2680 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2682 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2683 = TREE_TYPE (new_type) = ptr;
2685 /* Now handle updating the allocation record, what the thin pointer
2686 points to. Update all pointers from the old record into the new
2687 one, update the types of the fields, and recompute the size. */
2689 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2691 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2692 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2693 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2694 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2695 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2696 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2697 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
/* Object record size = template field size + array field size,
   in both bit (TYPE_SIZE) and byte (TYPE_SIZE_UNIT) forms.  */
2699 TYPE_SIZE (new_obj_rec)
2700 = size_binop (PLUS_EXPR,
2701 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2702 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2703 TYPE_SIZE_UNIT (new_obj_rec)
2704 = size_binop (PLUS_EXPR,
2705 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2706 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2707 rest_of_type_compilation (ptr, global_bindings_p ());
2711 /* Convert a pointer to a constrained array into a pointer to a fat
2712 pointer. This involves making or finding a template. */
2715 convert_to_fat_pointer (type, expr)
/* TYPE's second field is a pointer to the template type; dig it out.  */
2719 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2720 tree template, template_addr;
2721 tree etype = TREE_TYPE (expr);
2723 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2724 pointer to the template and array. */
2725 if (integer_zerop (expr))
2729 tree_cons (TYPE_FIELDS (type),
2730 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2731 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2732 convert (build_pointer_type (template_type),
2736 /* If EXPR is a thin pointer, make the template and data from the record. */
2738 else if (TYPE_THIN_POINTER_P (etype))
2740 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Save EXPR first: it is referenced twice below (template and data).  */
2742 expr = save_expr (expr);
/* If taking the address of an object, use the object directly instead
   of dereferencing the address again.  */
2743 if (TREE_CODE (expr) == ADDR_EXPR)
2744 expr = TREE_OPERAND (expr, 0);
2746 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the pointed-to record is the template, second the data.  */
2748 template = build_component_ref (expr, NULL_TREE, fields);
2749 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2750 build_component_ref (expr, NULL_TREE,
2751 TREE_CHAIN (fields)));
2754 /* Otherwise, build the constructor for the template. */
2755 template = build_template (template_type, TREE_TYPE (etype), expr);
2757 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2759 /* The result is a CONSTRUCTOR for the fat pointer. */
2761 build_constructor (type,
2762 tree_cons (TYPE_FIELDS (type), expr,
2763 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2764 template_addr, NULL_TREE)));
2767 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2768 is something that is a fat pointer, so convert to it first if EXPR
2769 is not already a fat pointer. */
2772 convert_to_thin_pointer (type, expr)
2776 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2778 = convert_to_fat_pointer
2779 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2781 /* We get the pointer to the data and use a NOP_EXPR to make it the
2783 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2784 expr = build1 (NOP_EXPR, type, expr);
2789 /* Create an expression whose value is that of EXPR,
2790 converted to type TYPE. The TREE_TYPE of the value
2791 is always TYPE. This function implements all reasonable
2792 conversions; callers should filter out those that are
2793 not permitted by the language being compiled. */
/* NOTE(review): numerous original lines (case labels, returns, braces)
   are elided from this excerpt; comments are hedged accordingly.  */
2796 convert (type, expr)
2799 enum tree_code code = TREE_CODE (type);
2800 tree etype = TREE_TYPE (expr);
2801 enum tree_code ecode = TREE_CODE (etype);
2804 /* If EXPR is already the right type, we are done. */
2808 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2810 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2811 return build (WITH_RECORD_EXPR, type,
2812 convert (type, TREE_OPERAND (expr, 0)),
2813 TREE_OPERAND (expr, 1));
2815 /* If the input type has padding, remove it by doing a component reference
2816 to the field. If the output type has padding, make a constructor
2817 to build the record. If both input and output have padding and are
2818 of variable size, do this as an unchecked conversion. */
2819 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2820 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2821 && (! TREE_CONSTANT (TYPE_SIZE (type))
2822 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2824 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2826 /* If we have just converted to this padded type, just get
2827 the inner expression. */
2828 if (TREE_CODE (expr) == CONSTRUCTOR
2829 && CONSTRUCTOR_ELTS (expr) != 0
2830 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2831 return TREE_VALUE (CONSTRUCTOR_ELTS (expr))
2833 return convert (type, build_component_ref (expr, NULL_TREE,
2834 TYPE_FIELDS (etype)));
2836 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2838 /* If we previously converted from another type and our type is
2839 of variable size, remove the conversion to avoid the need for
2840 variable-size temporaries. */
2841 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2842 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2843 expr = TREE_OPERAND (expr, 0);
2845 /* If we are just removing the padding from expr, convert the original
2846 object if we have variable size. That will avoid the need
2847 for some variable-size temporaries. */
2848 if (TREE_CODE (expr) == COMPONENT_REF
2849 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2850 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2851 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2852 return convert (type, TREE_OPERAND (expr, 0));
2854 /* If the result type is a padded type with a self-referentially-sized
2855 field and the expression type is a record, do this as an
2856 unchecked conversion. */
2857 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2858 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2859 && TREE_CODE (etype) == RECORD_TYPE)
2860 return unchecked_convert (type, expr);
2864 build_constructor (type,
2865 tree_cons (TYPE_FIELDS (type),
2867 (TYPE_FIELDS (type)),
2872 /* If the input is a biased type, adjust first. */
/* A biased value stores (value - min); add TYPE_MIN_VALUE back to
   recover the logical value before converting further.  */
2873 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2874 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2875 fold (build1 (GNAT_NOP_EXPR,
2876 TREE_TYPE (etype), expr)),
2877 TYPE_MIN_VALUE (etype))));
2879 /* If the input is a left-justified modular type, we need to extract
2880 the actual object before converting it to any other type with the
2881 exception of an unconstrained array. */
2882 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2883 && code != UNCONSTRAINED_ARRAY_TYPE)
2884 return convert (type, build_component_ref (expr, NULL_TREE,
2885 TYPE_FIELDS (etype)));
2887 /* If converting a type that does not contain a template into one
2888 that does, convert to the data type and then build the template. */
2889 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2890 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2892 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2897 tree_cons (TYPE_FIELDS (type),
2898 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2899 obj_type, NULL_TREE),
2900 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2901 convert (obj_type, expr), NULL_TREE)));
2904 /* There are some special cases of expressions that we process
2906 switch (TREE_CODE (expr))
2911 case TRANSFORM_EXPR:
2913 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2914 conversion in gnat_expand_expr. NULL_EXPR does not represent
2915 an actual value, so no conversion is needed. */
2916 TREE_TYPE (expr) = type;
2921 /* If we are converting a STRING_CST to another constrained array type,
2922 just make a new one in the proper type. Likewise for a
2923 CONSTRUCTOR. But if the mode of the type is different, we must
2924 ensure a new RTL is made for the constant. */
2925 if (code == ecode && AGGREGATE_TYPE_P (etype)
2926 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2927 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2929 expr = copy_node (expr);
2930 TREE_TYPE (expr) = type;
/* Clearing the cached RTL forces fresh RTL generation in the
   new mode.  */
2932 if (TYPE_MODE (type) != TYPE_MODE (etype))
2933 TREE_CST_RTL (expr) = 0;
2940 /* If we are converting between two aggregate types of the same
2941 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2942 This avoids unneeded conversions, which makes reference computations
2944 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2945 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2946 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2947 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2948 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2949 TREE_OPERAND (expr, 1));
2953 case UNCONSTRAINED_ARRAY_REF:
2954 /* Convert this to the type of the inner array by getting the address of
2955 the array from the template. */
2956 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2957 build_component_ref (TREE_OPERAND (expr, 0),
2958 get_identifier ("P_ARRAY"),
/* EXPR and its type changed; recompute and fall through to the
   general handling below.  */
2960 etype = TREE_TYPE (expr);
2961 ecode = TREE_CODE (etype);
2964 case VIEW_CONVERT_EXPR:
/* Collapse nested view conversions between plain aggregates.  */
2965 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2966 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2967 return convert (type, TREE_OPERAND (expr, 0));
2971 /* If both types are record types, just convert the pointer and
2972 make a new INDIRECT_REF.
2974 ??? Disable this for now since it causes problems with the
2975 code in build_binary_op for MODIFY_EXPR which wants to
2976 strip off conversions. But that code really is a mess and
2977 we need to do this a much better way some time. */
2979 && (TREE_CODE (type) == RECORD_TYPE
2980 || TREE_CODE (type) == UNION_TYPE)
2981 && (TREE_CODE (etype) == RECORD_TYPE
2982 || TREE_CODE (etype) == UNION_TYPE)
2983 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2984 return build_unary_op (INDIRECT_REF, NULL_TREE,
2985 convert (build_pointer_type (type),
2986 TREE_OPERAND (expr, 0)));
2993 /* Check for converting to a pointer to an unconstrained array. */
2994 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2995 return convert_to_fat_pointer (type, expr);
/* Same main variant: a plain NOP_EXPR suffices.  */
2997 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2998 || (code == INTEGER_CST && ecode == INTEGER_CST
2999 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3000 return fold (build1 (NOP_EXPR, type, expr));
3005 return build1 (CONVERT_EXPR, type, expr);
/* Target is a packed-array implementation type: treat as a bitwise
   (unchecked) conversion from the array form.  */
3008 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3009 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
3010 return unchecked_convert (type, expr);
/* Target is biased: store (value - min) in the base type.  */
3011 else if (TYPE_BIASED_REPRESENTATION_P (type))
3012 return fold (build1 (CONVERT_EXPR, type,
3013 fold (build (MINUS_EXPR, TREE_TYPE (type),
3014 convert (TREE_TYPE (type), expr),
3015 TYPE_MIN_VALUE (type)))));
3017 /* ... fall through ... */
3020 return fold (convert_to_integer (type, expr));
3023 case REFERENCE_TYPE:
3024 /* If converting between two pointers to records denoting
3025 both a template and type, adjust if needed to account
3026 for any differing offsets, since one might be negative. */
3027 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3030 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3031 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3032 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3033 sbitsize_int (BITS_PER_UNIT));
3035 expr = build1 (NOP_EXPR, type, expr);
3036 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3037 if (integer_zerop (byte_diff))
3040 return build_binary_op (PLUS_EXPR, type, expr,
3041 fold (convert_to_pointer (type, byte_diff)));
3044 /* If converting to a thin pointer, handle specially. */
3045 if (TYPE_THIN_POINTER_P (type)
3046 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3047 return convert_to_thin_pointer (type, expr);
3049 /* If converting fat pointer to normal pointer, get the pointer to the
3050 array and then convert it. */
3051 else if (TYPE_FAT_POINTER_P (etype))
3052 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3055 return fold (convert_to_pointer (type, expr));
3058 return fold (convert_to_real (type, expr));
/* Wrap a scalar into a left-justified modular record via a
   single-field constructor.  */
3061 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3064 (type, tree_cons (TYPE_FIELDS (type),
3065 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3068 /* ... fall through ... */
3071 /* In these cases, assume the front-end has validated the conversion.
3072 If the conversion is valid, it will be a bit-wise conversion, so
3073 it can be viewed as an unchecked conversion. */
3074 return unchecked_convert (type, expr);
3077 /* Just validate that the type is indeed that of a field
3078 of the type. Then make the simple conversion. */
3079 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3080 if (TREE_TYPE (tem) == etype)
3081 return build1 (CONVERT_EXPR, type, expr);
3085 case UNCONSTRAINED_ARRAY_TYPE:
3086 /* If EXPR is a constrained array, take its address, convert it to a
3087 fat pointer, and then dereference it. Likewise if EXPR is a
3088 record containing both a template and a constrained array.
3089 Note that a record representing a left justified modular type
3090 always represents a packed constrained array. */
3091 if (ecode == ARRAY_TYPE
3092 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3093 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3094 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3097 (INDIRECT_REF, NULL_TREE,
3098 convert_to_fat_pointer (TREE_TYPE (type),
3099 build_unary_op (ADDR_EXPR,
3102 /* Do something very similar for converting one unconstrained
3103 array to another. */
3104 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3106 build_unary_op (INDIRECT_REF, NULL_TREE,
3107 convert (TREE_TYPE (type),
3108 build_unary_op (ADDR_EXPR,
3114 return fold (convert_to_complex (type, expr));
3121 /* Remove all conversions that are done in EXP. This includes converting
3122 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3123 is nonzero, always return the address of the containing object even if
3124 the address is not bit-aligned. */
3127 remove_conversions (exp, true_address)
3131 switch (TREE_CODE (exp))
/* A constructor for a left-justified modular record wraps a single
   value; unwrap it (forcing TRUE_ADDRESS for the recursion).  */
3135 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3136 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3137 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
/* A COMPONENT_REF into a padding record is just padding removal.  */
3141 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3142 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3143 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
/* Strip all explicit conversion nodes.  */
3146 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3147 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3148 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3157 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3158 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3159 likewise return an expression pointing to the underlying array. */
3162 maybe_unconstrained_array (exp)
3165 enum tree_code code = TREE_CODE (exp);
3168 switch (TREE_CODE (TREE_TYPE (exp)))
3170 case UNCONSTRAINED_ARRAY_TYPE:
/* Dereference through the fat pointer's P_ARRAY field to reach the
   actual array object.  */
3171 if (code == UNCONSTRAINED_ARRAY_REF)
3174 = build_unary_op (INDIRECT_REF, NULL_TREE,
3175 build_component_ref (TREE_OPERAND (exp, 0),
3176 get_identifier ("P_ARRAY"),
/* Propagate readonly-ness onto the new reference.  */
3178 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
/* A NULL_EXPR: rebuild it with the underlying array type.  */
3182 else if (code == NULL_EXPR)
3183 return build1 (NULL_EXPR,
3184 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3185 (TREE_TYPE (TREE_TYPE (exp))))),
3186 TREE_OPERAND (exp, 0));
/* Recurse inside a WITH_RECORD_EXPR, re-wrapping only if the inner
   expression actually changed.  */
3188 else if (code == WITH_RECORD_EXPR
3189 && (TREE_OPERAND (exp, 0)
3190 != (new = maybe_unconstrained_array
3191 (TREE_OPERAND (exp, 0)))))
3192 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3193 TREE_OPERAND (exp, 1));
/* A record containing a template: its second field is the array
   (possibly still wrapped in a padding record, which we strip).  */
3196 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3199 = build_component_ref (exp, NULL_TREE,
3200 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3201 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3202 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3203 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3216 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3219 unchecked_convert (type, expr)
3223 tree etype = TREE_TYPE (expr);
3225 /* If the expression is already the right type, we are done. */
3229 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3231 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3232 return build (WITH_RECORD_EXPR, type,
3233 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3234 TREE_OPERAND (expr, 1));
3236 /* If both types types are integral just do a normal conversion.
3237 Likewise for a conversion to an unconstrained array. */
3238 if ((((INTEGRAL_TYPE_P (type)
3239 && ! (TREE_CODE (type) == INTEGER_TYPE
3240 && TYPE_VAX_FLOATING_POINT_P (type)))
3241 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3242 || (TREE_CODE (type) == RECORD_TYPE
3243 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3244 && ((INTEGRAL_TYPE_P (etype)
3245 && ! (TREE_CODE (etype) == INTEGER_TYPE
3246 && TYPE_VAX_FLOATING_POINT_P (etype)))
3247 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3248 || (TREE_CODE (etype) == RECORD_TYPE
3249 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3250 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3254 if (TREE_CODE (etype) == INTEGER_TYPE
3255 && TYPE_BIASED_REPRESENTATION_P (etype))
3257 tree ntype = copy_type (etype);
3259 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3260 TYPE_MAIN_VARIANT (ntype) = ntype;
3261 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3264 if (TREE_CODE (type) == INTEGER_TYPE
3265 && TYPE_BIASED_REPRESENTATION_P (type))
3267 rtype = copy_type (type);
3268 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3269 TYPE_MAIN_VARIANT (rtype) = rtype;
3272 expr = convert (rtype, expr);
3274 expr = build1 (GNAT_NOP_EXPR, type, expr);
3277 /* If we are converting TO an integral type whose precision is not the
3278 same as its size, first unchecked convert to a record that contains
3279 an object of the output type. Then extract the field. */
3280 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3281 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3282 GET_MODE_BITSIZE (TYPE_MODE (type))))
3284 tree rec_type = make_node (RECORD_TYPE);
3285 tree field = create_field_decl (get_identifier ("OBJ"), type,
3286 rec_type, 1, 0, 0, 0);
3288 TYPE_FIELDS (rec_type) = field;
3289 layout_type (rec_type);
3291 expr = unchecked_convert (rec_type, expr);
3292 expr = build_component_ref (expr, NULL_TREE, field);
3295 /* Similarly for integral input type whose precision is not equal to its
3297 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3298 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3299 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3301 tree rec_type = make_node (RECORD_TYPE);
3303 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3306 TYPE_FIELDS (rec_type) = field;
3307 layout_type (rec_type);
3309 expr = build_constructor (rec_type, build_tree_list (field, expr));
3310 expr = unchecked_convert (type, expr);
3313 /* We have a special case when we are converting between two
3314 unconstrained array types. In that case, take the address,
3315 convert the fat pointer types, and dereference. */
3316 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3317 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3318 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3319 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3320 build_unary_op (ADDR_EXPR, NULL_TREE,
3324 expr = maybe_unconstrained_array (expr);
3325 etype = TREE_TYPE (expr);
3326 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3329 /* If the result is an integral type whose size is not equal to
3330 the size of the underlying machine type, sign- or zero-extend
3331 the result. We need not do this in the case where the input is
3332 an integral type of the same precision and signedness or if the output
3333 is a biased type or if both the input and output are unsigned. */
3334 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3335 && ! (TREE_CODE (type) == INTEGER_TYPE
3336 && TYPE_BIASED_REPRESENTATION_P (type))
3337 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3338 GET_MODE_BITSIZE (TYPE_MODE (type)))
3339 && ! (INTEGRAL_TYPE_P (etype)
3340 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3341 && operand_equal_p (TYPE_RM_SIZE (type),
3342 (TYPE_RM_SIZE (etype) != 0
3343 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3345 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3347 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3348 TREE_UNSIGNED (type));
3350 = convert (base_type,
3351 size_binop (MINUS_EXPR,
3353 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3354 TYPE_RM_SIZE (type)));
3357 build_binary_op (RSHIFT_EXPR, base_type,
3358 build_binary_op (LSHIFT_EXPR, base_type,
3359 convert (base_type, expr),
3364 /* An unchecked conversion should never raise Constraint_Error. The code
3365 below assumes that GCC's conversion routines overflow the same way that
3366 the underlying hardware does. This is probably true. In the rare case
3367 when it is false, we can rely on the fact that such conversions are
3368 erroneous anyway. */
3369 if (TREE_CODE (expr) == INTEGER_CST)
3370 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3372   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3373      show no longer constant.  */
3374 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3375 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3376 TREE_CONSTANT (expr) = 0;