1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
26 ****************************************************************************/
/* Fallback definitions for target macros a back end may not provide.
   NOTE(review): the matching #endif lines are missing from this excerpt
   (the embedded original line numbers skip 54-55 and 58-59); this is a
   gappy, line-numbered listing and will not compile as-is.  */
52 #ifndef MAX_FIXED_MODE_SIZE
53 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
56 #ifndef MAX_BITS_PER_WORD
57 #define MAX_BITS_PER_WORD BITS_PER_WORD
/* File-scope state shared by the gigi utility routines below.
   NOTE(review): the declaration that should follow the "force_global"
   comment (original line 60) is elided in this excerpt.  */
60 /* If nonzero, pretend we are allocating at global level. */
63 /* Tree nodes for the various types and decls we create. */
64 tree gnat_std_decls[(int) ADT_LAST];
66 /* Functions to call for each of the possible raise reasons. */
67 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
69 /* Associates a GNAT tree node to a GCC tree node. It is used in
70 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
71 of `save_gnu_tree' for more info. */
72 static tree *associate_gnat_to_gnu;
74 /* This listhead is used to record any global objects that need elaboration.
75 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
76 initial value to assign. */
78 static tree pending_elaborations;
80 /* This stack allows us to momentarily switch to generating elaboration
81 lists for an inner context. */
83 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
/* Cache of integer types, indexed by precision and signedness, so each is
   allocated only once.  */
85 /* This variable keeps a table for types for each precision so that we only
86 allocate each of them once. Signed and unsigned types are kept separate.
88 Note that these types are only used when fold-const requests something
89 special. Perhaps we should NOT share these types; we'll see how it
91 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
93 /* Likewise for float types, but record these by mode. */
94 static tree float_types[NUM_MACHINE_MODES];
96 /* For each binding contour we allocate a binding_level structure which records
97 the entities defined or declared in that contour. Contours include:
100 one for each subprogram definition
101 one for each compound statement (declare block)
103 Binding contours are used to create GCC tree BLOCK nodes. */
/* NOTE(review): the `struct binding_level {' opener and the member
   declarations for the `names', `blocks' and `this_block' fields are
   elided in this excerpt -- only their comments and the trailing
   `level_chain' member survive.  */
107 /* A chain of ..._DECL nodes for all variables, constants, functions,
108 parameters and type declarations. These ..._DECL nodes are chained
109 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
110 in the reverse of the order supplied to be compatible with the
113 /* For each level (except the global one), a chain of BLOCK nodes for all
114 the levels that were entered and exited one level down from this one. */
116 /* The BLOCK node for this level, if one has been preallocated.
117 If 0, the BLOCK is allocated (if needed) when the level is popped. */
119 /* The binding level containing this one (the enclosing binding level). */
120 struct binding_level *level_chain;
123 /* The binding level currently in effect. */
124 static struct binding_level *current_binding_level = NULL;
126 /* A chain of binding_level structures awaiting reuse. */
127 static struct binding_level *free_binding_level = NULL;
129 /* The outermost binding level. This binding level is created when the
130 compiler is started and it will exist through the entire compilation. */
131 static struct binding_level *global_binding_level;
133 /* Binding level structures are initialized by copying this one. */
134 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
/* Forward declarations for static helpers defined later in this file
   (old-style PARAMS prototypes, pre-C89-prototype GCC convention).
   NOTE(review): the make_descriptor_field prototype is cut off mid
   parameter list in this excerpt (original line 144 elided).  */
136 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
137 static tree compute_related_constant PARAMS ((tree, tree));
138 static tree split_plus PARAMS ((tree, tree *));
139 static int value_zerop PARAMS ((tree));
140 static tree float_type_for_size PARAMS ((int, enum machine_mode));
141 static tree convert_to_fat_pointer PARAMS ((tree, tree));
142 static tree convert_to_thin_pointer PARAMS ((tree, tree));
143 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
145 static void mark_binding_level PARAMS ((PTR));
146 static void mark_e_stack PARAMS ((PTR));
148 /* Initialize the association of GNAT nodes to GCC trees. */
/* NOTE(review): the function header and local declarations are elided in
   this excerpt; only the body survives.  Allocates the GNAT->GCC mapping
   table and registers the file-scope caches as GC roots.  */
155 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
156 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
/* Start with no associations recorded.  */
158 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
159 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
161 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
162 ggc_add_tree_root (&pending_elaborations, 1);
163 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
164 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
165 (sizeof signed_and_unsigned_types
166 / sizeof signed_and_unsigned_types[0][0]));
167 ggc_add_tree_root (float_types, ARRAY_SIZE (float_types));
/* NOTE(review): "¤t_binding_level" below is mojibake -- `&curren'
   was turned into the U+00A4 currency sign by an HTML-entity pass; the
   original text is almost certainly `&current_binding_level'.  Fix the
   encoding before compiling.  */
169 ggc_add_root (¤t_binding_level, 1, sizeof current_binding_level,
173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
174 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
177 If GNU_DECL is zero, a previous association is to be reset. */
/* NOTE(review): the return type, braces, and the `if' statement wrapping
   the condition fragment below (presumably an abort on an invalid
   re-association) are elided in this excerpt.  */
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
/* NOTE(review): return type, braces, and the abort call guarded by the
   `if' below are elided in this excerpt.  */
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* NOTE(review): return type and braces elided in this excerpt.  */
213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
220 /* Return non-zero if we are currently in the global binding level. */
/* NOTE(review): this span interleaves fragments of three small queries
   (apparently global_bindings_p, getdecls, kept_level_p); their headers,
   braces, and part of the first return expression are elided.  */
225 return (force_global != 0 || current_binding_level == global_binding_level
229 /* Return the list of declarations in the current level. Note that this list
230 is in reverse order (it has to be so for back-end compatibility). */
235 return current_binding_level->names;
238 /* Nonzero if the current level needs to have a BLOCK made. */
243 return (current_binding_level->names != 0);
246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
/* NOTE(review): the function header line and braces are elided in this
   excerpt.  */
251 int ignore ATTRIBUTE_UNUSED;
253 struct binding_level *newlevel = NULL;
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
/* Otherwise allocate a fresh one (the `else' and assignment target line
   are elided here).  */
263 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
265 *newlevel = clear_binding_level;
267 /* Add this level to the front of the chain (stack) of levels that are
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
273 /* Exit a binding level.
274 Pop the level off, and restore the state of the identifier-decl mappings
275 that were in effect when this level was entered.
277 If KEEP is nonzero, this level had explicit declarations, so
278 and create a "block" (a BLOCK node) for the level
279 to record its declarations and subblocks for symbol table output.
281 If FUNCTIONBODY is nonzero, this level is the body of a function,
282 so create a block as if KEEP were set and also clear out all
285 If REVERSE is nonzero, reverse the order of decls before putting
286 them into the BLOCK. */
/* NOTE(review): the return type, parameter declarations, braces, and
   several interior lines of this function are elided in this excerpt;
   the comments and surviving statements are reproduced unchanged.  */
289 poplevel (keep, reverse, functionbody)
294 /* Points to a GCC BLOCK tree node. This is the BLOCK node construted for the
295 binding level that we are about to exit and which is returned by this
297 tree block = NULL_TREE;
300 tree subblock_chain = current_binding_level->blocks;
302 int block_previously_created;
304 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
305 nodes chained through the `names' field of current_binding_level are in
306 reverse order except for PARM_DECL node, which are explicitly stored in
308 current_binding_level->names
309 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
310 : current_binding_level->names;
312 /* Output any nested inline functions within this block which must be
313 compiled because their address is needed. */
314 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
315 if (TREE_CODE (decl_node) == FUNCTION_DECL
316 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
317 && DECL_INITIAL (decl_node) != 0)
319 push_function_context ();
320 output_inline_function (decl_node);
321 pop_function_context ();
/* Create or reuse the BLOCK node for this level.  */
325 block_previously_created = (current_binding_level->this_block != 0);
326 if (block_previously_created)
327 block = current_binding_level->this_block;
328 else if (keep || functionbody)
329 block = make_node (BLOCK);
332 BLOCK_VARS (block) = keep ? decl_chain : 0;
333 BLOCK_SUBBLOCKS (block) = subblock_chain;
336 /* Record the BLOCK node just built as the subblock its enclosing scope. */
337 for (subblock_node = subblock_chain; subblock_node;
338 subblock_node = TREE_CHAIN (subblock_node))
339 BLOCK_SUPERCONTEXT (subblock_node) = block;
341 /* Clear out the meanings of the local variables of this level. */
343 for (subblock_node = decl_chain; subblock_node;
344 subblock_node = TREE_CHAIN (subblock_node))
345 if (DECL_NAME (subblock_node) != 0)
346 /* If the identifier was used or addressed via a local extern decl,
347 don't forget that fact. */
348 if (DECL_EXTERNAL (subblock_node))
350 if (TREE_USED (subblock_node))
351 TREE_USED (DECL_NAME (subblock_node)) = 1;
352 if (TREE_ADDRESSABLE (subblock_node))
353 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
357 /* Pop the current level, and free the structure for reuse. */
358 struct binding_level *level = current_binding_level;
359 current_binding_level = current_binding_level->level_chain;
360 level->level_chain = free_binding_level;
361 free_binding_level = level;
366 /* This is the top level block of a function. The ..._DECL chain stored
367 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
368 leave them in the BLOCK because they are found in the FUNCTION_DECL
370 DECL_INITIAL (current_function_decl) = block;
371 BLOCK_VARS (block) = 0;
375 if (!block_previously_created)
376 current_binding_level->blocks
377 = chainon (current_binding_level->blocks, block);
380 /* If we did not make a block for the level just exited, any blocks made for
381 inner levels (since they cannot be recorded as subblocks in that level)
382 must be carried forward so they will later become subblocks of something
384 else if (subblock_chain)
385 current_binding_level->blocks
386 = chainon (current_binding_level->blocks, subblock_chain);
388 TREE_USED (block) = 1;
393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
/* NOTE(review): function header and braces elided in this excerpt.  */
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
/* NOTE(review): function header, braces, and the second argument of the
   first chainon call are elided in this excerpt.  Absorbs the BLOCK's
   vars and subblocks into the current binding level.  */
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
421 Returns the ..._DECL node. */
/* NOTE(review): return type, parameter declaration, braces, and the
   `else' line between the two context assignments are elided in this
   excerpt.  */
427 struct binding_level *b;
429 /* If at top level, there is no context. But PARM_DECLs always go in the
430 level of its function. */
431 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
433 b = global_binding_level;
434 DECL_CONTEXT (decl) = 0;
438 b = current_binding_level;
439 DECL_CONTEXT (decl) = current_function_decl;
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later if necessary. This needs to be
444 this way for compatibility with the back-end.
446 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
447 will cause trouble with the debugger and aren't needed anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 TREE_CHAIN (decl) = b->names;
455 /* For the declaration of a type, set its name if it either is not already
456 set, was set to an IDENTIFIER_NODE, indicating an internal name,
457 or if the previous type name was not derived from a source name.
458 We'd rather have the type named with a real name and all the pointer
459 types to the same object have the same POINTER_TYPE node. Code in this
460 function in c-decl.c makes a copy of the type node here, but that may
461 cause us trouble with incomplete types, so let's not try it (at least
464 if (TREE_CODE (decl) == TYPE_DECL
465 && DECL_NAME (decl) != 0
466 && (TYPE_NAME (TREE_TYPE (decl)) == 0
467 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
468 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
469 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
470 && ! DECL_ARTIFICIAL (decl))))
471 TYPE_NAME (TREE_TYPE (decl)) = decl;
476 /* Do little here. Set up the standard declarations later after the
477 front end has been run. */
/* NOTE(review): return type, braces, and several interior lines are
   elided in this excerpt.  In particular, a call that actually creates
   the global binding level (original line 488) appears to be missing:
   as shown, global_binding_level would be assigned the 0 just stored in
   current_binding_level.  */
480 gnat_init_decl_processing ()
484 /* Make the binding_level structure for global names. */
485 current_function_decl = 0;
486 current_binding_level = 0;
487 free_binding_level = 0;
489 global_binding_level = current_binding_level;
491 build_common_tree_nodes (0);
493 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
494 corresponding to the size of ptr_mode. Make this here since we need
495 this before we can expand the GNAT types. */
496 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
497 build_common_tree_nodes_2 (0);
499 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
501 /* We need to make the integer type before doing anything else.
502 We stitch this in to the appropriate GNAT type later. */
503 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
505 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
508 ptr_void_type_node = build_pointer_type (void_type_node);
512 /* Create the predefined scalar types such as `integer_type_node' needed
513 in the gcc back-end and initialize the global binding level. */
/* NOTE(review): return type, local declarations, braces, and many
   interior lines (assignment targets, argument continuations, `else'
   lines) are elided in this excerpt; surviving lines are reproduced
   unchanged.  */
516 init_gigi_decls (long_long_float_type, exception_type)
517 tree long_long_float_type, exception_type;
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
533 longest_float_type_node));
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
542 unsigned_type_node));
545 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Now declare runtime functions. */
552 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
554 /* malloc is a function declaration tree for a function to allocate
556 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
558 build_function_type (ptr_void_type_node,
559 tree_cons (NULL_TREE,
562 NULL_TREE, 0, 1, 1, 0);
564 /* free is a function declaration tree for a function to free memory. */
567 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
568 build_function_type (void_type_node,
569 tree_cons (NULL_TREE,
572 NULL_TREE, 0, 1, 1, 0);
574 /* Make the types and functions used for exception processing. */
576 = build_array_type (gnat_type_for_mode (Pmode, 0),
577 build_index_type (build_int_2 (5, 0)));
578 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
579 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
581 /* Functions to get and set the jumpbuf pointer for the current thread. */
583 = create_subprog_decl
584 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
585 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
586 NULL_TREE, 0, 1, 1, 0);
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
592 build_function_type (void_type_node,
593 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
594 NULL_TREE, 0, 1, 1, 0);
596 /* Function to get the current exception. */
598 = create_subprog_decl
599 (get_identifier ("system__soft_links__get_gnat_exception"),
601 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
602 NULL_TREE, 0, 1, 1, 0);
604 /* Functions that raise exceptions. */
606 = create_subprog_decl
607 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
608 build_function_type (void_type_node,
609 tree_cons (NULL_TREE,
610 build_pointer_type (except_type_node),
612 NULL_TREE, 0, 1, 1, 0);
614 /* If in no exception handlers mode, all raise statements are redirected to
615 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
616 this procedure will never be called in this mode. */
617 if (No_Exception_Handlers_Set ())
620 = create_subprog_decl
621 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
622 build_function_type (void_type_node,
623 tree_cons (NULL_TREE,
624 build_pointer_type (char_type_node),
625 tree_cons (NULL_TREE,
628 NULL_TREE, 0, 1, 1, 0);
/* All raise reasons share the single last-chance decl in this mode.  */
630 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
632 gnat_raise_decls[i] = decl;
635 /* Otherwise, make one decl for each exception reason. */
636 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
640 sprintf (name, "__gnat_rcheck_%.2d", i);
642 = create_subprog_decl
643 (get_identifier (name), NULL_TREE,
644 build_function_type (void_type_node,
645 tree_cons (NULL_TREE,
648 tree_cons (NULL_TREE,
651 NULL_TREE, 0, 1, 1, 0);
654 /* Indicate that these never return. */
655 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
656 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
657 TREE_TYPE (raise_nodefer_decl)
658 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
661 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
663 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
664 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
665 TREE_TYPE (gnat_raise_decls[i])
666 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
670 /* setjmp returns an integer and has one operand, which is a pointer to
673 = create_subprog_decl
674 (get_identifier ("setjmp"), NULL_TREE,
675 build_function_type (integer_type_node,
676 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
677 NULL_TREE, 0, 1, 1, 0);
679 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
680 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
682 main_identifier_node = get_identifier ("main");
/* Register the decl tables created above as GC roots.  */
684 ggc_add_tree_root (gnat_std_decls, ARRAY_SIZE (gnat_std_decls));
685 ggc_add_tree_root (gnat_raise_decls, ARRAY_SIZE (gnat_raise_decls));
688 /* This function is called indirectly from toplev.c to handle incomplete
689 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
690 compile_file in toplev.c makes an indirect call through the function pointer
691 incomplete_decl_finalize_hook which is initialized to this routine in
692 init_decl_processing. */
/* NOTE(review): return type, braces, and body are elided in this
   excerpt; only the K&R-style header survives.  */
695 gnat_finish_incomplete_decl (dont_care)
696 tree dont_care ATTRIBUTE_UNUSED;
701 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
702 nodes (FIELDLIST), finish constructing the record or union type.
703 If HAS_REP is nonzero, this record has a rep clause; don't call
704 layout_type but merely set the size and alignment ourselves.
705 If DEFER_DEBUG is nonzero, do not call the debugging routines
706 on this type; it will be done later. */
/* NOTE(review): return type, parameter declarations, braces, and many
   interior lines (switch/case heads, `else' lines, assignment targets)
   are elided in this excerpt; surviving lines are reproduced unchanged.  */
709 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
715 enum tree_code code = TREE_CODE (record_type);
716 tree ada_size = bitsize_zero_node;
717 tree size = bitsize_zero_node;
718 tree size_unit = size_zero_node;
722 TYPE_FIELDS (record_type) = fieldlist;
/* Reuse an existing TYPE_DECL as the stub decl, else push a new one.  */
724 if (TYPE_NAME (record_type) != 0
725 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL
726 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
728 TYPE_STUB_DECL (record_type)
729 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
732 /* We don't need both the typedef name and the record name output in
733 the debugging information, since they are the same. */
734 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
736 /* Globally initialize the record first. If this is a rep'ed record,
737 that just means some initializations; otherwise, layout the record. */
741 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
742 TYPE_MODE (record_type) = BLKmode;
743 if (TYPE_SIZE (record_type) == 0)
745 TYPE_SIZE (record_type) = bitsize_zero_node;
746 TYPE_SIZE_UNIT (record_type) = size_zero_node;
751 /* Ensure there isn't a size already set. There can be in an error
752 case where there is a rep clause but all fields have errors and
753 no longer have a position. */
754 TYPE_SIZE (record_type) = 0;
755 layout_type (record_type);
758 /* At this point, the position and size of each field is known. It was
759 either set before entry by a rep clause, or by laying out the type
760 above. We now make a pass through the fields (in reverse order for
761 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
762 (for rep'ed records that are not padding types); and the mode (for
765 if (code == QUAL_UNION_TYPE)
766 fieldlist = nreverse (fieldlist);
768 for (field = fieldlist; field; field = TREE_CHAIN (field))
770 tree type = TREE_TYPE (field);
771 tree this_size = DECL_SIZE (field);
772 tree this_size_unit = DECL_SIZE_UNIT (field);
773 tree this_ada_size = DECL_SIZE (field);
775 /* We need to make an XVE/XVU record if any field has variable size,
776 whether or not the record does. For example, if we have an union,
777 it may be that all fields, rounded up to the alignment, have the
778 same size, in which case we'll use that size. But the debug
779 output routines (except Dwarf2) won't be able to output the fields,
780 so we need to make the special record. */
781 if (TREE_CODE (this_size) != INTEGER_CST)
/* Use the Ada size of composite field types when available.  */
784 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
785 || TREE_CODE (type) == QUAL_UNION_TYPE)
786 && ! TYPE_IS_FAT_POINTER_P (type)
787 && ! TYPE_CONTAINS_TEMPLATE_P (type)
788 && TYPE_ADA_SIZE (type) != 0)
789 this_ada_size = TYPE_ADA_SIZE (type);
791 if (has_rep && ! DECL_BIT_FIELD (field))
792 TYPE_ALIGN (record_type)
793 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* NOTE(review): the enclosing switch on `code' and the UNION_TYPE case
   label are elided here; for unions the size is the MAX over fields.  */
798 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
799 size = size_binop (MAX_EXPR, size, this_size);
800 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
803 case QUAL_UNION_TYPE:
805 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
806 this_ada_size, ada_size));
807 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
809 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
810 this_size_unit, size_unit));
814 /* Since we know here that all fields are sorted in order of
815 increasing bit position, the size of the record is one
816 higher than the ending bit of the last field processed
817 unless we have a rep clause, since in that case we might
818 have a field outside a QUAL_UNION_TYPE that has a higher ending
819 position. So use a MAX in that case. Also, if this field is a
820 QUAL_UNION_TYPE, we need to take into account the previous size in
821 the case of empty variants. */
823 = merge_sizes (ada_size, bit_position (field), this_ada_size,
824 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
825 size = merge_sizes (size, bit_position (field), this_size,
826 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
828 = merge_sizes (size_unit, byte_position (field), this_size_unit,
829 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
/* Restore the original field order for QUAL_UNION_TYPEs.  */
837 if (code == QUAL_UNION_TYPE)
838 nreverse (fieldlist);
840 /* If this is a padding record, we never want to make the size smaller than
841 what was specified in it, if any. */
842 if (TREE_CODE (record_type) == RECORD_TYPE
843 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
845 size = TYPE_SIZE (record_type);
846 size_unit = TYPE_SIZE_UNIT (record_type);
849 /* Now set any of the values we've just computed that apply. */
850 if (! TYPE_IS_FAT_POINTER_P (record_type)
851 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
852 TYPE_ADA_SIZE (record_type) = ada_size;
854 #ifdef ROUND_TYPE_SIZE
855 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
856 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
857 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
/* NOTE(review): the #else line between the two branches is elided.  */
859 size = round_up (size, TYPE_ALIGN (record_type));
860 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
864 && ! (TREE_CODE (record_type) == RECORD_TYPE
865 && TYPE_IS_PADDING_P (record_type)
866 && TREE_CODE (size) != INTEGER_CST
867 && contains_placeholder_p (size)))
869 TYPE_SIZE (record_type) = size;
870 TYPE_SIZE_UNIT (record_type) = size_unit;
874 compute_record_mode (record_type);
878 /* If this record is of variable size, rename it so that the
879 debugger knows it is and make a new, parallel, record
880 that tells the debugger how the record is laid out. See
/* Build the parallel XVE/XVU debug record.  */
885 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
886 ? UNION_TYPE : TREE_CODE (record_type));
887 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
889 = concat_id_with_name (orig_id,
890 TREE_CODE (record_type) == QUAL_UNION_TYPE
892 tree last_pos = bitsize_zero_node;
895 TYPE_NAME (new_record_type) = new_id;
896 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
897 TYPE_STUB_DECL (new_record_type)
898 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
899 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
900 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
901 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
902 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
904 /* Now scan all the fields, replacing each field with a new
905 field corresponding to the new encoding. */
906 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
907 old_field = TREE_CHAIN (old_field))
909 tree field_type = TREE_TYPE (old_field);
910 tree field_name = DECL_NAME (old_field);
912 tree curpos = bit_position (old_field);
914 unsigned int align = 0;
917 /* See how the position was modified from the last position.
919 There are two basic cases we support: a value was added
920 to the last position or the last position was rounded to
921 a boundary and they something was added. Check for the
922 first case first. If not, see if there is any evidence
923 of rounding. If so, round the last position and try
926 If this is a union, the position can be taken as zero. */
928 if (TREE_CODE (new_record_type) == UNION_TYPE)
929 pos = bitsize_zero_node, align = 0;
931 pos = compute_related_constant (curpos, last_pos);
933 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
934 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
936 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
937 pos = compute_related_constant (curpos,
938 round_up (last_pos, align));
940 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
941 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
942 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
943 && host_integerp (TREE_OPERAND
944 (TREE_OPERAND (curpos, 0), 1),
949 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
950 pos = compute_related_constant (curpos,
951 round_up (last_pos, align));
954 /* If we can't compute a position, set it to zero.
956 ??? We really should abort here, but it's too much work
957 to get this correct for all cases. */
960 pos = bitsize_zero_node;
962 /* See if this type is variable-size and make a new type
963 and indicate the indirection if so. */
964 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
966 field_type = build_pointer_type (field_type);
970 /* Make a new field name, if necessary. */
971 if (var || align != 0)
/* Encode variability ('L') or alignment ('A') in the name suffix.  */
976 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
977 align / BITS_PER_UNIT);
979 strcpy (suffix, "XVL");
981 field_name = concat_id_with_name (field_name, suffix);
984 new_field = create_field_decl (field_name, field_type,
986 DECL_SIZE (old_field), pos, 0);
987 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
988 TYPE_FIELDS (new_record_type) = new_field;
990 /* If old_field is a QUAL_UNION_TYPE, take its size as being
991 zero. The only time it's not the last field of the record
992 is when there are other components at fixed positions after
993 it (meaning there was a rep clause for every field) and we
994 want to be able to encode them. */
995 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
996 (TREE_CODE (TREE_TYPE (old_field))
999 : DECL_SIZE (old_field));
1002 TYPE_FIELDS (new_record_type)
1003 = nreverse (TYPE_FIELDS (new_record_type));
1005 rest_of_type_compilation (new_record_type, global_bindings_p ());
1008 rest_of_type_compilation (record_type, global_bindings_p ());
1012 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1013 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1014 if this represents a QUAL_UNION_TYPE in which case we must look for
1015 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1016 is nonzero, we must take the MAX of the end position of this field
1017 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1019 We return an expression for the size. */
/* NOTE(review): return type, remaining parameter declarations, braces,
   and the `if (has_rep)' guard before the MAX_EXPR line are elided in
   this excerpt.  */
1022 merge_sizes (last_size, first_bit, size, special, has_rep)
1024 tree first_bit, size;
1028 tree type = TREE_TYPE (last_size);
1030 if (! special || TREE_CODE (size) != COND_EXPR)
1032 tree new = size_binop (PLUS_EXPR, first_bit, size);
1035 new = size_binop (MAX_EXPR, last_size, new);
/* For a QUAL_UNION_TYPE variant, recurse into each arm of the COND_EXPR,
   keeping LAST_SIZE where the arm's size is zero (empty variant).  */
1040 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1041 integer_zerop (TREE_OPERAND (size, 1))
1042 ? last_size : merge_sizes (last_size, first_bit,
1043 TREE_OPERAND (size, 1),
1045 integer_zerop (TREE_OPERAND (size, 2))
1046 ? last_size : merge_sizes (last_size, first_bit,
1047 TREE_OPERAND (size, 2),
1051 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1052    related by the addition of a constant.  Return that constant if so.  */
/* NOTE(review): several lines are elided in this extract (numbering gaps):
   the return type, remaining parameter declarations, braces and the
   return statements.  Code kept byte-identical.  */
1055 compute_related_constant (op0, op1)
/* Split each operand into constant + variable parts; the candidate answer
   is the difference of the constant parts, validated below.  */
1058   tree op0_var, op1_var;
1059   tree op0_con = split_plus (op0, &op0_var);
1060   tree op1_con = split_plus (op1, &op1_var);
1061   tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1063   if (operand_equal_p (op0_var, op1_var, 0))
1065   else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1071 /* Utility function of above to split a tree OP which may be a sum, into a
1072    constant part, which is returned, and a variable part, which is stored
1073    in *PVAR.  *PVAR may be size_zero_node.  All operations must be of
/* NOTE(review): lines are elided in this extract (numbering gaps): the tail
   of the header comment, return type, parameter declarations, braces and
   some returns.  Code kept byte-identical.  */
1077 split_plus (in, pvar)
1081   tree result = bitsize_zero_node;
/* Strip NON_LVALUE_EXPR wrappers so the code below sees the real node.  */
1083   while (TREE_CODE (in) == NON_LVALUE_EXPR)
1084     in = TREE_OPERAND (in, 0);
1087   if (TREE_CODE (in) == INTEGER_CST)
1089       *pvar = bitsize_zero_node;
1092   else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
/* Recurse on both operands; note TREE_CODE (in) (PLUS or MINUS) is reused
   as the operator when recombining the right-hand parts.  */
1094       tree lhs_var, rhs_var;
1095       tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1096       tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1098       result = size_binop (PLUS_EXPR, result, lhs_con);
1099       result = size_binop (TREE_CODE (in), result, rhs_con);
/* If neither side had a constant part, report a zero constant.  */
1101       if (lhs_var == TREE_OPERAND (in, 0)
1102 	  && rhs_var == TREE_OPERAND (in, 1))
1103 	return bitsize_zero_node;
1105       *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1109   return bitsize_zero_node;
1112 /* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
1113    subprogram.  If it is void_type_node, then we are dealing with a procedure,
1114    otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
1115    PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
1116    copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1117    RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1118    object.  RETURNS_BY_REF is nonzero if the function returns by reference.
1119    RETURNS_WITH_DSP is nonzero if the function is to return with a
1120    depressed stack pointer.  */
/* NOTE(review): this extract elides lines (numbering gaps): the return type,
   some parameter/local declarations, braces and the final return.  Code
   kept byte-identical.  */
1123 create_subprog_type (return_type, param_decl_list, cico_list,
1124 		     returns_unconstrained, returns_by_ref, returns_with_dsp)
1126      tree param_decl_list;
1128      int returns_unconstrained, returns_by_ref, returns_with_dsp;
1130   /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1131      the subprogram formal parameters.  This list is generated by traversing the
1132      input list of PARM_DECL nodes.  */
1133   tree param_type_list = NULL;
/* Collect the type of each PARM_DECL; tree_cons prepends, so the list is
   built in reverse and fixed up with nreverse below.  */
1137   for (param_decl = param_decl_list; param_decl;
1138        param_decl = TREE_CHAIN (param_decl))
1139     param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1142   /* The list of the function parameter types has to be terminated by the void
1143      type to signal to the back-end that we are not dealing with a variable
1144      parameter subprogram, but that the subprogram has a fixed number of
1146   param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1148   /* The list of argument types has been created in reverse
1150   param_type_list = nreverse (param_type_list);
1152   type = build_function_type (return_type, param_type_list);
1154   /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
1155      or the new type should, make a copy of TYPE.  Likewise for
1156      RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
1157   if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1158       || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1159       || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1160     type = copy_type (type);
/* Stamp the Ada-specific flags onto the (possibly copied) type node.  */
1162   TYPE_CI_CO_LIST (type) = cico_list;
1163   TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1164   TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1165   TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1169 /* Return a copy of TYPE but safe to modify in any way.  */
/* NOTE(review): the function header line itself (name/parameter) is elided
   in this extract (numbering gap 1169 -> 1175); presumably this is
   `copy_type (type)` -- confirm against the full source.  Code kept
   byte-identical.  */
1175   tree new = copy_node (type);
1177   /* copy_node clears this field instead of copying it, because it is
1178      aliased with TREE_CHAIN.  */
1179   TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/variant caches so later
   modifications cannot leak back into shared type nodes.  */
1181   TYPE_POINTER_TO (new) = 0;
1182   TYPE_REFERENCE_TO (new) = 0;
1183   TYPE_MAIN_VARIANT (new) = new;
1184   TYPE_NEXT_VARIANT (new) = 0;
1189 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1190    TYPE_INDEX_TYPE is INDEX.  */
/* NOTE(review): return type, parameter declarations, braces and the final
   return are elided in this extract (numbering gaps).  Code kept
   byte-identical.  */
1193 create_index_type (min, max, index)
1197   /* First build a type for the desired range.  */
1198   tree type = build_index_2_type (min, max);
1200   /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
1201      doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
1202      is set, but not to INDEX, make a copy of this type with the requested
1203      index type.  Note that we have no way of sharing these types, but that's
1204      only a small hole.  */
1205   if (TYPE_INDEX_TYPE (type) == index)
1207   else if (TYPE_INDEX_TYPE (type) != 0)
1208     type = copy_type (type);
1210   TYPE_INDEX_TYPE (type) = index;
1214 /* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
1215    string) and TYPE is a ..._TYPE node giving its data type.
1216    ARTIFICIAL_P is nonzero if this is a declaration that was generated
1217    by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
1218    information about this type.  */
/* NOTE(review): lines elided in this extract (numbering gaps): return type,
   some parameter declarations, part of the DECL_IGNORED_P condition
   (presumably the !debug_info_p test -- confirm), and the final return.
   Code kept byte-identical.  */
1221 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1224      struct attrib *attr_list;
1228   tree type_decl = build_decl (TYPE_DECL, type_name, type);
1229   enum tree_code code = TREE_CODE (type);
1231   DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Enter the decl in the current binding level, then apply any pragmas or
   machine attributes attached to it.  */
1232   pushdecl (type_decl);
1233   process_attributes (type_decl, attr_list);
1235   /* Pass type declaration information to the debugger unless this is an
1236      UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1237      and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1238      a dummy type, which will be completed later, or a type for which
1239      debugging information was not requested.  */
1240   if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1242     DECL_IGNORED_P (type_decl) = 1;
1243   else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1244 	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1245 		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1246     rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1251 /* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
1252    ASM_NAME is its assembler name (if provided).  TYPE is its data type
1253    (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
1254    expression; NULL_TREE if none.
1256    CONST_FLAG is nonzero if this variable is constant.
1258    PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1259    the current compilation unit.  This flag should be set when processing the
1260    variable definitions in a package specification.  EXTERN_FLAG is nonzero
1261    when processing an external variable declaration (as opposed to a
1262    definition: no storage is to be allocated for the variable here).
1264    STATIC_FLAG is only relevant when not at top level.  In that case
1265    it indicates whether to always allocate storage to the variable.  */
/* NOTE(review): heavily elided in this extract (numbering gaps): the return
   type, most parameter declarations, the start of the `init_const`
   declaration (only its tail survives below), the `var_decl` declarator
   line before `= build_decl`, several braces, guard statements and the
   final return.  Code kept byte-identical.  */
1268 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1269 		 extern_flag, static_flag, attr_list)
1278      struct attrib *attr_list;
/* Tail of the (elided) `init_const` computation: an initializer counts as
   constant only if its type matches and, for static storage, it is valid
   as a static initializer.  */
1283      : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1284 	&& (global_bindings_p () || static_flag
1285 	    ? 0 != initializer_constant_valid_p (var_init,
1286 						 TREE_TYPE (var_init))
1287 	    : TREE_CONSTANT (var_init))));
1289     = build_decl ((const_flag && init_const
1290 		   /* Only make a CONST_DECL for sufficiently-small objects.
1291 		      We consider complex double "sufficiently-small" */
1292 		   && TYPE_SIZE (type) != 0
1293 		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
1294 		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1295 					     GET_MODE_SIZE (DCmode)))
1296 		  ? CONST_DECL : VAR_DECL, var_name, type);
1297   tree assign_init = 0;
1299   /* If this is external, throw away any initializations unless this is a
1300      CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
1301      we are defining a global here, leave a constant initialization and save
1302      any variable elaborations for the elaboration routine.  Otherwise, if
1303      the initializing expression is not the same as TYPE, generate the
1304      initialization with an assignment statement, since it knows how
1305      to do the required adjustents.  If we are just annotating types,
1306      throw away the initialization if it isn't a constant.  */
1308   if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1309       || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1312   if (global_bindings_p () && var_init != 0 && ! init_const)
/* Defer non-constant global initializations to the elaboration routine.  */
1314       add_pending_elaborations (var_decl, var_init);
1318   else if (var_init != 0
1319 	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1320 		!= TYPE_MAIN_VARIANT (type))
1321 	       || (static_flag && ! init_const)))
1322     assign_init = var_init, var_init = 0;
1324   DECL_COMMON (var_decl) = !flag_no_common;
1325   DECL_INITIAL (var_decl) = var_init;
1326   TREE_READONLY (var_decl) = const_flag;
1327   DECL_EXTERNAL (var_decl) = extern_flag;
1328   TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1329   TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1330   TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1331     = TYPE_VOLATILE (type);
1333   /* At the global binding level we need to allocate static storage for the
1334      variable if and only if its not external.  If we are not at the top level
1335      we allocate automatic storage unless requested not to.  */
1336   TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1339     SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1341   process_attributes (var_decl, attr_list);
1343   /* Add this decl to the current binding level and generate any
1344      needed code and RTL.  */
1345   var_decl = pushdecl (var_decl);
1346   expand_decl (var_decl);
1348   if (DECL_CONTEXT (var_decl) != 0)
1349     expand_decl_init (var_decl);
1351   /* If this is volatile, force it into memory.  */
1352   if (TREE_SIDE_EFFECTS (var_decl))
1353     gnat_mark_addressable (var_decl);
1355   if (TREE_CODE (var_decl) != CONST_DECL)
1356     rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1358   if (assign_init != 0)
1360       /* If VAR_DECL has a padded type, convert it to the unpadded
1361 	 type so the assignment is done properly.  */
1362       tree lhs = var_decl;
1364       if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1365 	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1366 	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
/* Emit the deferred initialization as an explicit assignment statement
   (its RHS continuation is elided in this extract).  */
1368       expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1375 /* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
1376    type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
1377    this field is in a record type with a "pragma pack".  If SIZE is nonzero
1378    it is the specified size for this field.  If POS is nonzero, it is the bit
1379    position.  If ADDRESSABLE is nonzero, it means we are allowed to take
1380    the address of this field for aliasing purposes.  */
/* NOTE(review): elided in this extract (numbering gaps): return type,
   parameter declarations, several braces/guards, parts of the bitfield
   condition and of the SET_DECL_OFFSET_ALIGN call, and the final return.
   Code kept byte-identical.  */
1383 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1392   tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1394   DECL_CONTEXT (field_decl) = record_type;
1395   TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1397   /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1398      byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1399      If it is a padding type where the inner field is of variable size, it
1400      must be at its natural alignment.  Just handle the packed case here; we
1401      will disallow non-aligned rep clauses elsewhere.  */
1402   if (packed && TYPE_MODE (field_type) == BLKmode)
1403     DECL_ALIGN (field_decl)
1404       = ((TREE_CODE (field_type) == RECORD_TYPE
1405 	  && TYPE_IS_PADDING_P (field_type)
1406 	  && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1407 	 ?  TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1409   /* If a size is specified, use it.  Otherwise, see if we have a size
1410      to use that may differ from the natural size of the object.  */
1412     size = convert (bitsizetype, size);
/* For fully-packed records, shrink the field to its RM (representation
   manual) size when that differs from the type's natural size.  */
1415   if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1416 					TYPE_SIZE (field_type), 0))
1417     size = rm_size (field_type);
1419   /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1421   if (size != 0 && TREE_CODE (size) == INTEGER_CST
1422       && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1423     size = round_up (size, BITS_PER_UNIT);
1426   /* Make a bitfield if a size is specified for two reasons: first if the size
1427      differs from the natural size.  Second, if the alignment is insufficient.
1428      There are a number of ways the latter can be true.  But never make a
1429      bitfield if the type of the field has a nonconstant size.  */
1431   if (size != 0 && TREE_CODE (size) == INTEGER_CST
1432       && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1433       && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1435 	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1436 					    bitsize_int (TYPE_ALIGN
1439 	  || (TYPE_ALIGN (record_type) != 0
1440 	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1442       DECL_BIT_FIELD (field_decl) = 1;
1443       DECL_SIZE (field_decl) = size;
1444       if (! packed && pos == 0)
1445 	DECL_ALIGN (field_decl)
1446 	  = (TYPE_ALIGN (record_type) != 0
1447 	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1448 	     : TYPE_ALIGN (field_type));
1451   DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1452   DECL_ALIGN (field_decl)
1453     = MAX (DECL_ALIGN (field_decl),
1454 	   DECL_BIT_FIELD (field_decl) ? 1
1455 	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1456 	   : TYPE_ALIGN (field_type));
1460       /* We need to pass in the alignment the DECL is known to have.
1461 	 This is the lowest-order bit set in POS, but no more than
1462 	 the alignment of the record, if one is specified.  Note
1463 	 that an alignment of 0 is taken as infinite.  */
1464       unsigned int known_align;
1466       if (host_integerp (pos, 1))
/* x & -x isolates the lowest set bit of the constant position.  */
1467 	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1469 	known_align = BITS_PER_UNIT;
1471       if (TYPE_ALIGN (record_type)
1472 	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1473 	known_align = TYPE_ALIGN (record_type);
1475       layout_decl (field_decl, known_align);
1476       SET_DECL_OFFSET_ALIGN (field_decl,
1477 			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1479       pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1480 		    &DECL_FIELD_BIT_OFFSET (field_decl),
1481 		    DECL_OFFSET_ALIGN (field_decl), pos);
1483       DECL_HAS_REP_P (field_decl) = 1;
1486   /* If the field type is passed by reference, we will have pointers to the
1487      field, so it is addressable.  */
1488   if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1491   /* Mark the decl as nonaddressable if it either is indicated so semantically
1492      or if it is a bit field.  */
1493   DECL_NONADDRESSABLE_P (field_decl)
1494     = ! addressable || DECL_BIT_FIELD (field_decl);
1499 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1500    effects, has the value of zero.  */
/* NOTE(review): the function header line is elided in this extract
   (numbering gap 1500 -> 1506); code kept byte-identical.  */
/* For a COMPOUND_EXPR the value is the second operand, so recurse there;
   otherwise defer to integer_zerop.  */
1506   if (TREE_CODE (exp) == COMPOUND_EXPR)
1507     return value_zerop (TREE_OPERAND (exp, 1));
1509   return integer_zerop (exp);
1512 /* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
1513    PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
1514    readonly (either an IN parameter or an address of a pass-by-ref
/* NOTE(review): elided here (numbering gaps): the end of the header
   comment, return type, parameter declarations and final return.  Code
   kept byte-identical.  */
1518 create_param_decl (param_name, param_type, readonly)
1523   tree param_decl = build_decl (PARM_DECL, param_name, param_type);
/* Record both the declared type and the as-written type; they coincide
   here since no promotion is applied at this point.  */
1525   DECL_ARG_TYPE (param_decl) = param_type;
1526   DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1527   TREE_READONLY (param_decl) = readonly;
1531 /* Given a DECL and ATTR_LIST, process the listed attributes.  */
/* NOTE(review): elided here (numbering gaps): return type, braces, `break`
   statements and the #else/#endif lines pairing with the #ifdef below.
   Code kept byte-identical.  */
1534 process_attributes (decl, attr_list)
1536      struct attrib *attr_list;
1538   for (; attr_list; attr_list = attr_list->next)
1539     switch (attr_list->type)
1541       case ATTR_MACHINE_ATTRIBUTE:
/* Hand machine attributes to GCC's generic attribute machinery.  */
1542 	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1544 			 ATTR_FLAG_TYPE_IN_PLACE);
1547       case ATTR_LINK_ALIAS:
1548 	TREE_STATIC (decl) = 1;
1549 	assemble_alias (decl, attr_list->name);
1552       case ATTR_WEAK_EXTERNAL:
1554 	declare_weak (decl);
/* Presumably this error is under an #else for targets lacking weak
   support -- confirm against the full source.  */
1556 	post_error ("?weak declarations not supported on this target",
1557 		    attr_list->error_point);
1560       case ATTR_LINK_SECTION:
1561 #ifdef ASM_OUTPUT_SECTION_NAME
1562 	DECL_SECTION_NAME (decl)
1563 	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
1564 			  IDENTIFIER_POINTER (attr_list->name));
1565 	DECL_COMMON (decl) = 0;
1567 	post_error ("?section attributes are not supported for this target",
1568 		    attr_list->error_point);
1574 /* Add some pending elaborations on the list.  */
/* NOTE(review): return type, parameter declarations and braces are elided
   in this extract.  Code kept byte-identical.  */
1577 add_pending_elaborations (var_decl, var_init)
/* Verify the Ada front end permits elaboration code at this point before
   queuing (VAR_DECL, VAR_INIT) on the global pending list.  */
1582     Check_Elaboration_Code_Allowed (error_gnat_node);
1584   pending_elaborations
1585     = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1588 /* Obtain any pending elaborations and clear the old list.  */
/* NOTE(review): return type, braces and the return statement are elided in
   this extract.  Code kept byte-identical.  */
1591 get_pending_elaborations ()
1593   /* Each thing added to the list went on the end; we want it on the
/* The list has a dummy head node: take its TREE_CHAIN and detach it.  */
1595   tree result = TREE_CHAIN (pending_elaborations);
1597   TREE_CHAIN (pending_elaborations) = 0;
1601 /* Mark the binding level stack.  */
/* NOTE(review): return type, parameter declaration and braces are elided in
   this extract.  Code kept byte-identical.  This is a GC root-marking
   callback for the pre-gengtype garbage collector.  */
1604 mark_binding_level (arg)
1607   struct binding_level *level = *(struct binding_level **) arg;
/* Walk the chain of binding levels, marking each level's tree lists so
   the collector does not free them.  */
1609   for (; level != 0; level = level->level_chain)
1611       ggc_mark_tree (level->names);
1612       ggc_mark_tree (level->blocks);
1613       ggc_mark_tree (level->this_block);
1617 /* Mark the pending elaboration list.  */
/* NOTE(review): the function header (presumably `mark_e_stack (data)`),
   null check and braces are elided in this extract.  Code kept
   byte-identical.  GC root-marking callback for the elaboration stack;
   recurses down the stack of saved elaboration lists.  */
1623   struct e_stack *p = *((struct e_stack **) data);
1627       ggc_mark_tree (p->elab_list);
1628       mark_e_stack (&p->next);
1632 /* Return nonzero if there are pending elaborations.  */
/* NOTE(review): return type and braces elided in this extract.  The list
   has a dummy head, so pending entries exist iff its TREE_CHAIN is set.  */
1635 pending_elaborations_p ()
1637   return TREE_CHAIN (pending_elaborations) != 0;
1640 /* Save a copy of the current pending elaboration list and make a new
/* NOTE(review): the end of this comment, return type and braces are elided
   in this extract.  Code kept byte-identical.  */
1644 push_pending_elaborations ()
/* Push the current list onto elist_stack and start a fresh list with a
   dummy head node.  NOTE(review): the xmalloc result is not checked here;
   presumably xmalloc aborts on failure -- confirm.  */
1646   struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1648   p->next = elist_stack;
1649   p->elab_list = pending_elaborations;
1651   pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1654 /* Pop the stack of pending elaborations.  */
/* NOTE(review): return type, braces and (presumably) the free of the popped
   e_stack node are elided in this extract -- confirm against the full
   source.  Code kept byte-identical.  */
1657 pop_pending_elaborations ()
1659   struct e_stack *p = elist_stack;
/* Restore the saved list and unlink the top stack entry.  */
1661   pending_elaborations = p->elab_list;
1662   elist_stack = p->next;
1666 /* Return the current position in pending_elaborations so we can insert
1667    elaborations after that point.  */
/* NOTE(review): return type and braces elided in this extract.  Returns the
   last TREE_LIST node, i.e. the current tail of the pending list.  */
1670 get_elaboration_location ()
1672   return tree_last (pending_elaborations);
1675 /* Insert the current elaborations after ELAB, which is in some elaboration
/* NOTE(review): the end of this comment, return type, parameter declaration
   and braces are elided in this extract.  Code kept byte-identical.  */
1679 insert_elaboration_list (elab)
1682   tree next = TREE_CHAIN (elab);
1684   if (TREE_CHAIN (pending_elaborations))
/* Splice the current pending list (minus its dummy head) into the chain
   right after ELAB, then reset the current list to empty.  */
1686       TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1687       TREE_CHAIN (tree_last (pending_elaborations)) = next;
1688       TREE_CHAIN (pending_elaborations) = 0;
1692 /* Returns a LABEL_DECL node for LABEL_NAME.  */
/* NOTE(review): return type, parameter declaration, braces and the final
   return are elided in this extract.  Code kept byte-identical.  */
1695 create_label_decl (label_name)
1698   tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Attach the label to the current function and record the source location
   from the global lexer state (lineno/input_filename).  */
1700   DECL_CONTEXT (label_decl) = current_function_decl;
1701   DECL_MODE (label_decl) = VOIDmode;
1702   DECL_SOURCE_LINE (label_decl) = lineno;
1703   DECL_SOURCE_FILE (label_decl) = input_filename;
1708 /* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
1709    ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1710    node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1711    PARM_DECL nodes chained through the TREE_CHAIN field).
1713    INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1714    appropriate fields in the FUNCTION_DECL.  */
/* NOTE(review): elided in this extract (numbering gaps): return type, some
   parameter declarations, braces and the body of the nested-inline `if`
   (presumably setting DECL_EXTERNAL/DECL_INLINE -- confirm).  Code kept
   byte-identical.  */
1717 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1718 		     inline_flag, public_flag, extern_flag, attr_list)
1722      tree param_decl_list;
1726      struct attrib *attr_list;
1728   tree return_type = TREE_TYPE (subprog_type);
1729   tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1731   /* If this is a function nested inside an inlined external function, it
1732      means we aren't going to compile the outer function unless it is
1733      actually inlined, so do the same for us.  */
1734   if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1735       && DECL_EXTERNAL (current_function_decl))
1738   DECL_EXTERNAL (subprog_decl)  = extern_flag;
1739   TREE_PUBLIC (subprog_decl)    = public_flag;
1740   DECL_INLINE (subprog_decl)    = inline_flag;
/* Constant/volatile function types map onto readonly/side-effecting
   decl bits.  */
1741   TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
1742   TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1743   TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1744   DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1745   DECL_RESULT (subprog_decl)    = build_decl (RESULT_DECL, 0, return_type);
1748     SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1750   process_attributes (subprog_decl, attr_list);
1752   /* Add this decl to the current binding level.  */
1753   subprog_decl = pushdecl (subprog_decl);
1755   /* Output the assembler code and/or RTL for the declaration.  */
1756   rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1758   return subprog_decl;
1761 /* Count how deep we are into nested functions.  This is because
1762    we shouldn't call the backend function context routines unless we
1763    are in a nested function.  */
1765 static int function_nesting_depth;
1767 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1768    body.  This routine needs to be invoked before processing the declarations
1769    appearing in the subprogram.  */
/* NOTE(review): elided in this extract (numbering gaps): return type, some
   local declarations (param_decl, next_param), braces and the
   pushlevel/make-BLOCK calls implied by "Enter a new binding level".  Code
   kept byte-identical.  */
1772 begin_subprog_body (subprog_decl)
1775   tree param_decl_list;
/* Only save/restore backend per-function context when entering a nested
   function (depth > 0 before increment).  */
1779   if (function_nesting_depth++ != 0)
1780     push_function_context ();
1782   announce_function (subprog_decl);
1784   /* Make this field nonzero so further routines know that this is not
1785      tentative.  error_mark_node is replaced below (in poplevel) with the
1787   DECL_INITIAL (subprog_decl) = error_mark_node;
1789   /* This function exists in static storage.  This does not mean `static' in
1791   TREE_STATIC (subprog_decl) = 1;
1793   /* Enter a new binding level.  */
1794   current_function_decl = subprog_decl;
1797   /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1798      subprogram body) so that they can be recognized as local variables in the
1801      The list of PARM_DECL nodes is stored in the right order in
1802      DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
1803      which they are transmitted to `pushdecl' we need to reverse the list of
1804      PARM_DECLs if we want it to be stored in the right order.  The reason why
1805      we want to make sure the PARM_DECLs are stored in the correct order is
1806      that this list will be retrieved in a few lines with a call to `getdecl'
1807      to store it back into the DECL_ARGUMENTS field.  */
1808   param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1810   for (param_decl = param_decl_list; param_decl; param_decl = next_param)
/* Unchain each PARM_DECL before pushdecl, which will rechain it into the
   new binding level.  */
1812       next_param = TREE_CHAIN (param_decl);
1813       TREE_CHAIN (param_decl) = NULL;
1814       pushdecl (param_decl);
1817   /* Store back the PARM_DECL nodes.  They appear in the right order.  */
1818   DECL_ARGUMENTS (subprog_decl) = getdecls ();
1820   init_function_start (subprog_decl, input_filename, lineno);
1821   expand_function_start (subprog_decl, 0);
1823   /* If this function is `main', emit a call to `__main'
1824      to run global initializers, etc.  */
1825   if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1826       && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1827       && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1828     expand_main_function ();
1831 /* Finish the definition of the current subprogram and compile it all the way
1832    to assembler language output.  */
/* NOTE(review): the function header itself (presumably `end_subprog_body`)
   and its opening lines are elided in this extract (numbering gap
   1832 -> 1841), along with local declarations (cico_list, decl), the
   poplevel call, braces and the ggc_pop_context pairing with
   ggc_push_context below.  Code kept byte-identical.  */
1841   BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1842     = current_function_decl;
1844   /* Mark the RESULT_DECL as being in this subprogram. */
1845   DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1847   expand_function_end (input_filename, lineno, 0);
1849   /* If this is a nested function, push a new GC context.  That will keep
1850      local variables on the stack from being collected while we're doing
1851      the compilation of this function.  */
1852   if (function_nesting_depth > 1)
1853     ggc_push_context ();
1855   rest_of_compilation (current_function_decl);
1857   if (function_nesting_depth > 1)
1861   /* If we're sure this function is defined in this file then mark it
1863   if (TREE_ASM_WRITTEN (current_function_decl))
1864     mark_fn_defined_in_this_file (current_function_decl);
1867   /* Throw away any VAR_DECLs we made for OUT parameters; they must
1868      not be seen when we call this function and will be in
1869      unallocated memory anyway.  */
1870   for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1871        cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1872     TREE_VALUE (cico_list) = 0;
1874   if (DECL_SAVED_INSNS (current_function_decl) == 0)
1876       /* Throw away DECL_RTL in any PARM_DECLs unless this function
1877 	 was saved for inline, in which case the DECL_RTLs are in
1878 	 preserved memory.  */
1879       for (decl = DECL_ARGUMENTS (current_function_decl);
1880 	   decl != 0; decl = TREE_CHAIN (decl))
1882 	  SET_DECL_RTL (decl, 0);
1883 	  DECL_INCOMING_RTL (decl) = 0;
1886       /* Similarly, discard DECL_RTL of the return value.  */
1887       SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1889       /* But DECL_INITIAL must remain nonzero so we know this
1890 	 was an actual function definition unless toplev.c decided not
1892       if (DECL_INITIAL (current_function_decl) != 0)
1893 	DECL_INITIAL (current_function_decl) = error_mark_node;
1895       DECL_ARGUMENTS (current_function_decl) = 0;
1898   /* If we are not at the bottom of the function nesting stack, pop up to
1899      the containing function.  Otherwise show we aren't in any function.  */
1900   if (--function_nesting_depth != 0)
1901     pop_function_context ();
1903     current_function_decl = 0;
1906 /* Return a definition for a builtin function named NAME and whose data type
1907    is TYPE.  TYPE should be a function type with argument types.
1908    FUNCTION_CODE tells later passes how to compile calls to this function.
1909    See tree.h for its possible values.
1911    If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1912    the name to be called if we can't opencode the function.  */
/* NOTE(review): elided in this extract (numbering gaps): return type, some
   parameter declarations, braces, the pushdecl call and the final return.
   Code kept byte-identical.  Note `class' is a legal identifier in C.  */
1915 builtin_function (name, type, function_code, class, library_name)
1919      enum built_in_class class;
1920      const char *library_name;
1922   tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1924   DECL_EXTERNAL (decl) = 1;
1925   TREE_PUBLIC (decl) = 1;
1927     SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1930   DECL_BUILT_IN_CLASS (decl) = class;
1931   DECL_FUNCTION_CODE (decl) = function_code;
1935 /* Return an integer type with the number of bits of precision given by
1936    PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
1937    it is a signed type.  */
/* NOTE(review): elided in this extract (numbering gaps): return type,
   parameter declarations, the local `t` and `type_name` declarations,
   braces and the final return.  Code kept byte-identical.  */
1940 gnat_type_for_size (precision, unsignedp)
/* Small precisions are memoized in the signed_and_unsigned_types cache,
   indexed by [precision][unsignedp].  */
1947   if (precision <= 2 * MAX_BITS_PER_WORD
1948       && signed_and_unsigned_types[precision][unsignedp] != 0)
1949     return signed_and_unsigned_types[precision][unsignedp];
1952     t = make_unsigned_type (precision);
1954     t = make_signed_type (precision);
1956   if (precision <= 2 * MAX_BITS_PER_WORD)
1957     signed_and_unsigned_types[precision][unsignedp] = t;
1959   if (TYPE_NAME (t) == 0)
/* Synthesize a name such as "SIGNED_32" / "UNSIGNED_32" for debug info.  */
1961       sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1962       TYPE_NAME (t) = get_identifier (type_name);
1968 /* Likewise for floating-point types.  */
/* NOTE(review): elided in this extract (numbering gaps): return type,
   parameter declaration for `precision`, local declarations, the layout_type
   call and the abort branch implied by the mode check, and the final return.
   Code kept byte-identical.  */
1971 float_type_for_size (precision, mode)
1973      enum machine_mode mode;
/* REAL_TYPEs are memoized per machine mode in float_types[].  */
1978   if (float_types[(int) mode] != 0)
1979     return float_types[(int) mode];
1981   float_types[(int) mode] = t = make_node (REAL_TYPE);
1982   TYPE_PRECISION (t) = precision;
/* Sanity check: laying out the type must have produced the requested mode.  */
1985   if (TYPE_MODE (t) != mode)
1988   if (TYPE_NAME (t) == 0)
1990       sprintf (type_name, "FLOAT_%d", precision);
1991       TYPE_NAME (t) = get_identifier (type_name);
1997 /* Return a data type that has machine mode MODE.  UNSIGNEDP selects
1998    an unsigned type; otherwise a signed type is returned.  */
/* NOTE(review): return type, the `unsignedp` parameter declaration and
   braces are elided in this extract.  Code kept byte-identical.  */
2001 gnat_type_for_mode (mode, unsignedp)
2002      enum machine_mode mode;
/* Dispatch on the mode class: float modes go to float_type_for_size,
   everything else to gnat_type_for_size.  */
2005   if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2006     return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2008     return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2011 /* Return the unsigned version of a TYPE_NODE, a scalar type.  */
/* NOTE(review): return type, parameter declaration, braces and the final
   return are elided in this extract.  Code kept byte-identical.  */
2014 gnat_unsigned_type (type_node)
2017   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For Ada modular types (or subtypes thereof), copy the cached type so we
   can hang the modular base type off TREE_TYPE without clobbering the
   shared node.  */
2019   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2021       type = copy_node (type);
2022       TREE_TYPE (type) = type_node;
2024   else if (TREE_TYPE (type_node) != 0
2025 	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2026 	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2028       type = copy_node (type);
2029       TREE_TYPE (type) = TREE_TYPE (type_node);
2035 /* Return the signed version of a TYPE_NODE, a scalar type.  */
/* NOTE(review): mirror image of gnat_unsigned_type (unsignedp == 0); the
   same lines are elided in this extract (return type, parameter
   declaration, braces, final return).  Code kept byte-identical.  */
2038 gnat_signed_type (type_node)
2041   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2043   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2045       type = copy_node (type);
2046       TREE_TYPE (type) = type_node;
2048   else if (TREE_TYPE (type_node) != 0
2049 	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2050 	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2052       type = copy_node (type);
2053       TREE_TYPE (type) = TREE_TYPE (type_node);
2059 /* Return a type the same as TYPE except unsigned or signed according to
/* NOTE(review): the end of this comment ("UNSIGNEDP"), return type,
   parameter declarations and braces are elided in this extract.  Code kept
   byte-identical.  */
2063 gnat_signed_or_unsigned_type (unsignedp, type)
/* Non-integral types, or types already of the requested signedness, are
   returned unchanged (the `return type;` line is elided here).  */
2067   if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2070     return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2073 /* EXP is an expression for the size of an object.  If this size contains
2074    discriminant references, replace them with the maximum (if MAX_P) or
2075    minimum (if ! MAX_P) possible value of the discriminant.  */
/* NOTE(review): heavily elided in this extract (numbering gaps): return
   type, parameter declarations, braces, several `case`/`return` lines of
   the outer switch on TREE_CODE_CLASS and of the inner switch on arity,
   and the final abort/default.  Code kept byte-identical.  */
2078 max_size (exp, max_p)
2082   enum tree_code code = TREE_CODE (exp);
2083   tree type = TREE_TYPE (exp);
2085   switch (TREE_CODE_CLASS (code))
/* TREE_LIST: rebuild the list, recursing on each value and chain.  */
2092       if (code == TREE_LIST)
2093 	return tree_cons (TREE_PURPOSE (exp),
2094 			  max_size (TREE_VALUE (exp), max_p),
2095 			  TREE_CHAIN (exp) != 0
2096 			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
2100       /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2101 	 modify.  Otherwise, we abort since it is something we can't
2103       if (! contains_placeholder_p (exp))
/* Replace the discriminant reference with the extreme value of its type;
   the field type is taken from operand 1 (the COMPONENT_REF field).  */
2106       type = TREE_TYPE (TREE_OPERAND (exp, 1));
2108 	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2111       return max_p ? size_one_node : size_zero_node;
/* Expressions: dispatch on operand count (unary/binary/ternary).  */
2116       switch (TREE_CODE_LENGTH (code))
2119 	  if (code == NON_LVALUE_EXPR)
2120 	    return max_size (TREE_OPERAND (exp, 0), max_p);
/* NEGATE_EXPR flips the min/max sense for its operand.  */
2123 	      fold (build1 (code, type,
2124 			    max_size (TREE_OPERAND (exp, 0),
2125 				      code == NEGATE_EXPR ? ! max_p : max_p)));
2128 	  if (code == RTL_EXPR)
2130 	  else if (code == COMPOUND_EXPR)
2131 	    return max_size (TREE_OPERAND (exp, 1), max_p);
2132 	  else if (code == WITH_RECORD_EXPR)
/* Binary case: MINUS flips the sense for the right operand.  */
2136 	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2137 	    tree rhs = max_size (TREE_OPERAND (exp, 1),
2138 				 code == MINUS_EXPR ? ! max_p : max_p);
2140 	    /* Special-case wanting the maximum value of a MIN_EXPR.
2141 	       In that case, if one side overflows, return the other.
2142 	       sizetype is signed, but we know sizes are non-negative.
2143 	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2144 	       overflowing or the maximum possible value and the RHS
2146 	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2148 	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2150 	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2151 		     && (TREE_OVERFLOW (lhs)
2152 			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2153 		     && ! TREE_CONSTANT (rhs))
2156 	      return fold (build (code, type, lhs, rhs));
2160 	  if (code == SAVE_EXPR)
/* COND_EXPR: the maximum size is the MAX of the two arms (the condition
   itself cannot change the bound).  */
2162 	  else if (code == COND_EXPR)
2163 	    return fold (build (MAX_EXPR, type,
2164 				max_size (TREE_OPERAND (exp, 1), max_p),
2165 				max_size (TREE_OPERAND (exp, 2), max_p)));
2166 	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2167 	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2168 			  max_size (TREE_OPERAND (exp, 1), max_p));
2175 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2176 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2177 Return a constructor for the template. */
2180 build_template (template_type, array_type, expr)
/* Build a CONSTRUCTOR for TEMPLATE_TYPE, a bounds-template record holding one
   (low, high) pair of fields per array dimension, from the index bounds of
   ARRAY_TYPE.  EXPR, if nonzero, anchors any PLACEHOLDER_EXPR occurring in
   those bounds.  NOTE(review): this is an elided listing; declaration lines
   and a few statements between the numbered lines are not visible here.  */
2185 tree template_elts = NULL_TREE;
2186 tree bound_list = NULL_TREE;
/* Peel off a padding record or left-justified modular wrapper so we look at
   the underlying array type itself.  */
2189 if (TREE_CODE (array_type) == RECORD_TYPE
2190 && (TYPE_IS_PADDING_P (array_type)
2191 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2192 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* Prefer the recorded actual bounds when the type carries them.  */
2194 if (TREE_CODE (array_type) == ARRAY_TYPE
2195 || (TREE_CODE (array_type) == INTEGER_TYPE
2196 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2197 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2199 /* First make the list for a CONSTRUCTOR for the template. Go down the
2200 field list of the template instead of the type chain because this
2201 array might be an Ada array of arrays and we can't tell where the
2202 nested arrays stop being the underlying object. */
/* Template fields come in (low-bound, high-bound) pairs, hence FIELD is
   advanced by two TREE_CHAINs per dimension.  */
2204 for (field = TYPE_FIELDS (template_type); field;
2206 ? (bound_list = TREE_CHAIN (bound_list))
2207 : (array_type = TREE_TYPE (array_type))),
2208 field = TREE_CHAIN (TREE_CHAIN (field)))
2210 tree bounds, min, max;
2212 /* If we have a bound list, get the bounds from there. Likewise
2213 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2214 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2215 This will give us a maximum range. */
2216 if (bound_list != 0)
2217 bounds = TREE_VALUE (bound_list);
2218 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2219 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2220 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2221 && DECL_BY_COMPONENT_PTR_P (expr))
2222 bounds = TREE_TYPE (field);
/* FIELD is the low-bound field and TREE_CHAIN (FIELD) the high-bound field
   (see the tree_cons pairing below), so MIN must be converted to FIELD's
   type and MAX to TREE_CHAIN (FIELD)'s type.  The previous code had the two
   target types swapped, converting each bound to the other field's type.  */
2226 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2227 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2229 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2230 surround them with a WITH_RECORD_EXPR giving EXPR as the
2232 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2233 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr)
2234 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2235 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
/* Prepend (high, low) for this dimension; the list is reversed at the end
   so the constructor lists fields in declaration order.  */
2237 template_elts = tree_cons (TREE_CHAIN (field), max,
2238 tree_cons (field, min, template_elts));
2241 return build_constructor (template_type, nreverse (template_elts));
2244 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2245 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2246 in the type contains in its DECL_INITIAL the expression to use when
2247 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2248 to print out an error message if the mechanism cannot be applied to
2249 an object of that type and also for the name. */
2252 build_vms_descriptor (type, mech, gnat_entity)
/* Build a VMS argument-descriptor RECORD_TYPE describing an object of TYPE
   passed by mechanism MECH; each FIELD_DECL's DECL_INITIAL holds the value
   a constructor for the descriptor should use.  GNAT_ENTITY names the Ada
   entity for error reporting.  NOTE(review): elided listing -- declaration
   lines, case labels, and closing braces between the numbered lines are not
   visible here.  */
2254 Mechanism_Type mech;
2255 Entity_Id gnat_entity;
2257 tree record_type = make_node (RECORD_TYPE);
2258 tree field_list = 0;
2267 /* If TYPE is an unconstrained array, use the underlying array type. */
2268 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2269 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2271 /* If this is an array, compute the number of dimensions in the array,
2272 get the index types, and point to the inner type. */
2273 if (TREE_CODE (type) != ARRAY_TYPE
/* Count one dimension per nested multi-array level; INNER_TYPE ends up as
   the element type past the last array dimension.  */
2276 for (ndim = 1, inner_type = type;
2277 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2278 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2279 ndim++, inner_type = TREE_TYPE (inner_type))
2282 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Collect the index domains -- in reverse order for Fortran convention
   (unless NCA), since Fortran arrays are column-major.  */
2284 if (mech != By_Descriptor_NCA
2285 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2286 for (i = ndim - 1, inner_type = type;
2288 i--, inner_type = TREE_TYPE (inner_type))
2289 idx_arr[i] = TYPE_DOMAIN (inner_type);
2291 for (i = 0, inner_type = type;
2293 i++, inner_type = TREE_TYPE (inner_type))
2294 idx_arr[i] = TYPE_DOMAIN (inner_type);
2296 /* Now get the DTYPE value. */
/* DTYPE is the VMS data-type code stored in the descriptor; the numeric
   values below are the DSC$K_DTYPE_* codes for integers of each width,
   floats, and VAX floating formats.  */
2297 switch (TREE_CODE (type))
2301 if (TYPE_VAX_FLOATING_POINT_P (type))
2302 switch ((int) TYPE_DIGITS_VALUE (type))
2315 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2318 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2321 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2324 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2327 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2330 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2336 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2340 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2341 && TYPE_VAX_FLOATING_POINT_P (type))
2342 switch ((int) TYPE_DIGITS_VALUE (type))
2354 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2365 /* Get the CLASS value. */
/* CLASS selects the descriptor layout: A (contiguous array), NCA
   (noncontiguous array), SB (string with bounds), or the default.  */
2368 case By_Descriptor_A:
2371 case By_Descriptor_NCA:
2374 case By_Descriptor_SB:
2381 /* Make the type for a descriptor for VMS. The first four fields
2382 are the same for all types. */
2385 = chainon (field_list,
2386 make_descriptor_field
2387 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2388 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2390 field_list = chainon (field_list,
2391 make_descriptor_field ("DTYPE",
2392 gnat_type_for_size (8, 1),
2393 record_type, size_int (dtype)));
2394 field_list = chainon (field_list,
2395 make_descriptor_field ("CLASS",
2396 gnat_type_for_size (8, 1),
2397 record_type, size_int (class)));
/* POINTER's initializer is the address of the described object, located
   at expansion time via a PLACEHOLDER_EXPR.  */
2400 = chainon (field_list,
2401 make_descriptor_field ("POINTER",
2402 build_pointer_type (type),
2405 build_pointer_type (type),
2406 build (PLACEHOLDER_EXPR,
/* The remaining fields depend on the mechanism.  */
2412 case By_Descriptor_S:
2415 case By_Descriptor_SB:
/* SB descriptors carry the (single) dimension's bounds directly.  */
2417 = chainon (field_list,
2418 make_descriptor_field
2419 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2420 TREE_CODE (type) == ARRAY_TYPE
2421 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2423 = chainon (field_list,
2424 make_descriptor_field
2425 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2426 TREE_CODE (type) == ARRAY_TYPE
2427 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2430 case By_Descriptor_A:
2431 case By_Descriptor_NCA:
2432 field_list = chainon (field_list,
2433 make_descriptor_field ("SCALE",
2434 gnat_type_for_size (8, 1),
2438 field_list = chainon (field_list,
2439 make_descriptor_field ("DIGITS",
2440 gnat_type_for_size (8, 1),
2445 = chainon (field_list,
2446 make_descriptor_field
2447 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2448 size_int (mech == By_Descriptor_NCA
2450 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2451 : (TREE_CODE (type) == ARRAY_TYPE
2452 && TYPE_CONVENTION_FORTRAN_P (type)
2455 field_list = chainon (field_list,
2456 make_descriptor_field ("DIMCT",
2457 gnat_type_for_size (8, 1),
2461 field_list = chainon (field_list,
2462 make_descriptor_field ("ARSIZE",
2463 gnat_type_for_size (32, 1),
2465 size_in_bytes (type)));
2467 /* Now build a pointer to the 0,0,0... element. */
/* Index each dimension at zero; the A0 field is the address of that
   conceptual origin element.  */
2468 tem = build (PLACEHOLDER_EXPR, type);
2469 for (i = 0, inner_type = type; i < ndim;
2470 i++, inner_type = TREE_TYPE (inner_type))
2471 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2472 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2475 = chainon (field_list,
2476 make_descriptor_field
2477 ("A0", build_pointer_type (inner_type), record_type,
2478 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2480 /* Next come the addressing coefficients. */
/* Each coefficient is the running product of extents (max - min + 1)
   of the preceding dimensions; named S<i> for NCA, M<i> otherwise.  */
2482 for (i = 0; i < ndim; i++)
2486 = size_binop (MULT_EXPR, tem,
2487 size_binop (PLUS_EXPR,
2488 size_binop (MINUS_EXPR,
2489 TYPE_MAX_VALUE (idx_arr[i]),
2490 TYPE_MIN_VALUE (idx_arr[i])),
2493 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2494 fname[1] = '0' + i, fname[2] = 0;
2496 = chainon (field_list,
2497 make_descriptor_field (fname,
2498 gnat_type_for_size (32, 1),
2499 record_type, idx_length));
2501 if (mech == By_Descriptor_NCA)
2505 /* Finally here are the bounds. */
/* Per-dimension lower (L<i>) and upper (U<i>, elided here) bound fields.  */
2506 for (i = 0; i < ndim; i++)
2510 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2512 = chainon (field_list,
2513 make_descriptor_field
2514 (fname, gnat_type_for_size (32, 1), record_type,
2515 TYPE_MIN_VALUE (idx_arr[i])));
2519 = chainon (field_list,
2520 make_descriptor_field
2521 (fname, gnat_type_for_size (32, 1), record_type,
2522 TYPE_MAX_VALUE (idx_arr[i])));
2527 post_error ("unsupported descriptor type for &", gnat_entity);
/* Lay out the record and declare it under <entity>_DESC for debugging.  */
2530 finish_record_type (record_type, field_list, 0, 1);
2531 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2537 /* Utility routine for above code to make a field. */
2540 make_descriptor_field (name, type, rec_type, initial)
/* Create a FIELD_DECL named NAME of TYPE in REC_TYPE, stashing INITIAL in
   its DECL_INITIAL for later use when a descriptor constructor is built
   (see build_vms_descriptor above).  NOTE(review): elided listing -- the
   parameter declarations and return statement are not visible here.  */
2547 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2549 DECL_INITIAL (field) = initial;
2553 /* Build a type to be used to represent an aliased object whose nominal
2554 type is an unconstrained array. This consists of a RECORD_TYPE containing
2555 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2556 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2557 is used to represent an arbitrary unconstrained object. Use NAME
2558 as the name of the record. */
2561 build_unc_object_type (template_type, object_type, name)
/* Make a RECORD_TYPE named NAME with a BOUNDS field of TEMPLATE_TYPE
   followed by an ARRAY field of OBJECT_TYPE, used to represent an aliased
   object of unconstrained nominal type.  NOTE(review): elided listing --
   parameter declarations and the trailing return are not visible here.  */
2566 tree type = make_node (RECORD_TYPE);
2567 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2568 template_type, type, 0, 0, 0, 1);
2569 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2572 TYPE_NAME (type) = name;
/* Mark the record so other code knows the first field is a template.  */
2573 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2574 finish_record_type (type,
2575 chainon (chainon (NULL_TREE, template_field),
2582 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2583 the normal case this is just two adjustments, but we have more to do
2584 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2587 update_pointer_to (old_type, new_type)
/* Redirect every pointer and reference type that targeted OLD_TYPE so it
   targets NEW_TYPE instead.  Simple retargeting suffices unless NEW_TYPE is
   an UNCONSTRAINED_ARRAY_TYPE, whose "pointer" is really a fat-pointer
   record that must be rewired field by field.  NOTE(review): elided listing
   -- declarations, braces, and some statements between the numbered lines
   are not visible here.  */
2591 tree ptr = TYPE_POINTER_TO (old_type);
2592 tree ref = TYPE_REFERENCE_TO (old_type);
2595 /* If this is the main variant, process all the other variants first. */
2596 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2597 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2598 type = TYPE_NEXT_VARIANT (type))
2599 update_pointer_to (type, new_type);
2601 /* If no pointer or reference, we are done. Otherwise, get the new type with
2602 the same qualifiers as the old type and see if it is the same as the old
2604 if (ptr == 0 && ref == 0)
2607 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2608 if (old_type == new_type)
2611 /* First handle the simple case. */
2612 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2615 TREE_TYPE (ptr) = new_type;
2616 TYPE_POINTER_TO (new_type) = ptr;
2619 TREE_TYPE (ref) = new_type;
2620 TYPE_REFERENCE_TO (new_type) = ref;
/* Re-emit debug info for named pointer/reference TYPE_DECLs now that
   their target changed (skipped for enums).  */
2622 if (ptr != 0 && TYPE_NAME (ptr) != 0
2623 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2624 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2625 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2626 global_bindings_p (), 0);
2627 if (ref != 0 && TYPE_NAME (ref) != 0
2628 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2629 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2630 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2631 global_bindings_p (), 0);
2634 /* Now deal with the unconstrained array case. In this case the "pointer"
2635 is actually a RECORD_TYPE where the types of both fields are
2636 pointers to void. In that case, copy the field list from the
2637 old type to the new one and update the fields' context. */
2638 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2643 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
/* Graft NEW_TYPE's fat-pointer fields onto the old fat-pointer record
   in place, so existing users of PTR see the new layout.  */
2648 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2649 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2650 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2652 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2655 ??? This is now the only use of gnat_substitute_in_type, which
2656 is now a very "heavy" routine to do this, so it should be replaced
2658 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2659 new_ref = build (COMPONENT_REF, ptr_temp_type,
2660 build (PLACEHOLDER_EXPR, ptr),
2661 TREE_CHAIN (TYPE_FIELDS (ptr)));
2664 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2665 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2666 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
/* All variants of the fat pointer now designate NEW_TYPE.  */
2668 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2669 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2671 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2672 = TREE_TYPE (new_type) = ptr;
2674 /* Now handle updating the allocation record, what the thin pointer
2675 points to. Update all pointers from the old record into the new
2676 one, update the types of the fields, and recompute the size. */
2678 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2680 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2681 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2682 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2683 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2684 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2685 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2686 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
/* Object record size = template field size + array field size, in bits
   and in bytes respectively.  */
2688 TYPE_SIZE (new_obj_rec)
2689 = size_binop (PLUS_EXPR,
2690 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2691 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2692 TYPE_SIZE_UNIT (new_obj_rec)
2693 = size_binop (PLUS_EXPR,
2694 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2695 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2696 rest_of_type_compilation (ptr, global_bindings_p ());
2700 /* Convert a pointer to a constrained array into a pointer to a fat
2701 pointer. This involves making or finding a template. */
2704 convert_to_fat_pointer (type, expr)
/* Convert EXPR, a pointer to a constrained array, into fat-pointer TYPE
   (a record of data pointer + template pointer), making or finding the
   bounds template as needed.  NOTE(review): elided listing -- declarations
   and some statements between the numbered lines are not visible here.  */
2708 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2709 tree template, template_addr;
2710 tree etype = TREE_TYPE (expr);
2712 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2713 pointer to the template and array. */
2714 if (integer_zerop (expr))
2718 tree_cons (TYPE_FIELDS (type),
2719 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2720 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2721 convert (build_pointer_type (template_type),
2725 /* If EXPR is a thin pointer, make the template and data from the record. */
2727 else if (TYPE_THIN_POINTER_P (etype))
2729 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Save EXPR since it is used twice below (template and data parts).  */
2731 expr = save_expr (expr);
/* If it is the address of a record, use the record directly; otherwise
   dereference to reach the record the thin pointer designates.  */
2732 if (TREE_CODE (expr) == ADDR_EXPR)
2733 expr = TREE_OPERAND (expr, 0);
2735 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the designated record is the template, the next is
   the array data, whose address becomes the fat pointer's data part.  */
2737 template = build_component_ref (expr, NULL_TREE, fields);
2738 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2739 build_component_ref (expr, NULL_TREE,
2740 TREE_CHAIN (fields)));
2743 /* Otherwise, build the constructor for the template. */
2744 template = build_template (template_type, TREE_TYPE (etype), expr);
2746 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2748 /* The result is a CONSTRUCTOR for the fat pointer. */
2750 build_constructor (type,
2751 tree_cons (TYPE_FIELDS (type), expr,
2752 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2753 template_addr, NULL_TREE)));
2756 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2757 is something that is a fat pointer, so convert to it first if EXPR
2758 is not already a fat pointer. */
2761 convert_to_thin_pointer (type, expr)
/* Convert EXPR to thin-pointer TYPE.  EXPR is first normalized to a fat
   pointer if it is not one already; the data pointer is then extracted and
   relabeled with a NOP_EXPR.  NOTE(review): elided listing -- declarations
   and the trailing return are not visible here.  */
2765 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2767 = convert_to_fat_pointer
2768 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2770 /* We get the pointer to the data and use a NOP_EXPR to make it the
2772 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2773 expr = build1 (NOP_EXPR, type, expr);
2778 /* Create an expression whose value is that of EXPR,
2779 converted to type TYPE. The TREE_TYPE of the value
2780 is always TYPE. This function implements all reasonable
2781 conversions; callers should filter out those that are
2782 not permitted by the language being compiled. */
2785 convert (type, expr)
/* Create an expression whose value is that of EXPR converted to TYPE,
   handling all the GNAT-specific cases (padding records, biased and
   left-justified modular types, fat/thin pointers, unconstrained arrays)
   before falling back to the language-independent convert_to_* routines.
   NOTE(review): elided listing -- declarations, case labels, and braces
   between the numbered lines are not visible here.  */
2788 enum tree_code code = TREE_CODE (type);
2789 tree etype = TREE_TYPE (expr);
2790 enum tree_code ecode = TREE_CODE (etype);
2793 /* If EXPR is already the right type, we are done. */
2797 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2799 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2800 return build (WITH_RECORD_EXPR, type,
2801 convert (type, TREE_OPERAND (expr, 0)),
2802 TREE_OPERAND (expr, 1));
2804 /* If the input type has padding, remove it by doing a component reference
2805 to the field. If the output type has padding, make a constructor
2806 to build the record. If both input and output have padding and are
2807 of variable size, do this as an unchecked conversion. */
2808 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2809 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2810 && (! TREE_CONSTANT (TYPE_SIZE (type))
2811 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2813 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2815 /* If we have just converted to this padded type, just get
2816 the inner expression. */
2817 if (TREE_CODE (expr) == CONSTRUCTOR
2818 && CONSTRUCTOR_ELTS (expr) != 0
2819 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2820 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2822 return convert (type, build_component_ref (expr, NULL_TREE,
2823 TYPE_FIELDS (etype)));
2825 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2827 /* If we previously converted from another type and our type is
2828 of variable size, remove the conversion to avoid the need for
2829 variable-size temporaries. */
2830 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2831 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2832 expr = TREE_OPERAND (expr, 0);
2834 /* If we are just removing the padding from expr, convert the original
2835 object if we have variable size. That will avoid the need
2836 for some variable-size temporaries. */
2837 if (TREE_CODE (expr) == COMPONENT_REF
2838 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2839 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2840 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2841 return convert (type, TREE_OPERAND (expr, 0));
2843 /* If the result type is a padded type with a self-referentially-sized
2844 field and the expression type is a record, do this as an
2845 unchecked conversion. */
2846 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2847 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2848 && TREE_CODE (etype) == RECORD_TYPE)
2849 return unchecked_convert (type, expr);
2853 build_constructor (type,
2854 tree_cons (TYPE_FIELDS (type),
2856 (TYPE_FIELDS (type)),
2861 /* If the input is a biased type, adjust first. */
/* A biased value stores (value - min); add the bias back before the
   regular conversion.  */
2862 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2863 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2864 fold (build1 (GNAT_NOP_EXPR,
2865 TREE_TYPE (etype), expr)),
2866 TYPE_MIN_VALUE (etype))));
2868 /* If the input is a left-justified modular type, we need to extract
2869 the actual object before converting it to any other type with the
2870 exception of an unconstrained array. */
2871 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2872 && code != UNCONSTRAINED_ARRAY_TYPE)
2873 return convert (type, build_component_ref (expr, NULL_TREE,
2874 TYPE_FIELDS (etype)));
2876 /* If converting a type that does not contain a template into one
2877 that does, convert to the data type and then build the template. */
2878 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2879 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2881 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2886 tree_cons (TYPE_FIELDS (type),
2887 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2888 obj_type, NULL_TREE),
2889 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2890 convert (obj_type, expr), NULL_TREE)));
2893 /* There are some special cases of expressions that we process
2895 switch (TREE_CODE (expr))
2900 case TRANSFORM_EXPR:
2902 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2903 conversion in gnat_expand_expr. NULL_EXPR does not represent
2904 an actual value, so no conversion is needed. */
2905 TREE_TYPE (expr) = type;
2910 /* If we are converting a STRING_CST to another constrained array type,
2911 just make a new one in the proper type. Likewise for a
2912 CONSTRUCTOR. But if the mode of the type is different, we must
2913 ensure a new RTL is made for the constant. */
2914 if (code == ecode && AGGREGATE_TYPE_P (etype)
2915 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2916 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2918 expr = copy_node (expr);
2919 TREE_TYPE (expr) = type;
2921 if (TYPE_MODE (type) != TYPE_MODE (etype))
2922 TREE_CST_RTL (expr) = 0;
2929 /* If we are converting between two aggregate types of the same
2930 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2931 This avoid unneeded conversions which makes reference computations
2933 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2934 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2935 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2936 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2937 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2938 TREE_OPERAND (expr, 1));
2942 case UNCONSTRAINED_ARRAY_REF:
2943 /* Convert this to the type of the inner array by getting the address of
2944 the array from the template. */
2945 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2946 build_component_ref (TREE_OPERAND (expr, 0),
2947 get_identifier ("P_ARRAY"),
2949 etype = TREE_TYPE (expr);
2950 ecode = TREE_CODE (etype);
2953 case VIEW_CONVERT_EXPR:
/* Collapse nested view conversions between (non-fat-pointer) aggregates;
   converting the inner operand directly is equivalent.  */
2954 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2955 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2956 return convert (type, TREE_OPERAND (expr, 0));
2960 /* If both types are record types, just convert the pointer and
2961 make a new INDIRECT_REF.
2963 ??? Disable this for now since it causes problems with the
2964 code in build_binary_op for MODIFY_EXPR which wants to
2965 strip off conversions. But that code really is a mess and
2966 we need to do this a much better way some time. */
2968 && (TREE_CODE (type) == RECORD_TYPE
2969 || TREE_CODE (type) == UNION_TYPE)
2970 && (TREE_CODE (etype) == RECORD_TYPE
2971 || TREE_CODE (etype) == UNION_TYPE)
2972 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2973 return build_unary_op (INDIRECT_REF, NULL_TREE,
2974 convert (build_pointer_type (type),
2975 TREE_OPERAND (expr, 0)));
2982 /* Check for converting to a pointer to an unconstrained array. */
2983 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2984 return convert_to_fat_pointer (type, expr);
/* Same variant, or integer subtype/base-type pair: a NOP suffices.
   CODE and ECODE are type codes (TREE_CODE of TYPE and ETYPE), so the
   test must be against INTEGER_TYPE; the previous INTEGER_CST test could
   never be true of a type node and made this fast path dead.  */
2986 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2987 || (code == INTEGER_TYPE && ecode == INTEGER_TYPE
2988 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2989 return fold (build1 (NOP_EXPR, type, expr));
2994 return build1 (CONVERT_EXPR, type, expr);
2997 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2998 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2999 return unchecked_convert (type, expr);
/* Converting to a biased type: subtract the bias after converting to
   the base type.  */
3000 else if (TYPE_BIASED_REPRESENTATION_P (type))
3001 return fold (build1 (CONVERT_EXPR, type,
3002 fold (build (MINUS_EXPR, TREE_TYPE (type),
3003 convert (TREE_TYPE (type), expr),
3004 TYPE_MIN_VALUE (type)))));
3006 /* ... fall through ... */
3009 return fold (convert_to_integer (type, expr));
3012 case REFERENCE_TYPE:
3013 /* If converting between two pointers to records denoting
3014 both a template and type, adjust if needed to account
3015 for any differing offsets, since one might be negative. */
3016 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3019 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3020 bit_position (TYPE_FIELDS (TREE_TYPE (type))))
3021 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3022 sbitsize_int (BITS_PER_UNIT));
3024 expr = build1 (NOP_EXPR, type, expr);
3025 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3026 if (integer_zerop (byte_diff))
3029 return build_binary_op (PLUS_EXPR, type, expr,
3030 fold (convert_to_pointer (type, byte_diff)));
3033 /* If converting to a thin pointer, handle specially. */
3034 if (TYPE_THIN_POINTER_P (type)
3035 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3036 return convert_to_thin_pointer (type, expr);
3038 /* If converting fat pointer to normal pointer, get the pointer to the
3039 array and then convert it. */
3040 else if (TYPE_FAT_POINTER_P (etype))
3041 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3044 return fold (convert_to_pointer (type, expr));
3047 return fold (convert_to_real (type, expr));
3050 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3053 (type, tree_cons (TYPE_FIELDS (type),
3054 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3057 /* ... fall through ... */
3060 /* In these cases, assume the front-end has validated the conversion.
3061 If the conversion is valid, it will be a bit-wise conversion, so
3062 it can be viewed as an unchecked conversion. */
3063 return unchecked_convert (type, expr);
3066 /* Just validate that the type is indeed that of a field
3067 of the type. Then make the simple conversion. */
3068 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3069 if (TREE_TYPE (tem) == etype)
3070 return build1 (CONVERT_EXPR, type, expr);
3074 case UNCONSTRAINED_ARRAY_TYPE:
3075 /* If EXPR is a constrained array, take its address, convert it to a
3076 fat pointer, and then dereference it. Likewise if EXPR is a
3077 record containing both a template and a constrained array.
3078 Note that a record representing a left justified modular type
3079 always represents a packed constrained array. */
3080 if (ecode == ARRAY_TYPE
3081 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3082 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3083 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3086 (INDIRECT_REF, NULL_TREE,
3087 convert_to_fat_pointer (TREE_TYPE (type),
3088 build_unary_op (ADDR_EXPR,
3091 /* Do something very similar for converting one unconstrained
3092 array to another. */
3093 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3095 build_unary_op (INDIRECT_REF, NULL_TREE,
3096 convert (TREE_TYPE (type),
3097 build_unary_op (ADDR_EXPR,
3103 return fold (convert_to_complex (type, expr));
3110 /* Remove all conversions that are done in EXP. This includes converting
3111 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3112 is nonzero, always return the address of the containing object even if
3113 the address is not bit-aligned. */
3116 remove_conversions (exp, true_address)
/* Strip all conversion wrappers from EXP: padded-type component references,
   left-justified-modular constructors (only when TRUE_ADDRESS is nonzero),
   and the various no-op conversion codes.  NOTE(review): elided listing --
   case labels, breaks, and the final return are not visible here.  */
3120 switch (TREE_CODE (exp))
/* A constructor for a left-justified modular record wraps the real value
   as its single element; unwrap it when the caller wants the address.  */
3124 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3125 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3126 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
/* A COMPONENT_REF into a padding record is just padding removal.  */
3130 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3131 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3132 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3135 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3136 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3137 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3146 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3147 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3148 likewise return an expression pointing to the underlying array. */
3151 maybe_unconstrained_array (exp)
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE or a template-containing
   record, return an expression denoting the underlying array; otherwise
   EXP is returned unchanged (return elided from this listing).
   NOTE(review): elided listing -- declarations, case labels, and some
   statements between the numbered lines are not visible here.  */
3154 enum tree_code code = TREE_CODE (exp);
3157 switch (TREE_CODE (TREE_TYPE (exp)))
3159 case UNCONSTRAINED_ARRAY_TYPE:
/* Dereference the P_ARRAY pointer held in the fat pointer.  */
3160 if (code == UNCONSTRAINED_ARRAY_REF)
3163 = build_unary_op (INDIRECT_REF, NULL_TREE,
3164 build_component_ref (TREE_OPERAND (exp, 0),
3165 get_identifier ("P_ARRAY"),
/* Propagate readonly-ness from the original reference.  */
3167 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
/* A NULL_EXPR keeps no value; just retype it to the array type.  */
3171 else if (code == NULL_EXPR)
3172 return build1 (NULL_EXPR,
3173 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3174 (TREE_TYPE (TREE_TYPE (exp))))),
3175 TREE_OPERAND (exp, 0));
/* Recurse inside a WITH_RECORD_EXPR and rewrap only if it changed.  */
3177 else if (code == WITH_RECORD_EXPR
3178 && (TREE_OPERAND (exp, 0)
3179 != (new = maybe_unconstrained_array
3180 (TREE_OPERAND (exp, 0)))))
3181 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3182 TREE_OPERAND (exp, 1));
/* Template-containing record: the second field is the array; strip any
   padding record around it.  */
3185 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3188 = build_component_ref (exp, NULL_TREE,
3189 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3190 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3191 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3192 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3205 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3208 unchecked_convert (type, expr)
3212 tree etype = TREE_TYPE (expr);
3214 /* If the expression is already the right type, we are done. */
3218 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3220 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3221 return build (WITH_RECORD_EXPR, type,
3222 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3223 TREE_OPERAND (expr, 1));
3225 /* If both types are integral just do a normal conversion.
3226 Likewise for a conversion to an unconstrained array. */
3227 if ((((INTEGRAL_TYPE_P (type)
3228 && ! (TREE_CODE (type) == INTEGER_TYPE
3229 && TYPE_VAX_FLOATING_POINT_P (type)))
3230 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3231 || (TREE_CODE (type) == RECORD_TYPE
3232 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3233 && ((INTEGRAL_TYPE_P (etype)
3234 && ! (TREE_CODE (etype) == INTEGER_TYPE
3235 && TYPE_VAX_FLOATING_POINT_P (etype)))
3236 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3237 || (TREE_CODE (etype) == RECORD_TYPE
3238 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3239 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3243 if (TREE_CODE (etype) == INTEGER_TYPE
3244 && TYPE_BIASED_REPRESENTATION_P (etype))
3246 tree ntype = copy_type (etype);
3248 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3249 TYPE_MAIN_VARIANT (ntype) = ntype;
3250 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3253 if (TREE_CODE (type) == INTEGER_TYPE
3254 && TYPE_BIASED_REPRESENTATION_P (type))
3256 rtype = copy_type (type);
3257 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3258 TYPE_MAIN_VARIANT (rtype) = rtype;
3261 expr = convert (rtype, expr);
3263 expr = build1 (GNAT_NOP_EXPR, type, expr);
3266 /* If we are converting TO an integral type whose precision is not the
3267 same as its size, first unchecked convert to a record that contains
3268 an object of the output type. Then extract the field. */
3269 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3270 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3271 GET_MODE_BITSIZE (TYPE_MODE (type))))
3273 tree rec_type = make_node (RECORD_TYPE);
3274 tree field = create_field_decl (get_identifier ("OBJ"), type,
3275 rec_type, 1, 0, 0, 0);
3277 TYPE_FIELDS (rec_type) = field;
3278 layout_type (rec_type);
3280 expr = unchecked_convert (rec_type, expr);
3281 expr = build_component_ref (expr, NULL_TREE, field);
3284 /* Similarly for integral input type whose precision is not equal to its
3286 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3287 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3288 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3290 tree rec_type = make_node (RECORD_TYPE);
3292 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3295 TYPE_FIELDS (rec_type) = field;
3296 layout_type (rec_type);
3298 expr = build_constructor (rec_type, build_tree_list (field, expr));
3299 expr = unchecked_convert (type, expr);
3302 /* We have a special case when we are converting between two
3303 unconstrained array types. In that case, take the address,
3304 convert the fat pointer types, and dereference. */
3305 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3306 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3307 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3308 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3309 build_unary_op (ADDR_EXPR, NULL_TREE,
3313 expr = maybe_unconstrained_array (expr);
3314 etype = TREE_TYPE (expr);
3315 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3318 /* If the result is an integral type whose size is not equal to
3319 the size of the underlying machine type, sign- or zero-extend
3320 the result. We need not do this in the case where the input is
3321 an integral type of the same precision and signedness or if the output
3322 is a biased type or if both the input and output are unsigned. */
3323 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3324 && ! (TREE_CODE (type) == INTEGER_TYPE
3325 && TYPE_BIASED_REPRESENTATION_P (type))
3326 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3327 GET_MODE_BITSIZE (TYPE_MODE (type)))
3328 && ! (INTEGRAL_TYPE_P (etype)
3329 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3330 && operand_equal_p (TYPE_RM_SIZE (type),
3331 (TYPE_RM_SIZE (etype) != 0
3332 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3334 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3336 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3337 TREE_UNSIGNED (type));
3339 = convert (base_type,
3340 size_binop (MINUS_EXPR,
3342 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3343 TYPE_RM_SIZE (type)));
3346 build_binary_op (RSHIFT_EXPR, base_type,
3347 build_binary_op (LSHIFT_EXPR, base_type,
3348 convert (base_type, expr),
3353 /* An unchecked conversion should never raise Constraint_Error. The code
3354 below assumes that GCC's conversion routines overflow the same way that
3355 the underlying hardware does. This is probably true. In the rare case
3356 when it is false, we can rely on the fact that such conversions are
3357 erroneous anyway. */
3358 if (TREE_CODE (expr) == INTEGER_CST)
3359 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3361 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
3362 show no longer constant. */
3363 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3364 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3365 TREE_CONSTANT (expr) = 0;