1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
/* Forward declarations for the file-local helpers defined later in this
   file.  NOTE(review): build_simple_component_ref's definition is not
   visible in this extract -- confirm it exists in the full file.  */
47 static tree find_common_type (tree, tree);
48 static int contains_save_expr_p (tree);
49 static tree contains_null_expr (tree);
50 static tree compare_arrays (tree, tree, tree);
51 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
52 static tree build_simple_component_ref (tree, tree, tree, int);
54 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
57 This preparation consists of taking the ordinary representation of
58 an expression expr and producing a valid tree boolean expression
59 describing whether expr is nonzero. We could simply always do
61 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
63 but we optimize comparisons, &&, ||, and !.
65 The resulting type should always be the same as the input type.
66 This function is simpler than the corresponding C version since
67 the only possible operands will be things of Boolean type. */
/* NOTE(review): the left-margin numbers are non-contiguous, so lines of
   this function (return type, braces, several case labels and returns)
   are elided in this extract -- restore from the full source before
   making code changes.  */
70 gnat_truthvalue_conversion (tree expr)
72 tree type = TREE_TYPE (expr);
74 switch (TREE_CODE (expr))
/* Comparisons and short-circuit forms are already valid truth values;
   (elided here) they are returned essentially unchanged.  */
76 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
77 case LT_EXPR: case GT_EXPR:
78 case TRUTH_ANDIF_EXPR:
87 /* Distribute the conversion into the arms of a COND_EXPR. */
89 (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
90 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
91 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
/* Rebuild a WITH_RECORD_EXPR around the converted inner expression,
   keeping the same record object (operand 1).  */
93 case WITH_RECORD_EXPR:
94 return build (WITH_RECORD_EXPR, type,
95 gnat_truthvalue_conversion (TREE_OPERAND (expr, 0)),
96 TREE_OPERAND (expr, 1));
/* Fallback: compare the expression against zero of its own type.  */
99 return build_binary_op (NE_EXPR, type, expr,
100 convert (type, integer_zero_node));
104 /* Return the base type of TYPE. */
/* NOTE(review): lines are elided in this extract (non-contiguous margin
   numbers); the return type line, braces and final return are missing.  */
107 get_base_type (tree type)
/* A left-justified modular record wraps its real type in its single
   field; unwrap it first.  */
109 if (TREE_CODE (type) == RECORD_TYPE
110 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type))
111 type = TREE_TYPE (TYPE_FIELDS (type));
/* Walk the subtype chain (TREE_TYPE links a subtype to its parent for
   integer and real types) until the base type is reached.  */
113 while (TREE_TYPE (type) != 0
114 && (TREE_CODE (type) == INTEGER_TYPE
115 || TREE_CODE (type) == REAL_TYPE))
116 type = TREE_TYPE (type);
121 /* Likewise, but only return types known to the Ada source. */
/* NOTE(review): elided lines in this extract -- return type, braces and
   final return are missing.  Same walk as get_base_type, but it stops
   at compiler-generated "extra" subtypes so the result is a type the
   Ada program actually declared.  */
123 get_ada_base_type (tree type)
125 while (TREE_TYPE (type) != 0
126 && (TREE_CODE (type) == INTEGER_TYPE
127 || TREE_CODE (type) == REAL_TYPE)
128 && ! TYPE_EXTRA_SUBTYPE_P (type))
129 type = TREE_TYPE (type);
134 /* EXP is a GCC tree representing an address. See if we can find how
135 strictly the object at that address is aligned. Return that alignment
136 in bits. If we don't know anything about the alignment, return 0. */
/* NOTE(review): this extract has elided lines (case labels such as the
   integer-constant, MULT_EXPR and ADDR_EXPR cases are partially missing,
   and the assignment target of type_alignment is on an elided line).
   Restore from the full source before editing.  */
139 known_alignment (tree exp)
141 unsigned int this_alignment;
142 unsigned int lhs, rhs;
143 unsigned int type_alignment;
145 /* For pointer expressions, we know that the designated object is always at
146 least as strictly aligned as the designated subtype, so we account for
147 both type and expression information in this case.
149 Beware that we can still get a dummy designated subtype here (e.g. Taft
150 Amendement types), in which the alignment information is meaningless and
153 We always compute a type_alignment value and return the MAX of it
154 compared with what we get from the expression tree. Just set the
155 type_alignment value to 0 when the type information is to be ignored. */
157 = ((POINTER_TYPE_P (TREE_TYPE (exp))
158 && ! TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
159 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
161 switch (TREE_CODE (exp))
165 case NON_LVALUE_EXPR:
166 /* Conversions between pointers and integers don't change the alignment
167 of the underlying object. */
168 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
173 /* If two address are added, the alignment of the result is the
174 minimum of the two aligments. */
175 lhs = known_alignment (TREE_OPERAND (exp, 0));
176 rhs = known_alignment (TREE_OPERAND (exp, 1));
177 this_alignment = MIN (lhs, rhs);
181 /* The first part of this represents the lowest bit in the constant,
182 but is it in bytes, not bits. */
185 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
190 /* If we know the alignment of just one side, use it. Otherwise,
191 use the product of the alignments. */
192 lhs = known_alignment (TREE_OPERAND (exp, 0));
193 rhs = known_alignment (TREE_OPERAND (exp, 1));
195 if (lhs == 0 || rhs == 0)
196 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
198 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
202 this_alignment = expr_align (TREE_OPERAND (exp, 0));
/* Combine the type-derived alignment with whatever the expression
   tree told us; whichever is stricter wins.  */
210 return MAX (type_alignment, this_alignment);
213 /* We have a comparison or assignment operation on two types, T1 and T2,
214 which are both either array types or both record types.
215 Return the type that both operands should be converted to, if any.
216 Otherwise return zero. */
/* NOTE(review): the return statements of each branch are elided in this
   extract (non-contiguous margin numbers) -- only the conditions remain.  */
219 find_common_type (tree t1, tree t2)
221 /* If either type is non-BLKmode, use it. Note that we know that we will
222 not have any alignment problems since if we did the non-BLKmode
223 type could not have been used. */
224 if (TYPE_MODE (t1) != BLKmode)
226 else if (TYPE_MODE (t2) != BLKmode)
229 /* Otherwise, return the type that has a constant size. */
230 if (TREE_CONSTANT (TYPE_SIZE (t1)))
232 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
235 /* In this case, both types have variable size. It's probably
236 best to leave the "type mismatch" because changing it could
237 case a bad self-referential reference. */
241 /* See if EXP contains a SAVE_EXPR in a position where we would
244 ??? This is a real kludge, but is probably the best approach short
245 of some very general solution. */
/* NOTE(review): elided lines in this extract -- the SAVE_EXPR case, the
   CONSTRUCTOR/TREE_LIST case labels and the default return are missing.
   Returns nonzero if a SAVE_EXPR is found by the recursive walk below.  */
248 contains_save_expr_p (tree exp)
250 switch (TREE_CODE (exp))
/* Unary wrappers: look through and recurse on the operand.  */
255 case ADDR_EXPR: case INDIRECT_REF:
257 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
258 return contains_save_expr_p (TREE_OPERAND (exp, 0));
/* A constructor's element list, if non-empty, is scanned as a chain.  */
261 return (CONSTRUCTOR_ELTS (exp) != 0
262 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
/* List node: check this element's value, then the rest of the chain.  */
265 return (contains_save_expr_p (TREE_VALUE (exp))
266 || (TREE_CHAIN (exp) != 0
267 && contains_save_expr_p (TREE_CHAIN (exp))));
274 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
275 it if so. This is used to detect types whose sizes involve computations
276 that are known to raise Constraint_Error. */
/* NOTE(review): elided lines in this extract -- the declaration of `tem',
   the tests on `tem' after each recursive call, the class-case labels and
   the default returns are missing.  The structure visible below walks
   unary, binary and ternary (e.g. COND_EXPR) operands recursively.  */
279 contains_null_expr (tree exp)
283 if (TREE_CODE (exp) == NULL_EXPR)
286 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
289 return contains_null_expr (TREE_OPERAND (exp, 0));
292 tem = contains_null_expr (TREE_OPERAND (exp, 0));
296 return contains_null_expr (TREE_OPERAND (exp, 1));
299 switch (TREE_CODE (exp))
302 return contains_null_expr (TREE_OPERAND (exp, 0));
305 tem = contains_null_expr (TREE_OPERAND (exp, 0));
309 tem = contains_null_expr (TREE_OPERAND (exp, 1));
313 return contains_null_expr (TREE_OPERAND (exp, 2));
324 /* Return an expression tree representing an equality comparison of
325 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
326 be of type RESULT_TYPE
328 Two arrays are equal in one of two ways: (1) if both have zero length
329 in some dimension (not necessarily the same dimension) or (2) if the
330 lengths in each dimension are equal and the data is equal. We perform the
331 length tests in as efficient a manner as possible. */
/* NOTE(review): this extract has elided lines (non-contiguous margin
   numbers): declarations such as `tem' and `nbt', several assignment
   targets (e.g. the variable receiving each `= build_binary_op (...)'),
   brace lines, the final-dimension stepping of t1/t2, and the return
   statement are missing.  Restore from the full source before editing.  */
334 compare_arrays (tree result_type, tree a1, tree a2)
336 tree t1 = TREE_TYPE (a1);
337 tree t2 = TREE_TYPE (a2);
338 tree result = convert (result_type, integer_one_node);
339 tree a1_is_null = convert (result_type, integer_zero_node);
340 tree a2_is_null = convert (result_type, integer_zero_node);
341 int length_zero_p = 0;
343 /* Process each dimension separately and compare the lengths. If any
344 dimension has a size known to be zero, set SIZE_ZERO_P to 1 to
345 suppress the comparison of the data. */
346 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
348 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
349 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
350 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
351 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
352 tree bt = get_base_type (TREE_TYPE (lb1));
/* length1/length2 hold ub - lb, i.e. the length MINUS ONE, in the
   bounds' base type; see the swap comment below.  */
353 tree length1 = fold (build (MINUS_EXPR, bt, ub1, lb1));
354 tree length2 = fold (build (MINUS_EXPR, bt, ub2, lb2));
357 tree comparison, this_a1_is_null, this_a2_is_null;
359 /* If the length of the first array is a constant, swap our operands
360 unless the length of the second array is the constant zero.
361 Note that we have set the `length' values to the length - 1. */
362 if (TREE_CODE (length1) == INTEGER_CST
363 && ! integer_zerop (fold (build (PLUS_EXPR, bt, length2,
364 convert (bt, integer_one_node)))))
366 tem = a1, a1 = a2, a2 = tem;
367 tem = t1, t1 = t2, t2 = tem;
368 tem = lb1, lb1 = lb2, lb2 = tem;
369 tem = ub1, ub1 = ub2, ub2 = tem;
370 tem = length1, length1 = length2, length2 = tem;
371 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
374 /* If the length of this dimension in the second array is the constant
375 zero, we can just go inside the original bounds for the first
376 array and see if last < first. */
377 if (integer_zerop (fold (build (PLUS_EXPR, bt, length2,
378 convert (bt, integer_one_node)))))
380 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
381 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
383 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
385 if (CONTAINS_PLACEHOLDER_P (comparison))
386 comparison = build (WITH_RECORD_EXPR, result_type,
388 if (CONTAINS_PLACEHOLDER_P (length1))
389 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
393 this_a1_is_null = comparison;
394 this_a2_is_null = convert (result_type, integer_one_node);
397 /* If the length is some other constant value, we know that the
398 this dimension in the first array cannot be superflat, so we
399 can just use its length from the actual stored bounds. */
400 else if (TREE_CODE (length2) == INTEGER_CST)
402 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
403 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
404 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
405 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
406 nbt = get_base_type (TREE_TYPE (ub1));
409 = build_binary_op (EQ_EXPR, result_type,
410 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
411 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
413 /* Note that we know that UB2 and LB2 are constant and hence
414 cannot contain a PLACEHOLDER_EXPR. */
416 if (CONTAINS_PLACEHOLDER_P (comparison))
417 comparison = build (WITH_RECORD_EXPR, result_type, comparison, a1);
418 if (CONTAINS_PLACEHOLDER_P (length1))
419 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
421 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
422 this_a2_is_null = convert (result_type, integer_zero_node);
425 /* Otherwise compare the computed lengths. */
428 if (CONTAINS_PLACEHOLDER_P (length1))
429 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
430 if (CONTAINS_PLACEHOLDER_P (length2))
431 length2 = build (WITH_RECORD_EXPR, bt, length2, a2);
434 = build_binary_op (EQ_EXPR, result_type, length1, length2);
437 = build_binary_op (LT_EXPR, result_type, length1,
438 convert (bt, integer_zero_node));
440 = build_binary_op (LT_EXPR, result_type, length2,
441 convert (bt, integer_zero_node));
/* Accumulate this dimension's length test into the running result
   and the per-operand null-length flags.  */
444 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
447 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
448 this_a1_is_null, a1_is_null);
449 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
450 this_a2_is_null, a2_is_null);
456 /* Unless the size of some bound is known to be zero, compare the
457 data in the array. */
460 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
463 a1 = convert (type, a1), a2 = convert (type, a2);
465 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
466 fold (build (EQ_EXPR, result_type, a1, a2)));
470 /* The result is also true if both sizes are zero. */
471 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
472 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
473 a1_is_null, a2_is_null),
476 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
477 starting the comparison above since the place it would be otherwise
478 evaluated would be wrong. */
480 if (contains_save_expr_p (a1))
481 result = build (COMPOUND_EXPR, result_type, a1, result);
483 if (contains_save_expr_p (a2))
484 result = build (COMPOUND_EXPR, result_type, a2, result);
489 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
490 type TYPE. We know that TYPE is a modular type with a nonbinary
/* NOTE(review): elided lines in this extract -- the remaining parameter
   lines of the signature, declarations of `op_type', `result' and
   `unsignedp', brace lines, and the arms of the final COND_EXPRs are
   missing (non-contiguous margin numbers).  Restore before editing.  */
494 nonbinary_modular_operation (enum tree_code op_code,
499 tree modulus = TYPE_MODULUS (type);
500 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
501 unsigned int precision;
506 /* If this is an addition of a constant, convert it to a subtraction
507 of a constant since we can do that faster. */
508 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
509 rhs = fold (build (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
511 /* For the logical operations, we only need PRECISION bits. For
512 addition and subraction, we need one more and for multiplication we
513 need twice as many. But we never want to make a size smaller than
515 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
516 needed_precision += 1;
517 else if (op_code == MULT_EXPR)
518 needed_precision *= 2;
520 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
522 /* Unsigned will do for everything but subtraction. */
523 if (op_code == MINUS_EXPR)
526 /* If our type is the wrong signedness or isn't wide enough, make a new
527 type and convert both our operands to it. */
528 if (TYPE_PRECISION (op_type) < precision
529 || TREE_UNSIGNED (op_type) != unsignedp)
531 /* Copy the node so we ensure it can be modified to make it modular. */
532 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
533 modulus = convert (op_type, modulus);
534 SET_TYPE_MODULUS (op_type, modulus);
535 TYPE_MODULAR_P (op_type) = 1;
536 lhs = convert (op_type, lhs);
537 rhs = convert (op_type, rhs);
540 /* Do the operation, then we'll fix it up. */
541 result = fold (build (op_code, op_type, lhs, rhs));
543 /* For multiplication, we have no choice but to do a full modulus
544 operation. However, we want to do this in the narrowest
546 if (op_code == MULT_EXPR)
548 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
549 modulus = convert (div_type, modulus);
550 SET_TYPE_MODULUS (div_type, modulus);
551 TYPE_MODULAR_P (div_type) = 1;
552 result = convert (op_type,
553 fold (build (TRUNC_MOD_EXPR, div_type,
554 convert (div_type, result), modulus)));
557 /* For subtraction, add the modulus back if we are negative. */
558 else if (op_code == MINUS_EXPR)
560 result = save_expr (result);
561 result = fold (build (COND_EXPR, op_type,
562 build (LT_EXPR, integer_type_node, result,
563 convert (op_type, integer_zero_node)),
564 fold (build (PLUS_EXPR, op_type,
569 /* For the other operations, subtract the modulus if we are >= it. */
572 result = save_expr (result);
573 result = fold (build (COND_EXPR, op_type,
574 build (GE_EXPR, integer_type_node,
576 fold (build (MINUS_EXPR, op_type,
/* Fold the fixed-up result back to the caller's modular type.  */
581 return convert (type, result);
584 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
585 desired for the result. Usually the operation is to be performed
586 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
587 in which case the type to be used will be derived from the operands.
589 This function is very much unlike the ones for C and C++ since we
590 have already done any type conversion and matching required. All we
591 have to do here is validate the work done by SEM and handle subtypes. */
/* NOTE(review): this extract is heavily elided (non-contiguous margin
   numbers): the remaining signature parameters, local declarations
   (e.g. `result', `modulus', `best_type'), the `switch (op_code)' line,
   most case labels (MODIFY_EXPR, ARRAY_REF, comparison ops, shifts,
   arithmetic), brace lines, abort/sanity checks and the final return
   are all missing.  Restore from the full source before editing.  */
594 build_binary_op (enum tree_code op_code,
599 tree left_type = TREE_TYPE (left_operand);
600 tree right_type = TREE_TYPE (right_operand);
601 tree left_base_type = get_base_type (left_type);
602 tree right_base_type = get_base_type (right_type);
603 tree operation_type = result_type;
607 int has_side_effects = 0;
609 /* If one (but not both, unless they have the same object) operands are a
610 WITH_RECORD_EXPR, do the operation and then surround it with the
611 WITH_RECORD_EXPR. Don't do this for assignment, for an ARRAY_REF, or
612 for an ARRAY_RANGE_REF because we need to keep track of the
613 WITH_RECORD_EXPRs on both operands very carefully. */
614 if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
615 && op_code != ARRAY_RANGE_REF
616 && TREE_CODE (left_operand) == WITH_RECORD_EXPR
617 && (TREE_CODE (right_operand) != WITH_RECORD_EXPR
618 || operand_equal_p (TREE_OPERAND (left_operand, 1),
619 TREE_OPERAND (right_operand, 1), 0)))
621 tree right = right_operand;
623 if (TREE_CODE (right) == WITH_RECORD_EXPR)
624 right = TREE_OPERAND (right, 0);
626 result = build_binary_op (op_code, result_type,
627 TREE_OPERAND (left_operand, 0), right);
628 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
629 TREE_OPERAND (left_operand, 1));
631 else if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
632 && op_code != ARRAY_RANGE_REF
633 && TREE_CODE (left_operand) != WITH_RECORD_EXPR
634 && TREE_CODE (right_operand) == WITH_RECORD_EXPR
636 result = build_binary_op (op_code, result_type, left_operand,
637 TREE_OPERAND (right_operand, 0));
638 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
639 TREE_OPERAND (right_operand, 1));
/* Unwrap a left-justified modular record and strip extra subtypes
   from the requested operation type.  */
642 if (operation_type != 0
643 && TREE_CODE (operation_type) == RECORD_TYPE
644 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
645 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
647 if (operation_type != 0
648 && ! AGGREGATE_TYPE_P (operation_type)
649 && TYPE_EXTRA_SUBTYPE_P (operation_type))
650 operation_type = get_base_type (operation_type);
652 modulus = (operation_type != 0 && TREE_CODE (operation_type) == INTEGER_TYPE
653 && TYPE_MODULAR_P (operation_type)
654 ? TYPE_MODULUS (operation_type) : 0);
659 /* If there were any integral or pointer conversions on LHS, remove
660 them; we'll be putting them back below if needed. Likewise for
661 conversions between array and record types. But don't do this if
662 the right operand is not BLKmode (for packed arrays)
663 unless we are not changing the mode. */
664 while ((TREE_CODE (left_operand) == CONVERT_EXPR
665 || TREE_CODE (left_operand) == NOP_EXPR
666 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
667 && (((INTEGRAL_TYPE_P (left_type)
668 || POINTER_TYPE_P (left_type))
669 && (INTEGRAL_TYPE_P (TREE_TYPE
670 (TREE_OPERAND (left_operand, 0)))
671 || POINTER_TYPE_P (TREE_TYPE
672 (TREE_OPERAND (left_operand, 0)))))
673 || (((TREE_CODE (left_type) == RECORD_TYPE
674 /* Don't remove conversions to left-justified modular
676 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
677 || TREE_CODE (left_type) == ARRAY_TYPE)
678 && ((TREE_CODE (TREE_TYPE
679 (TREE_OPERAND (left_operand, 0)))
681 || (TREE_CODE (TREE_TYPE
682 (TREE_OPERAND (left_operand, 0)))
684 && (TYPE_MODE (right_type) == BLKmode
685 || (TYPE_MODE (left_type)
686 == TYPE_MODE (TREE_TYPE
688 (left_operand, 0))))))))
690 left_operand = TREE_OPERAND (left_operand, 0);
691 left_type = TREE_TYPE (left_operand);
694 if (operation_type == 0)
695 operation_type = left_type;
697 /* If the RHS has a conversion between record and array types and
698 an inner type is no worse, use it. Note we cannot do this for
699 modular types or types with TYPE_ALIGN_OK, since the latter
700 might indicate a conversion between a root type and a class-wide
701 type, which we must not remove. */
702 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
703 && ((TREE_CODE (right_type) == RECORD_TYPE
704 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type)
705 && ! TYPE_ALIGN_OK (right_type)
706 && ! TYPE_IS_FAT_POINTER_P (right_type))
707 || TREE_CODE (right_type) == ARRAY_TYPE)
708 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
710 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
711 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
713 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
714 && ! (TYPE_IS_FAT_POINTER_P
715 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
716 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
719 == find_common_type (right_type,
720 TREE_TYPE (TREE_OPERAND
721 (right_operand, 0))))
722 || right_type != best_type))
724 right_operand = TREE_OPERAND (right_operand, 0);
725 right_type = TREE_TYPE (right_operand);
728 /* If we are copying one array or record to another, find the best type
730 if (((TREE_CODE (left_type) == ARRAY_TYPE
731 && TREE_CODE (right_type) == ARRAY_TYPE)
732 || (TREE_CODE (left_type) == RECORD_TYPE
733 && TREE_CODE (right_type) == RECORD_TYPE))
734 && (best_type = find_common_type (left_type, right_type)) != 0)
735 operation_type = best_type;
737 /* If a class-wide type may be involved, force use of the RHS type. */
738 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
739 operation_type = right_type;
741 /* Ensure everything on the LHS is valid. If we have a field reference,
742 strip anything that get_inner_reference can handle. Then remove any
743 conversions with type types having the same code and mode. Mark
744 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
745 either an INDIRECT_REF or a decl. */
746 result = left_operand;
749 tree restype = TREE_TYPE (result);
751 if (TREE_CODE (result) == COMPONENT_REF
752 || TREE_CODE (result) == ARRAY_REF
753 || TREE_CODE (result) == ARRAY_RANGE_REF)
754 while (handled_component_p (result))
755 result = TREE_OPERAND (result, 0);
756 else if (TREE_CODE (result) == REALPART_EXPR
757 || TREE_CODE (result) == IMAGPART_EXPR
758 || TREE_CODE (result) == WITH_RECORD_EXPR
759 || ((TREE_CODE (result) == NOP_EXPR
760 || TREE_CODE (result) == CONVERT_EXPR)
761 && (((TREE_CODE (restype)
762 == TREE_CODE (TREE_TYPE
763 (TREE_OPERAND (result, 0))))
764 && (TYPE_MODE (TREE_TYPE
765 (TREE_OPERAND (result, 0)))
766 == TYPE_MODE (restype)))
767 || TYPE_ALIGN_OK (restype))))
768 result = TREE_OPERAND (result, 0);
769 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
771 TREE_ADDRESSABLE (result) = 1;
772 result = TREE_OPERAND (result, 0);
778 if (TREE_CODE (result) != INDIRECT_REF && TREE_CODE (result) != NULL_EXPR
779 && ! DECL_P (result))
782 /* Convert the right operand to the operation type unless
783 it is either already of the correct type or if the type
784 involves a placeholder, since the RHS may not have the same
786 if (operation_type != right_type
787 && (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
789 /* For a variable-size type, with both BLKmode, convert using
790 CONVERT_EXPR instead of an unchecked conversion since we don't
791 need to make a temporary (and can't anyway). */
792 if (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
793 && TYPE_MODE (TREE_TYPE (right_operand)) == BLKmode
794 && TREE_CODE (right_operand) != UNCONSTRAINED_ARRAY_REF)
795 right_operand = build1 (CONVERT_EXPR, operation_type,
798 right_operand = convert (operation_type, right_operand);
800 right_type = operation_type;
803 /* If the modes differ, make up a bogus type and convert the RHS to
804 it. This can happen with packed types. */
805 if (TYPE_MODE (left_type) != TYPE_MODE (right_type))
807 tree new_type = copy_node (left_type);
809 TYPE_SIZE (new_type) = TYPE_SIZE (right_type);
810 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (right_type);
811 TYPE_MAIN_VARIANT (new_type) = new_type;
812 right_operand = convert (new_type, right_operand);
815 has_side_effects = 1;
820 if (operation_type == 0)
821 operation_type = TREE_TYPE (left_type);
823 /* ... fall through ... */
825 case ARRAY_RANGE_REF:
827 /* First convert the right operand to its base type. This will
828 prevent unneed signedness conversions when sizetype is wider than
830 right_operand = convert (right_base_type, right_operand);
831 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
833 if (! TREE_CONSTANT (right_operand)
834 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
835 gnat_mark_addressable (left_operand);
844 if (POINTER_TYPE_P (left_type))
847 /* ... fall through ... */
851 /* If either operand is a NULL_EXPR, just return a new one. */
852 if (TREE_CODE (left_operand) == NULL_EXPR)
853 return build (op_code, result_type,
854 build1 (NULL_EXPR, integer_type_node,
855 TREE_OPERAND (left_operand, 0)),
858 else if (TREE_CODE (right_operand) == NULL_EXPR)
859 return build (op_code, result_type,
860 build1 (NULL_EXPR, integer_type_node,
861 TREE_OPERAND (right_operand, 0)),
864 /* If either object is a left-justified modular types, get the
865 fields from within. */
866 if (TREE_CODE (left_type) == RECORD_TYPE
867 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
869 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
871 left_type = TREE_TYPE (left_operand);
872 left_base_type = get_base_type (left_type);
875 if (TREE_CODE (right_type) == RECORD_TYPE
876 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type))
878 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
880 right_type = TREE_TYPE (right_operand);
881 right_base_type = get_base_type (right_type);
884 /* If both objects are arrays, compare them specially. */
885 if ((TREE_CODE (left_type) == ARRAY_TYPE
886 || (TREE_CODE (left_type) == INTEGER_TYPE
887 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
888 && (TREE_CODE (right_type) == ARRAY_TYPE
889 || (TREE_CODE (right_type) == INTEGER_TYPE
890 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
892 result = compare_arrays (result_type, left_operand, right_operand);
894 if (op_code == EQ_EXPR)
896 else if (op_code == NE_EXPR)
897 result = invert_truthvalue (result);
904 /* Otherwise, the base types must be the same unless the objects are
905 records. If we have records, use the best type and convert both
906 operands to that type. */
907 if (left_base_type != right_base_type)
909 if (TREE_CODE (left_base_type) == RECORD_TYPE
910 && TREE_CODE (right_base_type) == RECORD_TYPE)
912 /* The only way these are permitted to be the same is if both
913 types have the same name. In that case, one of them must
914 not be self-referential. Use that one as the best type.
915 Even better is if one is of fixed size. */
918 if (TYPE_NAME (left_base_type) == 0
919 || TYPE_NAME (left_base_type) != TYPE_NAME (right_base_type))
922 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
923 best_type = left_base_type;
924 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
925 best_type = right_base_type;
926 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
927 best_type = left_base_type;
928 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
929 best_type = right_base_type;
933 left_operand = convert (best_type, left_operand);
934 right_operand = convert (best_type, right_operand);
940 /* If we are comparing a fat pointer against zero, we need to
941 just compare the data pointer. */
942 else if (TYPE_FAT_POINTER_P (left_base_type)
943 && TREE_CODE (right_operand) == CONSTRUCTOR
944 && integer_zerop (TREE_VALUE (CONSTRUCTOR_ELTS (right_operand))))
946 right_operand = build_component_ref (left_operand, NULL_TREE,
947 TYPE_FIELDS (left_base_type),
949 left_operand = convert (TREE_TYPE (right_operand),
954 left_operand = convert (left_base_type, left_operand);
955 right_operand = convert (right_base_type, right_operand);
961 case PREINCREMENT_EXPR:
962 case PREDECREMENT_EXPR:
963 case POSTINCREMENT_EXPR:
964 case POSTDECREMENT_EXPR:
965 /* In these, the result type and the left operand type should be the
966 same. Do the operation in the base type of those and convert the
967 right operand (which is an integer) to that type.
969 Note that these operations are only used in loop control where
970 we guarantee that no overflow can occur. So nothing special need
971 be done for modular types. */
973 if (left_type != result_type)
976 operation_type = get_base_type (result_type);
977 left_operand = convert (operation_type, left_operand);
978 right_operand = convert (operation_type, right_operand);
979 has_side_effects = 1;
987 /* The RHS of a shift can be any type. Also, ignore any modulus
988 (we used to abort, but this is needed for unchecked conversion
989 to modular types). Otherwise, processing is the same as normal. */
990 if (operation_type != left_base_type)
994 left_operand = convert (operation_type, left_operand);
997 case TRUTH_ANDIF_EXPR:
998 case TRUTH_ORIF_EXPR:
1001 case TRUTH_XOR_EXPR:
1002 left_operand = gnat_truthvalue_conversion (left_operand);
1003 right_operand = gnat_truthvalue_conversion (right_operand);
1009 /* For binary modulus, if the inputs are in range, so are the
1011 if (modulus != 0 && integer_pow2p (modulus))
1017 if (TREE_TYPE (result_type) != left_base_type
1018 || TREE_TYPE (result_type) != right_base_type)
1021 left_operand = convert (left_base_type, left_operand);
1022 right_operand = convert (right_base_type, right_operand);
1025 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1026 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1027 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1028 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1029 /* These always produce results lower than either operand. */
1035 /* The result type should be the same as the base types of the
1036 both operands (and they should be the same). Convert
1037 everything to the result type. */
1039 if (operation_type != left_base_type
1040 || left_base_type != right_base_type)
1043 left_operand = convert (operation_type, left_operand);
1044 right_operand = convert (operation_type, right_operand);
1047 if (modulus != 0 && ! integer_pow2p (modulus))
1049 result = nonbinary_modular_operation (op_code, operation_type,
1050 left_operand, right_operand);
1053 /* If either operand is a NULL_EXPR, just return a new one. */
1054 else if (TREE_CODE (left_operand) == NULL_EXPR)
1055 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1056 else if (TREE_CODE (right_operand) == NULL_EXPR)
1057 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1059 result = fold (build (op_code, operation_type,
1060 left_operand, right_operand));
/* Propagate side-effect and constant flags onto the built node; array
   references are never considered constant here.  */
1062 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1063 TREE_CONSTANT (result)
1064 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1065 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1067 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1068 && TYPE_VOLATILE (operation_type))
1069 TREE_THIS_VOLATILE (result) = 1;
1071 /* If we are working with modular types, perform the MOD operation
1072 if something above hasn't eliminated the need for it. */
1074 result = fold (build (FLOOR_MOD_EXPR, operation_type, result,
1075 convert (operation_type, modulus)));
1077 if (result_type != 0 && result_type != operation_type)
1078 result = convert (result_type, result);
1083 /* Similar, but for unary operations. */
/* Build a tree node for OP_CODE applied to OPERAND.  RESULT_TYPE, when
   nonzero, is the type the caller wants for the result; when zero it is
   derived from OPERAND's type.  Handles GNAT-specific situations:
   WITH_RECORD_EXPR wrapping, left-justified modular record types,
   ADDR_EXPR / 'Address on padded and unconstrained objects, dereference
   through fat and thin pointers, and modular (wrap-around) arithmetic
   for NEGATE_EXPR / BIT_NOT_EXPR.
   NOTE(review): some lines of this function are elided in this chunk;
   the comments below describe only the visible code.  */
1086 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1088 tree type = TREE_TYPE (operand);
1089 tree base_type = get_base_type (type);
1090 tree operation_type = result_type;
1092 int side_effects = 0;
1094 /* If we have a WITH_RECORD_EXPR as our operand, do the operation first,
1095 then surround it with the WITH_RECORD_EXPR. This allows GCC to do better
1096 expression folding. */
1097 if (TREE_CODE (operand) == WITH_RECORD_EXPR)
1099 result = build_unary_op (op_code, result_type,
1100 TREE_OPERAND (operand, 0));
1101 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
1102 TREE_OPERAND (operand, 1));
/* For a left-justified modular record type, operate on the type of its
   single field instead of the record itself.  */
1105 if (operation_type != 0
1106 && TREE_CODE (operation_type) == RECORD_TYPE
1107 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
1108 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
/* Strip "extra" scalar subtypes down to their base type.  */
1110 if (operation_type != 0
1111 && ! AGGREGATE_TYPE_P (operation_type)
1112 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1113 operation_type = get_base_type (operation_type);
/* Default the operation/result type from the operand's type.  */
1119 if (operation_type == 0)
1120 result_type = operation_type = TREE_TYPE (type);
1121 else if (result_type != TREE_TYPE (type))
1124 result = fold (build1 (op_code, operation_type, operand));
/* The cases below dispatch on OP_CODE; the switch head itself is not
   visible in this chunk -- TODO confirm against the full source.  */
1127 case TRUTH_NOT_EXPR:
1128 if (result_type != base_type)
1131 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1134 case ATTR_ADDR_EXPR:
/* Address-taking: dispatch on the shape of the operand.  */
1136 switch (TREE_CODE (operand))
1139 case UNCONSTRAINED_ARRAY_REF:
1140 result = TREE_OPERAND (operand, 0);
1142 /* Make sure the type here is a pointer, not a reference.
1143 GCC wants pointer types for function addresses. */
1144 if (result_type == 0)
1145 result_type = build_pointer_type (type);
1150 TREE_TYPE (result) = type = build_pointer_type (type);
1154 case ARRAY_RANGE_REF:
1157 /* If this is for 'Address, find the address of the prefix and
1158 add the offset to the field. Otherwise, do this the normal
1160 if (op_code == ATTR_ADDR_EXPR)
1162 HOST_WIDE_INT bitsize;
1163 HOST_WIDE_INT bitpos;
1165 enum machine_mode mode;
1166 int unsignedp, volatilep;
1168 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1169 &mode, &unsignedp, &volatilep);
1171 /* If INNER is a padding type whose field has a self-referential
1172 size, convert to that inner type. We know the offset is zero
1173 and we need to have that type visible. */
1174 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1175 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1176 && (CONTAINS_PLACEHOLDER_P
1177 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1178 (TREE_TYPE (inner)))))))
1179 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1182 /* Compute the offset as a byte offset from INNER. */
1184 offset = size_zero_node;
/* Warn when the object is not byte-aligned; 'Address of such an
   object is dubious.  */
1186 if (bitpos % BITS_PER_UNIT != 0)
1188 ("taking address of object not aligned on storage unit?",
1191 offset = size_binop (PLUS_EXPR, offset,
1192 size_int (bitpos / BITS_PER_UNIT));
1194 /* Take the address of INNER, convert the offset to void *, and
1195 add them. It will later be converted to the desired result
1197 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1198 inner = convert (ptr_void_type_node, inner);
1199 offset = convert (ptr_void_type_node, offset);
1200 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1202 result = convert (build_pointer_type (TREE_TYPE (operand)),
1209 /* If this is just a constructor for a padded record, we can
1210 just take the address of the single field and convert it to
1211 a pointer to our type. */
1212 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1215 = build_unary_op (ADDR_EXPR, NULL_TREE,
1216 TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
1217 result = convert (build_pointer_type (TREE_TYPE (operand)),
/* For an aggregate-to-aggregate view change, take the address of the
   underlying object instead.  */
1225 if (AGGREGATE_TYPE_P (type)
1226 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1227 return build_unary_op (ADDR_EXPR, result_type,
1228 TREE_OPERAND (operand, 0));
1230 /* If this NOP_EXPR doesn't change the mode, get the result type
1231 from this type and go down. We need to do this in case
1232 this is a conversion of a CONST_DECL. */
1233 if (TYPE_MODE (type) != BLKmode
1234 && (TYPE_MODE (type)
1235 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1236 return build_unary_op (ADDR_EXPR,
1238 ? build_pointer_type (type)
1240 TREE_OPERAND (operand, 0));
/* A CONST_DECL has no address; use its corresponding variable.  */
1244 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1246 /* ... fall through ... */
1251 /* If we are taking the address of a padded record whose field
1252 contains a template, take the address of the template. */
1253 if (TREE_CODE (type) == RECORD_TYPE
1254 && TYPE_IS_PADDING_P (type)
1255 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1256 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1258 type = TREE_TYPE (TYPE_FIELDS (type));
1259 operand = convert (type, operand);
1262 if (type != error_mark_node)
1263 operation_type = build_pointer_type (type);
/* Mark the operand addressable so it is not kept in a register.  */
1265 gnat_mark_addressable (operand);
1266 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1269 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1273 /* If we want to refer to an entire unconstrained array,
1274 make up an expression to do so. This will never survive to
1275 the backend. If TYPE is a thin pointer, first convert the
1276 operand to a fat pointer. */
1277 if (TYPE_THIN_POINTER_P (type)
1278 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
1281 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1283 type = TREE_TYPE (operand);
1286 if (TYPE_FAT_POINTER_P (type))
1287 result = build1 (UNCONSTRAINED_ARRAY_REF,
1288 TYPE_UNCONSTRAINED_ARRAY (type), operand);
/* Dereferencing the address of an object is just that object.  */
1290 else if (TREE_CODE (operand) == ADDR_EXPR)
1291 result = TREE_OPERAND (operand, 0);
1295 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1296 TREE_READONLY (result) = TREE_READONLY (TREE_TYPE (type));
1300 = (! TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
/* Modular arithmetic: MODULUS is nonzero only for modular integer
   operation types; MOD_POW2 says whether it is a power of two.  */
1306 tree modulus = ((operation_type != 0
1307 && TREE_CODE (operation_type) == INTEGER_TYPE
1308 && TYPE_MODULAR_P (operation_type))
1309 ? TYPE_MODULUS (operation_type) : 0);
1310 int mod_pow2 = modulus != 0 && integer_pow2p (modulus);
1312 /* If this is a modular type, there are various possibilities
1313 depending on the operation and whether the modulus is a
1314 power of two or not. */
1318 if (operation_type != base_type)
1321 operand = convert (operation_type, operand);
1323 /* The fastest in the negate case for binary modulus is
1324 the straightforward code; the TRUNC_MOD_EXPR below
1325 is an AND operation. */
1326 if (op_code == NEGATE_EXPR && mod_pow2)
1327 result = fold (build (TRUNC_MOD_EXPR, operation_type,
1328 fold (build1 (NEGATE_EXPR, operation_type,
1332 /* For nonbinary negate case, return zero for zero operand,
1333 else return the modulus minus the operand. If the modulus
1334 is a power of two minus one, we can do the subtraction
1335 as an XOR since it is equivalent and faster on most machines. */
1336 else if (op_code == NEGATE_EXPR && ! mod_pow2)
1338 if (integer_pow2p (fold (build (PLUS_EXPR, operation_type,
1340 convert (operation_type,
1341 integer_one_node)))))
1342 result = fold (build (BIT_XOR_EXPR, operation_type,
1345 result = fold (build (MINUS_EXPR, operation_type,
1348 result = fold (build (COND_EXPR, operation_type,
1349 fold (build (NE_EXPR, integer_type_node,
1351 convert (operation_type,
1352 integer_zero_node))),
1357 /* For the NOT cases, we need a constant equal to
1358 the modulus minus one. For a binary modulus, we
1359 XOR against the constant and subtract the operand from
1360 that constant for nonbinary modulus. */
1362 tree cnst = fold (build (MINUS_EXPR, operation_type, modulus,
1363 convert (operation_type,
1364 integer_one_node)));
1367 result = fold (build (BIT_XOR_EXPR, operation_type,
1370 result = fold (build (MINUS_EXPR, operation_type,
1378 /* ... fall through ... */
/* Default case: convert to the base type and apply the operation.  */
1381 if (operation_type != base_type)
1384 result = fold (build1 (op_code, operation_type, convert (operation_type,
1390 TREE_SIDE_EFFECTS (result) = 1;
/* A dereference is volatile when the pointed-to type is.  */
1391 if (TREE_CODE (result) == INDIRECT_REF)
1392 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
/* Finally, convert to the requested result type if needed.  */
1395 if (result_type != 0 && TREE_TYPE (result) != result_type)
1396 result = convert (result_type, result);
1401 /* Similar, but for COND_EXPR. */
/* Build a COND_EXPR of RESULT_TYPE selecting between the true and false
   operands under CONDITION_OPERAND.  For unconstrained or
   self-referentially-sized result types, the selection is done on the
   operands' addresses and the chosen address is then dereferenced.  */
1404 build_cond_expr (tree result_type,
1405 tree condition_operand,
1412 /* Front-end verifies that result, true and false operands have same base
1413 type. Convert everything to the result type. */
1415 true_operand = convert (result_type, true_operand);
1416 false_operand = convert (result_type, false_operand);
1418 /* If the result type is unconstrained, take the address of
1419 the operands and then dereference our result. */
1421 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1422 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1425 result_type = build_pointer_type (result_type);
1426 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1427 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1430 result = fold (build (COND_EXPR, result_type, condition_operand,
1431 true_operand, false_operand));
1433 /* If either operand is a SAVE_EXPR (possibly surrounded by
1434 arithmetic), make sure it gets done. */
1435 true_operand = skip_simple_arithmetic (true_operand);
1436 false_operand = skip_simple_arithmetic (false_operand);
1438 if (TREE_CODE (true_operand) == SAVE_EXPR)
1439 result = build (COMPOUND_EXPR, result_type, true_operand, result);
1441 if (TREE_CODE (false_operand) == SAVE_EXPR)
1442 result = build (COMPOUND_EXPR, result_type, false_operand, result);
1444 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1445 SAVE_EXPRs with side effects and not shared by both arms. */
/* Undo the address-taking done above for unconstrained results.  */
1448 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1454 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
/* The call is built through the address of FUNDECL with a one-element
   TREE_LIST of arguments, and is marked as having side effects.  */
1458 build_call_1_expr (tree fundecl, tree arg)
1460 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1461 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1462 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1465 TREE_SIDE_EFFECTS (call) = 1;
1470 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
/* Same pattern as build_call_1_expr, with a two-element argument list
   (ARG1 first, then ARG2); the call is marked as having side effects.  */
1474 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1476 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1477 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1478 chainon (chainon (NULL_TREE,
1479 build_tree_list (NULL_TREE, arg1)),
1480 build_tree_list (NULL_TREE, arg2)),
1483 TREE_SIDE_EFFECTS (call) = 1;
1488 /* Likewise to call FUNDECL with no arguments. */
/* Zero-argument variant: NULL_TREE argument list; the call is still
   marked as having side effects.  */
1491 build_call_0_expr (tree fundecl)
1493 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1494 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1495 NULL_TREE, NULL_TREE);
1497 TREE_SIDE_EFFECTS (call) = 1;
1502 /* Call a function that raises an exception and pass the line number and file
1503 name, if requested. MSG says which exception function to call. */
/* MSG indexes gnat_raise_decls to pick the runtime raise routine.  The
   file name argument is the empty string when file names are being
   discarded; the second argument is the current input line.  */
1506 build_call_raise (int msg)
1508 tree fndecl = gnat_raise_decls[msg];
1509 const char *str = discard_file_names ? "" : ref_filename;
1510 int len = strlen (str) + 1;
1511 tree filename = build_string (len, str);
/* Give the string literal an array-of-char type of the right length.  */
1513 TREE_TYPE (filename)
1514 = build_array_type (char_type_node,
1515 build_index_type (build_int_2 (len, 0)));
1518 build_call_2_expr (fndecl,
1519 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1521 build_int_2 (input_line, 0));
1524 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
/* Unlike plain build_constructor, this also computes TREE_CONSTANT,
   TREE_STATIC, TREE_SIDE_EFFECTS and TREE_READONLY for the result from
   the element list, and refuses to mark as constant a record whose
   fields are not laid out in increasing bit-position order (varasm.c
   cannot emit such a constructor statically).  */
1527 gnat_build_constructor (tree type, tree list)
1530 int allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1531 int side_effects = 0;
/* Scan the elements: any non-constant value, any non-INTEGER_CST bit
   field value, or any value invalid as a static initializer makes the
   whole constructor non-constant.  */
1534 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1536 if (! TREE_CONSTANT (TREE_VALUE (elmt))
1537 || (TREE_CODE (type) == RECORD_TYPE
1538 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1539 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1540 || ! initializer_constant_valid_p (TREE_VALUE (elmt),
1541 TREE_TYPE (TREE_VALUE (elmt))))
1544 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1547 /* Propagate an NULL_EXPR from the size of the type. We won't ever
1548 be executing the code we generate here in that case, but handle it
1549 specially to avoid the compiler blowing up. */
1550 if (TREE_CODE (type) == RECORD_TYPE
1552 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1553 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1556 /* If TYPE is a RECORD_TYPE and the fields are not in the
1557 same order as their bit position, don't treat this as constant
1558 since varasm.c can't handle it. */
1559 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1561 tree last_pos = bitsize_zero_node;
1564 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1566 tree this_pos = bit_position (field);
1568 if (TREE_CODE (this_pos) != INTEGER_CST
1569 || tree_int_cst_lt (this_pos, last_pos))
1575 last_pos = this_pos;
1579 result = build_constructor (type, list);
1580 TREE_CONSTANT (result) = allconstant;
1581 TREE_STATIC (result) = allconstant;
1582 TREE_SIDE_EFFECTS (result) = side_effects;
1583 TREE_READONLY (result) = TREE_READONLY (type);
1588 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1589 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1590 for the field. Don't fold the result if NO_FOLD_P is nonzero.
1592 We also handle the fact that we might have been passed a pointer to the
1593 actual record and know how to look for fields in variant parts. */
/* Exactly one of COMPONENT and FIELD must be given.  When FIELD does
   not belong to RECORD_VARIABLE's record type, this searches for a
   field with the same DECL_ORIGINAL_FIELD, then recurses through
   DECL_INTERNAL_P fields (variant parts) in a second pass.  */
1596 build_simple_component_ref (tree record_variable,
/* Use the main variant so typedef-style variants compare equal.  */
1601 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
/* Only record-like types with a known size can be accessed here.  */
1604 if ((TREE_CODE (record_type) != RECORD_TYPE
1605 && TREE_CODE (record_type) != UNION_TYPE
1606 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1607 || TYPE_SIZE (record_type) == 0)
1610 /* Either COMPONENT or FIELD must be specified, but not both. */
1611 if ((component != 0) == (field != 0))
1614 /* If no field was specified, look for a field with the specified name
1615 in the current record only. */
1617 for (field = TYPE_FIELDS (record_type); field;
1618 field = TREE_CHAIN (field))
1619 if (DECL_NAME (field) == component)
1625 /* If this field is not in the specified record, see if we can find
1626 something in the record whose original field is the same as this one. */
1627 if (DECL_CONTEXT (field) != record_type)
1628 /* Check if there is a field with name COMPONENT in the record. */
1632 /* First loop thru normal components. */
1634 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1635 new_field = TREE_CHAIN (new_field))
1636 if (DECL_ORIGINAL_FIELD (new_field) == field
1637 || new_field == DECL_ORIGINAL_FIELD (field)
1638 || (DECL_ORIGINAL_FIELD (field) != 0
1639 && (DECL_ORIGINAL_FIELD (field)
1640 == DECL_ORIGINAL_FIELD (new_field))))
1643 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1644 the component in the first search. Doing this search in 2 steps
1645 is required to avoiding hidden homonymous fields in the
1649 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1650 new_field = TREE_CHAIN (new_field))
1651 if (DECL_INTERNAL_P (new_field))
/* Recurse: reference the internal (variant-part) field, then look
   for FIELD inside it.  */
1654 = build_simple_component_ref (record_variable,
1655 NULL_TREE, new_field, no_fold_p);
1656 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1669 /* It would be nice to call "fold" here, but that can lose a type
1670 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1671 ref = build (COMPONENT_REF, TREE_TYPE (field), record_variable, field);
/* Propagate readonly-ness and volatility from the object, the field,
   and the record type onto the reference.  */
1673 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1674 TREE_READONLY (ref) = 1;
1675 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1676 || TYPE_VOLATILE (record_type))
1677 TREE_THIS_VOLATILE (ref) = 1;
1679 return no_fold_p ? ref : fold (ref);
1682 /* Like build_simple_component_ref, except that we give an error if the
1683 reference could not be found. */
/* Wrapper around build_simple_component_ref.  If the lookup fails and
   FIELD was given, emit a NULL_EXPR that raises a discriminant check
   failure at run time instead of returning a bad tree.  */
1686 build_component_ref (tree record_variable,
1691 tree ref = build_simple_component_ref (record_variable, component, field,
1697 /* If FIELD was specified, assume this is an invalid user field so
1698 raise constraint error. Otherwise, we can't find the type to return, so
1701 else if (field != 0)
1702 return build1 (NULL_EXPR, TREE_TYPE (field),
1703 build_call_raise (CE_Discriminant_Check_Failed));
1708 /* Build a GCC tree to call an allocation or deallocation function.
1709 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1710 generate an allocator.
1712 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1713 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1714 storage pool to use. If not preset, malloc and free will be used except
1715 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1716 object dynamically on the stack frame. */
1719 build_call_alloc_dealloc (tree gnu_obj,
1722 Entity_Id gnat_proc,
1723 Entity_Id gnat_pool,
/* Convert the alignment from bits to bytes for the runtime call.  */
1726 tree gnu_align = size_int (align / BITS_PER_UNIT);
/* A self-referential size must be evaluated relative to the object
   being deallocated.  */
1728 if (CONTAINS_PLACEHOLDER_P (gnu_size))
1729 gnu_size = build (WITH_RECORD_EXPR, sizetype, gnu_size,
1730 build_unary_op (INDIRECT_REF, NULL_TREE, gnu_obj));
1732 if (Present (gnat_proc))
1734 /* The storage pools are obviously always tagged types, but the
1735 secondary stack uses the same mechanism and is not tagged */
1736 if (Is_Tagged_Type (Etype (gnat_pool)))
1738 /* The size is the third parameter; the alignment is the
1740 Entity_Id gnat_size_type
1741 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1742 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1743 tree gnu_proc = gnat_to_gnu (gnat_proc);
1744 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1745 tree gnu_pool = gnat_to_gnu (gnat_pool);
1746 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1747 tree gnu_args = NULL_TREE;
1750 /* The first arg is always the address of the storage pool; next
1751 comes the address of the object, for a deallocator, then the
1752 size and alignment. */
1754 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1758 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1761 = chainon (gnu_args,
1762 build_tree_list (NULL_TREE,
1763 convert (gnu_size_type, gnu_size)));
1765 = chainon (gnu_args,
1766 build_tree_list (NULL_TREE,
1767 convert (gnu_size_type, gnu_align)));
1769 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1770 gnu_proc_addr, gnu_args, NULL_TREE);
1771 TREE_SIDE_EFFECTS (gnu_call) = 1;
1775 /* Secondary stack case. */
1778 /* The size is the second parameter */
1779 Entity_Id gnat_size_type
1780 = Etype (Next_Formal (First_Formal (gnat_proc)));
1781 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1782 tree gnu_proc = gnat_to_gnu (gnat_proc);
1783 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1784 tree gnu_args = NULL_TREE;
1787 /* The first arg is the address of the object, for a
1788 deallocator, then the size */
1791 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1794 = chainon (gnu_args,
1795 build_tree_list (NULL_TREE,
1796 convert (gnu_size_type, gnu_size)));
1798 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1799 gnu_proc_addr, gnu_args, NULL_TREE);
1800 TREE_SIDE_EFFECTS (gnu_call) = 1;
/* No storage pool: deallocation goes through free.  */
1806 return build_call_1_expr (free_decl, gnu_obj);
/* GNAT_PROC == -1 means allocate on the stack frame instead.  */
1807 else if (gnat_pool == -1)
1809 /* If the size is a constant, we can put it in the fixed portion of
1810 the stack frame to avoid the need to adjust the stack pointer. */
1811 if (TREE_CODE (gnu_size) == INTEGER_CST && ! flag_stack_check)
1814 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1815 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1817 create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1818 gnu_array_type, NULL_TREE, 0, 0, 0, 0, 0);
1820 return convert (ptr_void_type_node,
1821 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
/* Variable size (or stack checking on): emit a dynamic stack
   allocation.  */
1824 return build (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
/* Default allocation: flag implicit heap allocations, then malloc.  */
1828 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1829 Check_No_Implicit_Heap_Alloc (gnat_node);
1830 return build_call_1_expr (malloc_decl, gnu_size);
1834 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1835 initial value is INIT, if INIT is nonzero. Convert the expression to
1836 RESULT_TYPE, which must be some type of pointer. Return the tree.
1837 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1838 the storage pool to use. */
1841 build_allocator (tree type,
1844 Entity_Id gnat_proc,
1845 Entity_Id gnat_pool,
1848 tree size = TYPE_SIZE_UNIT (type);
1851 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1852 if (init != 0 && TREE_CODE (init) == NULL_EXPR)
1853 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1855 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1856 sizes of the object and its template. Allocate the whole thing and
1857 fill in the parts that are known. */
1858 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1861 = (TYPE_FAT_POINTER_P (result_type)
1862 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1863 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
/* Build the combined template-plus-object storage type.  */
1865 = build_unc_object_type (template_type, type,
1866 get_identifier ("ALLOC"));
1867 tree storage_ptr_type = build_pointer_type (storage_type);
1869 tree template_cons = NULL_TREE;
1871 size = TYPE_SIZE_UNIT (storage_type);
1873 if (CONTAINS_PLACEHOLDER_P (size))
1874 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1876 /* If the size overflows, pass -1 so the allocator will raise
1878 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1879 size = ssize_int (-1);
1881 storage = build_call_alloc_dealloc (NULL_TREE, size,
1882 TYPE_ALIGN (storage_type),
1883 gnat_proc, gnat_pool, gnat_node);
/* Protect STORAGE since it is used more than once below.  */
1884 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
/* For a padded record, work with the inner field's type.  */
1886 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1888 type = TREE_TYPE (TYPE_FIELDS (type));
1891 init = convert (type, init);
1894 /* If there is an initializing expression, make a constructor for
1895 the entire object including the bounds and copy it into the
1896 object. If there is no initializing expression, just set the
1900 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1902 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1903 build_template (template_type, type,
1909 build (COMPOUND_EXPR, storage_ptr_type,
1911 (MODIFY_EXPR, storage_type,
1912 build_unary_op (INDIRECT_REF, NULL_TREE,
1913 convert (storage_ptr_type, storage)),
1914 gnat_build_constructor (storage_type, template_cons)),
1915 convert (storage_ptr_type, storage)));
/* No initializer: only store the template (bounds) part.  */
1919 (COMPOUND_EXPR, result_type,
1921 (MODIFY_EXPR, template_type,
1923 (build_unary_op (INDIRECT_REF, NULL_TREE,
1924 convert (storage_ptr_type, storage)),
1925 NULL_TREE, TYPE_FIELDS (storage_type), 0),
1926 build_template (template_type, type, NULL_TREE)),
1927 convert (result_type, convert (storage_ptr_type, storage)));
1930 /* If we have an initializing expression, see if its size is simpler
1931 than the size from the type. */
1932 if (init != 0 && TYPE_SIZE_UNIT (TREE_TYPE (init)) != 0
1933 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1934 || CONTAINS_PLACEHOLDER_P (size)))
1935 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1937 /* If the size is still self-referential, reference the initializing
1938 expression, if it is present. If not, this must have been a
1939 call to allocate a library-level object, in which case we use
1940 the maximum size. */
1941 if (CONTAINS_PLACEHOLDER_P (size))
1944 size = max_size (size, 1);
1946 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1949 /* If the size overflows, pass -1 so the allocator will raise
1951 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1952 size = ssize_int (-1);
1954 /* If this is a type whose alignment is larger than the
1955 biggest we support in normal alignment and this is in
1956 the default storage pool, make an "aligning type", allocate
1957 it, point to the field we need, and return that. */
1958 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1961 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1963 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1964 BIGGEST_ALIGNMENT, Empty,
1966 result = save_expr (result);
1967 result = convert (build_pointer_type (new_type), result);
1968 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1969 result = build_component_ref (result, NULL_TREE,
1970 TYPE_FIELDS (new_type), 0);
1971 result = convert (result_type,
1972 build_unary_op (ADDR_EXPR, NULL_TREE, result));
/* Normal case: allocate SIZE bytes and convert to RESULT_TYPE.  */
1975 result = convert (result_type,
1976 build_call_alloc_dealloc (NULL_TREE, size,
1982 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1983 the value, and return the address. Do this with a COMPOUND_EXPR. */
1987 result = save_expr (result);
1989 = build (COMPOUND_EXPR, TREE_TYPE (result),
1991 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (result)),
1992 build_unary_op (INDIRECT_REF, TREE_TYPE (TREE_TYPE (result)),
1998 return convert (result_type, result);
2001 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
2002 GNAT_FORMAL is how we find the descriptor record. */
/* The descriptor record type is taken from the GCC tree previously
   built for GNAT_FORMAL; each field is initialized from its
   DECL_INITIAL, evaluated relative to EXPR when self-referential.  */
2005 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
2007 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
2009 tree const_list = 0;
/* Strip any unconstrained-array wrapper and make EXPR addressable,
   since the descriptor will point at it.  */
2011 expr = maybe_unconstrained_array (expr);
2012 gnat_mark_addressable (expr);
2014 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
2016 tree init = DECL_INITIAL (field);
2018 if (CONTAINS_PLACEHOLDER_P (init))
2019 init = build (WITH_RECORD_EXPR, TREE_TYPE (init), init, expr);
/* Prepend (field, value); the list is reversed before use below.  */
2021 const_list = tree_cons (field, convert (TREE_TYPE (field), init),
2025 return gnat_build_constructor (record_type, nreverse (const_list));
2028 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2029 should not be allocated in a register. Returns true if successful. */
2032 gnat_mark_addressable (tree expr_node)
2035 switch (TREE_CODE (expr_node))
2040 case ARRAY_RANGE_REF:
2043 case VIEW_CONVERT_EXPR:
2045 case NON_LVALUE_EXPR:
2048 expr_node = TREE_OPERAND (expr_node, 0);
2052 TREE_ADDRESSABLE (expr_node) = 1;
2058 put_var_into_stack (expr_node, true);
2059 TREE_ADDRESSABLE (expr_node) = 1;
2063 TREE_ADDRESSABLE (expr_node) = 1;
2067 return (DECL_CONST_CORRESPONDING_VAR (expr_node) != 0
2068 && (gnat_mark_addressable
2069 (DECL_CONST_CORRESPONDING_VAR (expr_node))));