1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
/* Forward declarations of the static helpers defined later in this file.
   The old-style PARAMS macro is the pre-ISO-prototype portability idiom
   used throughout GCC 3.x-era sources.
   NOTE(review): this listing is elided -- the continuation line of the
   nonbinary_modular_operation prototype is not visible here.  */
47 static tree find_common_type PARAMS ((tree, tree));
48 static int contains_save_expr_p PARAMS ((tree));
49 static tree contains_null_expr PARAMS ((tree));
50 static tree compare_arrays PARAMS ((tree, tree, tree));
51 static tree nonbinary_modular_operation PARAMS ((enum tree_code, tree,
53 static tree build_simple_component_ref PARAMS ((tree, tree, tree));
55 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
58 This preparation consists of taking the ordinary representation of
59 an expression expr and producing a valid tree boolean expression
60 describing whether expr is nonzero. We could simply always do
62 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
64 but we optimize comparisons, &&, ||, and !.
66 The resulting type should always be the same as the input type.
67 This function is simpler than the corresponding C version since
68 the only possible operands will be things of Boolean type. */
/* NOTE(review): this listing is elided -- the return type line, EXPR's
   parameter declaration, and some case labels/returns are not visible.  */
71 gnat_truthvalue_conversion (expr)
74 tree type = TREE_TYPE (expr);
76 switch (TREE_CODE (expr))
/* Comparisons and short-circuit logical forms already denote boolean
   values; presumably the elided code returns them unchanged -- confirm
   against the full source.  */
78 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
79 case LT_EXPR: case GT_EXPR:
80 case TRUTH_ANDIF_EXPR:
89 /* Distribute the conversion into the arms of a COND_EXPR. */
91 (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
92 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
93 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
95 case WITH_RECORD_EXPR:
/* Rebuild the WITH_RECORD_EXPR around the converted inner expression,
   keeping the original record operand (operand 1) intact.  */
96 return build (WITH_RECORD_EXPR, type,
97 gnat_truthvalue_conversion (TREE_OPERAND (expr, 0)),
98 TREE_OPERAND (expr, 1));
/* Default: fall back to the explicit comparison against zero described
   in the header comment above.  */
101 return build_binary_op (NE_EXPR, type, expr,
102 convert (type, integer_zero_node));
106 /* Return the base type of TYPE. */
/* NOTE(review): elided listing -- the function header (return type, name,
   and parameter declaration of TYPE) is not visible here.  */
/* A left-justified modular record wraps its scalar in the first field;
   look through the wrapper before stripping subtypes.  */
112 if (TREE_CODE (type) == RECORD_TYPE
113 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type))
114 type = TREE_TYPE (TYPE_FIELDS (type));
/* Walk up the TREE_TYPE chain while we still have an integer or real
   subtype; the final node reached is the base type.  */
116 while (TREE_TYPE (type) != 0
117 && (TREE_CODE (type) == INTEGER_TYPE
118 || TREE_CODE (type) == REAL_TYPE))
119 type = TREE_TYPE (type);
124 /* Likewise, but only return types known to the Ada source. */
/* NOTE(review): elided listing -- the return type line and TYPE's
   parameter declaration are not visible here.  */
126 get_ada_base_type (type)
/* Same walk as get_base_type, but stop at compiler-generated "extra"
   subtypes so the result is still a type the Ada program declared.  */
129 while (TREE_TYPE (type) != 0
130 && (TREE_CODE (type) == INTEGER_TYPE
131 || TREE_CODE (type) == REAL_TYPE)
132 && ! TYPE_EXTRA_SUBTYPE_P (type))
133 type = TREE_TYPE (type);
138 /* EXP is a GCC tree representing an address. See if we can find how
139 strictly the object at that address is aligned. Return that alignment
140 in bits. If we don't know anything about the alignment, return 0. */
/* NOTE(review): elided listing -- the return type line, EXP's parameter
   declaration, several case labels, and break statements are missing.  */
143 known_alignment (exp)
146 unsigned int this_alignment;
147 unsigned int lhs, rhs;
148 unsigned int type_alignment;
150 /* For pointer expressions, we know that the designated object is always at
151 least as strictly aligned as the designated subtype, so we account for
152 both type and expression information in this case.
154 Beware that we can still get a dummy designated subtype here (e.g. Taft
155 Amendment types), in which the alignment information is meaningless and
158 We always compute a type_alignment value and return the MAX of it
159 compared with what we get from the expression tree. Just set the
160 type_alignment value to 0 when the type information is to be ignored. */
162 = ((POINTER_TYPE_P (TREE_TYPE (exp))
163 && ! TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
164 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
166 switch (TREE_CODE (exp))
170 case NON_LVALUE_EXPR:
171 /* Conversions between pointers and integers don't change the alignment
172 of the underlying object. */
173 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
178 /* If two addresses are added, the alignment of the result is the
179 minimum of the two alignments. */
180 lhs = known_alignment (TREE_OPERAND (exp, 0));
181 rhs = known_alignment (TREE_OPERAND (exp, 1));
182 this_alignment = MIN (lhs, rhs);
186 /* The first part of this represents the lowest bit in the constant,
187 but it is in bytes, not bits. */
/* (x & -x) isolates the lowest set bit of the INTEGER_CST value.  */
190 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
195 /* If we know the alignment of just one side, use it. Otherwise,
196 use the product of the alignments. */
197 lhs = known_alignment (TREE_OPERAND (exp, 0));
198 rhs = known_alignment (TREE_OPERAND (exp, 1));
200 if (lhs == 0 || rhs == 0)
201 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
203 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
207 this_alignment = expr_align (TREE_OPERAND (exp, 0));
/* Result is the stronger of the type-derived and expression-derived
   alignments.  */
215 return MAX (type_alignment, this_alignment);
218 /* We have a comparison or assignment operation on two types, T1 and T2,
219 which are both either array types or both record types.
220 Return the type that both operands should be converted to, if any.
221 Otherwise return zero. */
/* NOTE(review): elided listing -- the static return type line, parameter
   declarations, and the individual return statements are not visible.  */
224 find_common_type (t1, t2)
227 /* If either type is non-BLKmode, use it. Note that we know that we will
228 not have any alignment problems since if we did the non-BLKmode
229 type could not have been used. */
230 if (TYPE_MODE (t1) != BLKmode)
232 else if (TYPE_MODE (t2) != BLKmode)
235 /* Otherwise, return the type that has a constant size. */
236 if (TREE_CONSTANT (TYPE_SIZE (t1)))
238 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
241 /* In this case, both types have variable size. It's probably
242 best to leave the "type mismatch" because changing it could
243 cause a bad self-referential reference. */
247 /* See if EXP contains a SAVE_EXPR in a position where we would
250 ??? This is a real kludge, but is probably the best approach short
251 of some very general solution. */
/* NOTE(review): elided listing -- the static int header line, the
   SAVE_EXPR/CONSTRUCTOR/TREE_LIST case labels, and the default return
   are not visible here.  */
254 contains_save_expr_p (exp)
257 switch (TREE_CODE (exp))
/* Unary references and conversions: recurse into the sole operand.  */
262 case ADDR_EXPR: case INDIRECT_REF:
264 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
265 return contains_save_expr_p (TREE_OPERAND (exp, 0));
/* Constructor: check its element list, if any.  */
268 return (CONSTRUCTOR_ELTS (exp) != 0
269 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
/* Tree list: check this node's value and then the rest of the chain.  */
272 return (contains_save_expr_p (TREE_VALUE (exp))
273 || (TREE_CHAIN (exp) != 0
274 && contains_save_expr_p (TREE_CHAIN (exp))));
281 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
282 it if so. This is used to detect types whose sizes involve computations
283 that are known to raise Constraint_Error. */
/* NOTE(review): elided listing -- the static tree header, the TEM
   declaration, the class-case labels, and the "if (tem != 0) return tem"
   glue between the recursive probes are not visible here.  */
286 contains_null_expr (exp)
291 if (TREE_CODE (exp) == NULL_EXPR)
/* Dispatch first on the code class (unary/binary), then on specific
   codes for the remaining shapes.  */
294 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
297 return contains_null_expr (TREE_OPERAND (exp, 0));
300 tem = contains_null_expr (TREE_OPERAND (exp, 0));
304 return contains_null_expr (TREE_OPERAND (exp, 1));
307 switch (TREE_CODE (exp))
310 return contains_null_expr (TREE_OPERAND (exp, 0));
/* Ternary shapes (e.g. COND_EXPR): probe all three operands.  */
313 tem = contains_null_expr (TREE_OPERAND (exp, 0));
317 tem = contains_null_expr (TREE_OPERAND (exp, 1));
321 return contains_null_expr (TREE_OPERAND (exp, 2));
332 /* Return an expression tree representing an equality comparison of
333 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
334 be of type RESULT_TYPE
336 Two arrays are equal in one of two ways: (1) if both have zero length
337 in some dimension (not necessarily the same dimension) or (2) if the
338 lengths in each dimension are equal and the data is equal. We perform the
339 length tests in as efficient a manner as possible. */
/* NOTE(review): elided listing -- the static tree header, parameter
   declarations, the TEM/NBT declarations, the t1/t2 advance at the loop
   bottom, the length_zero_p updates, and several braces are missing.  */
342 compare_arrays (result_type, a1, a2)
346 tree t1 = TREE_TYPE (a1);
347 tree t2 = TREE_TYPE (a2);
348 tree result = convert (result_type, integer_one_node);
349 tree a1_is_null = convert (result_type, integer_zero_node);
350 tree a2_is_null = convert (result_type, integer_zero_node);
351 int length_zero_p = 0;
353 /* Process each dimension separately and compare the lengths. If any
354 dimension has a size known to be zero, set LENGTH_ZERO_P to 1 to
355 suppress the comparison of the data. */
356 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
/* Per-dimension bounds; length fields hold length - 1 (ub - lb).  */
358 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
359 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
360 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
361 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
362 tree bt = get_base_type (TREE_TYPE (lb1));
363 tree length1 = fold (build (MINUS_EXPR, bt, ub1, lb1));
364 tree length2 = fold (build (MINUS_EXPR, bt, ub2, lb2));
367 tree comparison, this_a1_is_null, this_a2_is_null;
369 /* If the length of the first array is a constant, swap our operands
370 unless the length of the second array is the constant zero.
371 Note that we have set the `length' values to the length - 1. */
372 if (TREE_CODE (length1) == INTEGER_CST
373 && ! integer_zerop (fold (build (PLUS_EXPR, bt, length2,
374 convert (bt, integer_one_node)))))
376 tem = a1, a1 = a2, a2 = tem;
377 tem = t1, t1 = t2, t2 = tem;
378 tem = lb1, lb1 = lb2, lb2 = tem;
379 tem = ub1, ub1 = ub2, ub2 = tem;
380 tem = length1, length1 = length2, length2 = tem;
381 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
384 /* If the length of this dimension in the second array is the constant
385 zero, we can just go inside the original bounds for the first
386 array and see if last < first. */
387 if (integer_zerop (fold (build (PLUS_EXPR, bt, length2,
388 convert (bt, integer_one_node)))))
390 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
391 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
393 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
/* Self-referential bounds must be evaluated relative to the object;
   wrap in WITH_RECORD_EXPR when a PLACEHOLDER_EXPR is present.  */
395 if (CONTAINS_PLACEHOLDER_P (comparison))
396 comparison = build (WITH_RECORD_EXPR, result_type,
398 if (CONTAINS_PLACEHOLDER_P (length1))
399 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
403 this_a1_is_null = comparison;
404 this_a2_is_null = convert (result_type, integer_one_node);
407 /* If the length is some other constant value, we know that
408 this dimension in the first array cannot be superflat, so we
409 can just use its length from the actual stored bounds. */
410 else if (TREE_CODE (length2) == INTEGER_CST)
412 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
413 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
414 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
415 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
416 nbt = get_base_type (TREE_TYPE (ub1));
419 = build_binary_op (EQ_EXPR, result_type,
420 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
421 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
423 /* Note that we know that UB2 and LB2 are constant and hence
424 cannot contain a PLACEHOLDER_EXPR. */
426 if (CONTAINS_PLACEHOLDER_P (comparison))
427 comparison = build (WITH_RECORD_EXPR, result_type, comparison, a1);
428 if (CONTAINS_PLACEHOLDER_P (length1))
429 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
431 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
432 this_a2_is_null = convert (result_type, integer_zero_node);
435 /* Otherwise compare the computed lengths. */
438 if (CONTAINS_PLACEHOLDER_P (length1))
439 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
440 if (CONTAINS_PLACEHOLDER_P (length2))
441 length2 = build (WITH_RECORD_EXPR, bt, length2, a2);
444 = build_binary_op (EQ_EXPR, result_type, length1, length2);
/* length < 0 here means length - 1 < 0, i.e. the array is empty.  */
447 = build_binary_op (LT_EXPR, result_type, length1,
448 convert (bt, integer_zero_node));
450 = build_binary_op (LT_EXPR, result_type, length2,
451 convert (bt, integer_zero_node));
/* Accumulate this dimension's test into the running result and the
   per-operand emptiness disjunctions.  */
454 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
457 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
458 this_a1_is_null, a1_is_null);
459 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
460 this_a2_is_null, a2_is_null);
466 /* Unless the size of some bound is known to be zero, compare the
467 data in the array. */
470 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
473 a1 = convert (type, a1), a2 = convert (type, a2);
475 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
476 fold (build (EQ_EXPR, result_type, a1, a2)));
480 /* The result is also true if both sizes are zero. */
481 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
482 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
483 a1_is_null, a2_is_null),
486 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
487 starting the comparison above since the place it would be otherwise
488 evaluated would be wrong. */
490 if (contains_save_expr_p (a1))
491 result = build (COMPOUND_EXPR, result_type, a1, result);
493 if (contains_save_expr_p (a2))
494 result = build (COMPOUND_EXPR, result_type, a2, result);
499 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
500 type TYPE. We know that TYPE is a modular type with a nonbinary
/* NOTE(review): elided listing -- the static tree header, parameter
   declarations, the op_type/result/unsignedp declarations, and parts of
   the COND_EXPR constructions near the end are not visible here.  */
504 nonbinary_modular_operation (op_code, type, lhs, rhs)
505 enum tree_code op_code;
509 tree modulus = TYPE_MODULUS (type);
510 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
511 unsigned int precision;
516 /* If this is an addition of a constant, convert it to a subtraction
517 of a constant since we can do that faster. */
518 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
519 rhs = fold (build (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
521 /* For the logical operations, we only need PRECISION bits. For
522 addition and subtraction, we need one more and for multiplication we
523 need twice as many. But we never want to make a size smaller than
525 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
526 needed_precision += 1;
527 else if (op_code == MULT_EXPR)
528 needed_precision *= 2;
530 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
532 /* Unsigned will do for everything but subtraction. */
533 if (op_code == MINUS_EXPR)
536 /* If our type is the wrong signedness or isn't wide enough, make a new
537 type and convert both our operands to it. */
538 if (TYPE_PRECISION (op_type) < precision
539 || TREE_UNSIGNED (op_type) != unsignedp)
541 /* Copy the node so we ensure it can be modified to make it modular. */
542 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
543 modulus = convert (op_type, modulus);
544 SET_TYPE_MODULUS (op_type, modulus);
545 TYPE_MODULAR_P (op_type) = 1;
546 lhs = convert (op_type, lhs);
547 rhs = convert (op_type, rhs);
550 /* Do the operation, then we'll fix it up. */
551 result = fold (build (op_code, op_type, lhs, rhs));
553 /* For multiplication, we have no choice but to do a full modulus
554 operation. However, we want to do this in the narrowest
556 if (op_code == MULT_EXPR)
558 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
559 modulus = convert (div_type, modulus);
560 SET_TYPE_MODULUS (div_type, modulus);
561 TYPE_MODULAR_P (div_type) = 1;
562 result = convert (op_type,
563 fold (build (TRUNC_MOD_EXPR, div_type,
564 convert (div_type, result), modulus)));
567 /* For subtraction, add the modulus back if we are negative. */
568 else if (op_code == MINUS_EXPR)
/* save_expr keeps RESULT evaluated once even though the COND_EXPR
   references it twice (test and arm).  */
570 result = save_expr (result);
571 result = fold (build (COND_EXPR, op_type,
572 build (LT_EXPR, integer_type_node, result,
573 convert (op_type, integer_zero_node)),
574 fold (build (PLUS_EXPR, op_type,
579 /* For the other operations, subtract the modulus if we are >= it. */
582 result = save_expr (result);
583 result = fold (build (COND_EXPR, op_type,
584 build (GE_EXPR, integer_type_node,
586 fold (build (MINUS_EXPR, op_type,
/* Convert back to the caller's modular type.  */
591 return convert (type, result);
594 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
595 desired for the result. Usually the operation is to be performed
596 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
597 in which case the type to be used will be derived from the operands.
599 This function is very much unlike the ones for C and C++ since we
600 have already done any type conversion and matching required. All we
601 have to do here is validate the work done by SEM and handle subtypes. */
/* NOTE(review): elided listing -- the tree header, several parameter and
   local declarations (result, best_type, modulus), the case labels of the
   main switch (MODIFY_EXPR, ARRAY_REF, comparison codes, shifts, etc.),
   many braces, and break statements are not visible here.  */
604 build_binary_op (op_code, result_type, left_operand, right_operand)
605 enum tree_code op_code;
610 tree left_type = TREE_TYPE (left_operand);
611 tree right_type = TREE_TYPE (right_operand);
612 tree left_base_type = get_base_type (left_type);
613 tree right_base_type = get_base_type (right_type);
614 tree operation_type = result_type;
618 int has_side_effects = 0;
620 /* If one (but not both, unless they have the same object) operands are a
621 WITH_RECORD_EXPR, do the operation and then surround it with the
622 WITH_RECORD_EXPR. Don't do this for assignment, for an ARRAY_REF, or
623 for an ARRAY_RANGE_REF because we need to keep track of the
624 WITH_RECORD_EXPRs on both operands very carefully. */
625 if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
626 && op_code != ARRAY_RANGE_REF
627 && TREE_CODE (left_operand) == WITH_RECORD_EXPR
628 && (TREE_CODE (right_operand) != WITH_RECORD_EXPR
629 || operand_equal_p (TREE_OPERAND (left_operand, 1),
630 TREE_OPERAND (right_operand, 1), 0)))
632 tree right = right_operand;
634 if (TREE_CODE (right) == WITH_RECORD_EXPR)
635 right = TREE_OPERAND (right, 0);
637 result = build_binary_op (op_code, result_type,
638 TREE_OPERAND (left_operand, 0), right);
639 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
640 TREE_OPERAND (left_operand, 1));
/* Mirror case: only the right operand is a WITH_RECORD_EXPR.  */
642 else if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
643 && op_code != ARRAY_RANGE_REF
644 && TREE_CODE (left_operand) != WITH_RECORD_EXPR
645 && TREE_CODE (right_operand) == WITH_RECORD_EXPR)
647 result = build_binary_op (op_code, result_type, left_operand,
648 TREE_OPERAND (right_operand, 0));
649 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
650 TREE_OPERAND (right_operand, 1));
/* Normalize OPERATION_TYPE: look through left-justified modular records
   and strip compiler-generated subtypes.  */
653 if (operation_type != 0
654 && TREE_CODE (operation_type) == RECORD_TYPE
655 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
656 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
658 if (operation_type != 0
659 && ! AGGREGATE_TYPE_P (operation_type)
660 && TYPE_EXTRA_SUBTYPE_P (operation_type))
661 operation_type = get_base_type (operation_type);
663 modulus = (operation_type != 0 && TREE_CODE (operation_type) == INTEGER_TYPE
664 && TYPE_MODULAR_P (operation_type)
665 ? TYPE_MODULUS (operation_type) : 0);
670 /* If there were any integral or pointer conversions on LHS, remove
671 them; we'll be putting them back below if needed. Likewise for
672 conversions between array and record types. But don't do this if
673 the right operand is not BLKmode (for packed arrays)
674 unless we are not changing the mode. */
675 while ((TREE_CODE (left_operand) == CONVERT_EXPR
676 || TREE_CODE (left_operand) == NOP_EXPR
677 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
678 && (((INTEGRAL_TYPE_P (left_type)
679 || POINTER_TYPE_P (left_type))
680 && (INTEGRAL_TYPE_P (TREE_TYPE
681 (TREE_OPERAND (left_operand, 0)))
682 || POINTER_TYPE_P (TREE_TYPE
683 (TREE_OPERAND (left_operand, 0)))))
684 || (((TREE_CODE (left_type) == RECORD_TYPE
685 /* Don't remove conversions to left-justified modular
687 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
688 || TREE_CODE (left_type) == ARRAY_TYPE)
689 && ((TREE_CODE (TREE_TYPE
690 (TREE_OPERAND (left_operand, 0)))
692 || (TREE_CODE (TREE_TYPE
693 (TREE_OPERAND (left_operand, 0)))
695 && (TYPE_MODE (right_type) == BLKmode
696 || (TYPE_MODE (left_type)
697 == TYPE_MODE (TREE_TYPE
699 (left_operand, 0))))))))
701 left_operand = TREE_OPERAND (left_operand, 0);
702 left_type = TREE_TYPE (left_operand);
705 if (operation_type == 0)
706 operation_type = left_type;
708 /* If the RHS has a conversion between record and array types and
709 an inner type is no worse, use it. Note we cannot do this for
710 modular types or types with TYPE_ALIGN_OK, since the latter
711 might indicate a conversion between a root type and a class-wide
712 type, which we must not remove. */
713 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
714 && ((TREE_CODE (right_type) == RECORD_TYPE
715 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type)
716 && ! TYPE_ALIGN_OK (right_type)
717 && ! TYPE_IS_FAT_POINTER_P (right_type))
718 || TREE_CODE (right_type) == ARRAY_TYPE)
719 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
721 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
722 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
724 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
725 && ! (TYPE_IS_FAT_POINTER_P
726 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
727 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
730 == find_common_type (right_type,
731 TREE_TYPE (TREE_OPERAND
732 (right_operand, 0))))
733 || right_type != best_type))
735 right_operand = TREE_OPERAND (right_operand, 0);
736 right_type = TREE_TYPE (right_operand);
739 /* If we are copying one array or record to another, find the best type
741 if (((TREE_CODE (left_type) == ARRAY_TYPE
742 && TREE_CODE (right_type) == ARRAY_TYPE)
743 || (TREE_CODE (left_type) == RECORD_TYPE
744 && TREE_CODE (right_type) == RECORD_TYPE))
745 && (best_type = find_common_type (left_type, right_type)) != 0)
746 operation_type = best_type;
748 /* If a class-wide type may be involved, force use of the RHS type. */
749 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
750 operation_type = right_type;
752 /* Ensure everything on the LHS is valid. If we have a field reference,
753 strip anything that get_inner_reference can handle. Then remove any
754 conversions between types having the same code and mode. Mark
755 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
756 either an INDIRECT_REF or a decl. */
757 result = left_operand;
760 tree restype = TREE_TYPE (result);
762 if (TREE_CODE (result) == COMPONENT_REF
763 || TREE_CODE (result) == ARRAY_REF
764 || TREE_CODE (result) == ARRAY_RANGE_REF)
765 while (handled_component_p (result))
766 result = TREE_OPERAND (result, 0);
767 else if (TREE_CODE (result) == REALPART_EXPR
768 || TREE_CODE (result) == IMAGPART_EXPR
769 || TREE_CODE (result) == WITH_RECORD_EXPR
770 || ((TREE_CODE (result) == NOP_EXPR
771 || TREE_CODE (result) == CONVERT_EXPR)
772 && (((TREE_CODE (restype)
773 == TREE_CODE (TREE_TYPE
774 (TREE_OPERAND (result, 0))))
775 && (TYPE_MODE (TREE_TYPE
776 (TREE_OPERAND (result, 0)))
777 == TYPE_MODE (restype)))
778 || TYPE_ALIGN_OK (restype))))
779 result = TREE_OPERAND (result, 0);
780 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
782 TREE_ADDRESSABLE (result) = 1;
783 result = TREE_OPERAND (result, 0);
/* Sanity check: the stripped LHS must be a dereference, a NULL_EXPR,
   or a declaration (the elided code presumably aborts otherwise --
   confirm against the full source).  */
789 if (TREE_CODE (result) != INDIRECT_REF && TREE_CODE (result) != NULL_EXPR
790 && ! DECL_P (result))
793 /* Convert the right operand to the operation type unless
794 it is either already of the correct type or if the type
795 involves a placeholder, since the RHS may not have the same
797 if (operation_type != right_type
798 && (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
800 /* For a variable-size type, with both BLKmode, convert using
801 CONVERT_EXPR instead of an unchecked conversion since we don't
802 need to make a temporary (and can't anyway). */
803 if (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
804 && TYPE_MODE (TREE_TYPE (right_operand)) == BLKmode
805 && TREE_CODE (right_operand) != UNCONSTRAINED_ARRAY_REF)
806 right_operand = build1 (CONVERT_EXPR, operation_type,
809 right_operand = convert (operation_type, right_operand);
811 right_type = operation_type;
814 /* If the modes differ, make up a bogus type and convert the RHS to
815 it. This can happen with packed types. */
816 if (TYPE_MODE (left_type) != TYPE_MODE (right_type))
818 tree new_type = copy_node (left_type);
820 TYPE_SIZE (new_type) = TYPE_SIZE (right_type);
821 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (right_type);
822 TYPE_MAIN_VARIANT (new_type) = new_type;
823 right_operand = convert (new_type, right_operand);
826 has_side_effects = 1;
831 if (operation_type == 0)
832 operation_type = TREE_TYPE (left_type);
834 /* ... fall through ... */
836 case ARRAY_RANGE_REF:
838 /* First convert the right operand to its base type. This will
839 prevent unneeded signedness conversions when sizetype is wider than
841 right_operand = convert (right_base_type, right_operand);
842 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
/* A non-constant index (or non-constant lower bound) forces the array
   into memory so the reference can be formed.  */
844 if (! TREE_CONSTANT (right_operand)
845 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
846 gnat_mark_addressable (left_operand);
855 if (POINTER_TYPE_P (left_type))
858 /* ... fall through ... */
862 /* If either operand is a NULL_EXPR, just return a new one. */
863 if (TREE_CODE (left_operand) == NULL_EXPR)
864 return build (op_code, result_type,
865 build1 (NULL_EXPR, integer_type_node,
866 TREE_OPERAND (left_operand, 0)),
869 else if (TREE_CODE (right_operand) == NULL_EXPR)
870 return build (op_code, result_type,
871 build1 (NULL_EXPR, integer_type_node,
872 TREE_OPERAND (right_operand, 0)),
875 /* If either object is a left-justified modular type, get the
876 fields from within. */
877 if (TREE_CODE (left_type) == RECORD_TYPE
878 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
880 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
882 left_type = TREE_TYPE (left_operand);
883 left_base_type = get_base_type (left_type);
886 if (TREE_CODE (right_type) == RECORD_TYPE
887 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type))
889 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
891 right_type = TREE_TYPE (right_operand);
892 right_base_type = get_base_type (right_type);
895 /* If both objects are arrays, compare them specially. */
896 if ((TREE_CODE (left_type) == ARRAY_TYPE
897 || (TREE_CODE (left_type) == INTEGER_TYPE
898 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
899 && (TREE_CODE (right_type) == ARRAY_TYPE
900 || (TREE_CODE (right_type) == INTEGER_TYPE
901 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
903 result = compare_arrays (result_type, left_operand, right_operand);
905 if (op_code == EQ_EXPR)
907 else if (op_code == NE_EXPR)
908 result = invert_truthvalue (result);
915 /* Otherwise, the base types must be the same unless the objects are
916 records. If we have records, use the best type and convert both
917 operands to that type. */
918 if (left_base_type != right_base_type)
920 if (TREE_CODE (left_base_type) == RECORD_TYPE
921 && TREE_CODE (right_base_type) == RECORD_TYPE)
923 /* The only way these are permitted to be the same is if both
924 types have the same name. In that case, one of them must
925 not be self-referential. Use that one as the best type.
926 Even better is if one is of fixed size. */
929 if (TYPE_NAME (left_base_type) == 0
930 || TYPE_NAME (left_base_type) != TYPE_NAME (right_base_type))
933 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
934 best_type = left_base_type;
935 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
936 best_type = right_base_type;
937 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
938 best_type = left_base_type;
939 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
940 best_type = right_base_type;
944 left_operand = convert (best_type, left_operand);
945 right_operand = convert (best_type, right_operand);
951 /* If we are comparing a fat pointer against zero, we need to
952 just compare the data pointer. */
953 else if (TYPE_FAT_POINTER_P (left_base_type)
954 && TREE_CODE (right_operand) == CONSTRUCTOR
955 && integer_zerop (TREE_VALUE (CONSTRUCTOR_ELTS (right_operand))))
957 right_operand = build_component_ref (left_operand, NULL_TREE,
958 TYPE_FIELDS (left_base_type));
959 left_operand = convert (TREE_TYPE (right_operand),
964 left_operand = convert (left_base_type, left_operand);
965 right_operand = convert (right_base_type, right_operand);
971 case PREINCREMENT_EXPR:
972 case PREDECREMENT_EXPR:
973 case POSTINCREMENT_EXPR:
974 case POSTDECREMENT_EXPR:
975 /* In these, the result type and the left operand type should be the
976 same. Do the operation in the base type of those and convert the
977 right operand (which is an integer) to that type.
979 Note that these operations are only used in loop control where
980 we guarantee that no overflow can occur. So nothing special need
981 be done for modular types. */
983 if (left_type != result_type)
986 operation_type = get_base_type (result_type);
987 left_operand = convert (operation_type, left_operand);
988 right_operand = convert (operation_type, right_operand);
989 has_side_effects = 1;
997 /* The RHS of a shift can be any type. Also, ignore any modulus
998 (we used to abort, but this is needed for unchecked conversion
999 to modular types). Otherwise, processing is the same as normal. */
1000 if (operation_type != left_base_type)
1004 left_operand = convert (operation_type, left_operand);
1007 case TRUTH_ANDIF_EXPR:
1008 case TRUTH_ORIF_EXPR:
1009 case TRUTH_AND_EXPR:
1011 case TRUTH_XOR_EXPR:
/* Boolean operations: normalize both operands to truth values first.  */
1012 left_operand = gnat_truthvalue_conversion (left_operand);
1013 right_operand = gnat_truthvalue_conversion (right_operand);
1019 /* For binary modulus, if the inputs are in range, so are the
1021 if (modulus != 0 && integer_pow2p (modulus))
1027 if (TREE_TYPE (result_type) != left_base_type
1028 || TREE_TYPE (result_type) != right_base_type)
1031 left_operand = convert (left_base_type, left_operand);
1032 right_operand = convert (right_base_type, right_operand);
1035 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1036 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1037 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1038 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1039 /* These always produce results lower than either operand. */
1045 /* The result type should be the same as the base types of
1046 both operands (and they should be the same). Convert
1047 everything to the result type. */
1049 if (operation_type != left_base_type
1050 || left_base_type != right_base_type)
1053 left_operand = convert (operation_type, left_operand);
1054 right_operand = convert (operation_type, right_operand);
/* Nonbinary modular arithmetic needs the special expansion; binary
   (power-of-two) moduli are handled by the FLOOR_MOD below.  */
1057 if (modulus != 0 && ! integer_pow2p (modulus))
1059 result = nonbinary_modular_operation (op_code, operation_type,
1060 left_operand, right_operand);
1063 /* If either operand is a NULL_EXPR, just return a new one. */
1064 else if (TREE_CODE (left_operand) == NULL_EXPR)
1065 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1066 else if (TREE_CODE (right_operand) == NULL_EXPR)
1067 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1069 result = fold (build (op_code, operation_type,
1070 left_operand, right_operand));
1072 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1073 TREE_CONSTANT (result)
1074 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1075 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1077 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1078 && TYPE_VOLATILE (operation_type))
1079 TREE_THIS_VOLATILE (result) = 1;
1081 /* If we are working with modular types, perform the MOD operation
1082 if something above hasn't eliminated the need for it. */
1084 result = fold (build (FLOOR_MOD_EXPR, operation_type, result,
1085 convert (operation_type, modulus)));
1087 if (result_type != 0 && result_type != operation_type)
1088 result = convert (result_type, result);
1093 /* Similar, but for unary operations. */
/* Build a tree for the unary operation OP_CODE applied to OPERAND, giving
   the result type RESULT_TYPE (or a type derived from OPERAND if zero).
   Handles address-taking, dereference of fat/thin pointers, and modular
   arithmetic.  NOTE(review): this listing is missing source lines between
   the numbered lines below; do not assume the visible lines are contiguous.  */
1096 build_unary_op (op_code, result_type, operand)
1097 enum tree_code op_code;
1101 tree type = TREE_TYPE (operand);
1102 tree base_type = get_base_type (type);
1103 tree operation_type = result_type;
1105 int side_effects = 0;
1107 /* If we have a WITH_RECORD_EXPR as our operand, do the operation first,
1108 then surround it with the WITH_RECORD_EXPR. This allows GCC to do better
1109 expression folding. */
1110 if (TREE_CODE (operand) == WITH_RECORD_EXPR)
1112 result = build_unary_op (op_code, result_type,
1113 TREE_OPERAND (operand, 0));
1114 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
1115 TREE_OPERAND (operand, 1));
/* A left-justified modular record is operated on via its single field's
   type; an "extra" scalar subtype is widened to its base type.  */
1118 if (operation_type != 0
1119 && TREE_CODE (operation_type) == RECORD_TYPE
1120 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
1121 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1123 if (operation_type != 0
1124 && ! AGGREGATE_TYPE_P (operation_type)
1125 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1126 operation_type = get_base_type (operation_type);
1132 if (operation_type == 0)
1133 result_type = operation_type = TREE_TYPE (type);
1134 else if (result_type != TREE_TYPE (type))
1137 result = fold (build1 (op_code, operation_type, operand));
1140 case TRUTH_NOT_EXPR:
1141 if (result_type != base_type)
1144 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1147 case ATTR_ADDR_EXPR:
1149 switch (TREE_CODE (operand))
1152 case UNCONSTRAINED_ARRAY_REF:
1153 result = TREE_OPERAND (operand, 0);
1155 /* Make sure the type here is a pointer, not a reference.
1156 GCC wants pointer types for function addresses. */
1157 if (result_type == 0)
1158 result_type = build_pointer_type (type);
1163 TREE_TYPE (result) = type = build_pointer_type (type);
1167 case ARRAY_RANGE_REF:
1170 /* If this is for 'Address, find the address of the prefix and
1171 add the offset to the field. Otherwise, do this the normal
1173 if (op_code == ATTR_ADDR_EXPR)
1175 HOST_WIDE_INT bitsize;
1176 HOST_WIDE_INT bitpos;
1178 enum machine_mode mode;
1179 int unsignedp, volatilep;
1181 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1182 &mode, &unsignedp, &volatilep);
1184 /* If INNER is a padding type whose field has a self-referential
1185 size, convert to that inner type. We know the offset is zero
1186 and we need to have that type visible. */
1187 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1188 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1189 && (CONTAINS_PLACEHOLDER_P
1190 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1191 (TREE_TYPE (inner)))))))
1192 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1195 /* Compute the offset as a byte offset from INNER. */
1197 offset = size_zero_node;
1199 if (bitpos % BITS_PER_UNIT != 0)
1201 ("taking address of object not aligned on storage unit?",
1204 offset = size_binop (PLUS_EXPR, offset,
1205 size_int (bitpos / BITS_PER_UNIT));
1207 /* Take the address of INNER, convert the offset to void *, and
1208 add them. It will later be converted to the desired result
1210 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1211 inner = convert (ptr_void_type_node, inner);
1212 offset = convert (ptr_void_type_node, offset);
1213 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1215 result = convert (build_pointer_type (TREE_TYPE (operand)),
1222 /* If this is just a constructor for a padded record, we can
1223 just take the address of the single field and convert it to
1224 a pointer to our type. */
1225 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1228 = build_unary_op (ADDR_EXPR, NULL_TREE,
1229 TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
1230 result = convert (build_pointer_type (TREE_TYPE (operand)),
1238 if (AGGREGATE_TYPE_P (type)
1239 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1240 return build_unary_op (ADDR_EXPR, result_type,
1241 TREE_OPERAND (operand, 0));
1243 /* If this NOP_EXPR doesn't change the mode, get the result type
1244 from this type and go down. We need to do this in case
1245 this is a conversion of a CONST_DECL. */
1246 if (TYPE_MODE (type) != BLKmode
1247 && (TYPE_MODE (type)
1248 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1249 return build_unary_op (ADDR_EXPR,
1251 ? build_pointer_type (type)
1253 TREE_OPERAND (operand, 0));
1257 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1259 /* ... fall through ... */
1264 /* If we are taking the address of a padded record whose field
1265 contains a template, take the address of the template. */
1266 if (TREE_CODE (type) == RECORD_TYPE
1267 && TYPE_IS_PADDING_P (type)
1268 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1269 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1271 type = TREE_TYPE (TYPE_FIELDS (type));
1272 operand = convert (type, operand);
1275 if (type != error_mark_node)
1276 operation_type = build_pointer_type (type);
/* The operand must be forced into memory before its address is valid.  */
1278 gnat_mark_addressable (operand);
1279 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1282 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1286 /* If we want to refer to an entire unconstrained array,
1287 make up an expression to do so. This will never survive to
1288 the backend. If TYPE is a thin pointer, first convert the
1289 operand to a fat pointer. */
1290 if (TYPE_THIN_POINTER_P (type)
1291 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
1294 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1296 type = TREE_TYPE (operand);
1299 if (TYPE_FAT_POINTER_P (type))
1300 result = build1 (UNCONSTRAINED_ARRAY_REF,
1301 TYPE_UNCONSTRAINED_ARRAY (type), operand);
/* &x dereferenced is just x: cancel the ADDR_EXPR directly.  */
1303 else if (TREE_CODE (operand) == ADDR_EXPR)
1304 result = TREE_OPERAND (operand, 0);
1308 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1309 TREE_READONLY (result) = TREE_READONLY (TREE_TYPE (type));
1313 = (! TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
1319 tree modulus = ((operation_type != 0
1320 && TREE_CODE (operation_type) == INTEGER_TYPE
1321 && TYPE_MODULAR_P (operation_type))
1322 ? TYPE_MODULUS (operation_type) : 0);
1323 int mod_pow2 = modulus != 0 && integer_pow2p (modulus);
1325 /* If this is a modular type, there are various possibilities
1326 depending on the operation and whether the modulus is a
1327 power of two or not. */
1331 if (operation_type != base_type)
1334 operand = convert (operation_type, operand);
1336 /* The fastest in the negate case for binary modulus is
1337 the straightforward code; the TRUNC_MOD_EXPR below
1338 is an AND operation. */
1339 if (op_code == NEGATE_EXPR && mod_pow2)
1340 result = fold (build (TRUNC_MOD_EXPR, operation_type,
1341 fold (build1 (NEGATE_EXPR, operation_type,
1345 /* For nonbinary negate case, return zero for zero operand,
1346 else return the modulus minus the operand. If the modulus
1347 is a power of two minus one, we can do the subtraction
1348 as an XOR since it is equivalent and faster on most machines. */
1349 else if (op_code == NEGATE_EXPR && ! mod_pow2)
1351 if (integer_pow2p (fold (build (PLUS_EXPR, operation_type,
1353 convert (operation_type,
1354 integer_one_node)))))
1355 result = fold (build (BIT_XOR_EXPR, operation_type,
1358 result = fold (build (MINUS_EXPR, operation_type,
1361 result = fold (build (COND_EXPR, operation_type,
1362 fold (build (NE_EXPR, integer_type_node,
1364 convert (operation_type,
1365 integer_zero_node))),
1370 /* For the NOT cases, we need a constant equal to
1371 the modulus minus one. For a binary modulus, we
1372 XOR against the constant and subtract the operand from
1373 that constant for nonbinary modulus. */
1375 tree cnst = fold (build (MINUS_EXPR, operation_type, modulus,
1376 convert (operation_type,
1377 integer_one_node)));
1380 result = fold (build (BIT_XOR_EXPR, operation_type,
1383 result = fold (build (MINUS_EXPR, operation_type,
1391 /* ... fall through ... */
1394 if (operation_type != base_type)
1397 result = fold (build1 (op_code, operation_type, convert (operation_type,
1403 TREE_SIDE_EFFECTS (result) = 1;
1404 if (TREE_CODE (result) == INDIRECT_REF)
1405 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1408 if (result_type != 0 && TREE_TYPE (result) != result_type)
1409 result = convert (result_type, result);
1414 /* Similar, but for COND_EXPR. */
/* Build a COND_EXPR choosing TRUE_OPERAND or FALSE_OPERAND per
   CONDITION_OPERAND, converting both arms to RESULT_TYPE.  Unconstrained
   or self-referential result types are handled by taking addresses of the
   arms and dereferencing the chosen pointer.  */
1417 build_cond_expr (result_type, condition_operand, true_operand, false_operand)
1419 tree condition_operand;
1426 /* Front-end verifies that result, true and false operands have same base
1427 type. Convert everything to the result type. */
1429 true_operand = convert (result_type, true_operand);
1430 false_operand = convert (result_type, false_operand);
1432 /* If the result type is unconstrained, take the address of
1433 the operands and then dereference our result. */
1435 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1436 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1439 result_type = build_pointer_type (result_type);
1440 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1441 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1444 result = fold (build (COND_EXPR, result_type, condition_operand,
1445 true_operand, false_operand));
1447 /* If either operand is a SAVE_EXPR (possibly surrounded by
1448 arithmetic), make sure it gets done. */
1449 true_operand = skip_simple_arithmetic (true_operand);
1450 false_operand = skip_simple_arithmetic (false_operand);
1452 if (TREE_CODE (true_operand) == SAVE_EXPR)
1453 result = build (COMPOUND_EXPR, result_type, true_operand, result);
1455 if (TREE_CODE (false_operand) == SAVE_EXPR)
1456 result = build (COMPOUND_EXPR, result_type, false_operand, result);
1458 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1459 SAVE_EXPRs with side effects and not shared by both arms. */
1462 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1468 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
/* Build a CALL_EXPR invoking FUNDECL with the single argument ARG;
   the call is always marked as having side effects.  */
1472 build_call_1_expr (fundecl, arg)
1476 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1477 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1478 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1481 TREE_SIDE_EFFECTS (call) = 1;
1486 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
/* Build a CALL_EXPR invoking FUNDECL with the two arguments ARG1 and ARG2;
   the call is always marked as having side effects.  */
1490 build_call_2_expr (fundecl, arg1, arg2)
1494 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1495 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1496 chainon (chainon (NULL_TREE,
1497 build_tree_list (NULL_TREE, arg1)),
1498 build_tree_list (NULL_TREE, arg2)),
1501 TREE_SIDE_EFFECTS (call) = 1;
1506 /* Likewise to call FUNDECL with no arguments. */
/* Build a CALL_EXPR invoking FUNDECL with no arguments;
   the call is always marked as having side effects.  */
1509 build_call_0_expr (fundecl)
1512 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1513 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1514 NULL_TREE, NULL_TREE);
1516 TREE_SIDE_EFFECTS (call) = 1;
1521 /* Call a function that raises an exception and pass the line number and file
1522 name, if requested. MSG says which exception function to call. */
/* Build a call to the exception-raising routine selected by MSG (an index
   into gnat_raise_decls), passing the current file name (empty when file
   names are suppressed) and the current input line.  */
1525 build_call_raise (msg)
1528 tree fndecl = gnat_raise_decls[msg];
1529 const char *str = discard_file_names ? "" : ref_filename;
1530 int len = strlen (str) + 1;
1531 tree filename = build_string (len, str);
1533 TREE_TYPE (filename)
1534 = build_array_type (char_type_node,
1535 build_index_type (build_int_2 (len, 0)));
1538 build_call_2_expr (fndecl,
1539 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1541 build_int_2 (input_line, 0));
1544 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
/* Return a CONSTRUCTOR of TYPE built from LIST, computing the
   TREE_CONSTANT / TREE_STATIC / TREE_SIDE_EFFECTS flags from the
   elements.  Constancy is dropped when a record's fields are not in
   bit-position order, since varasm.c cannot output such constructors.  */
1547 gnat_build_constructor (type, list)
1552 int allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1553 int side_effects = 0;
1556 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1558 if (! TREE_CONSTANT (TREE_VALUE (elmt))
1559 || (TREE_CODE (type) == RECORD_TYPE
1560 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1561 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1562 || ! initializer_constant_valid_p (TREE_VALUE (elmt),
1563 TREE_TYPE (TREE_VALUE (elmt))))
1566 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1569 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1570 be executing the code we generate here in that case, but handle it
1571 specially to avoid the compiler blowing up. */
1572 if (TREE_CODE (type) == RECORD_TYPE
1574 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1575 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1578 /* If TYPE is a RECORD_TYPE and the fields are not in the
1579 same order as their bit position, don't treat this as constant
1580 since varasm.c can't handle it. */
1581 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1583 tree last_pos = bitsize_zero_node;
1586 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1588 tree this_pos = bit_position (field);
1590 if (TREE_CODE (this_pos) != INTEGER_CST
1591 || tree_int_cst_lt (this_pos, last_pos))
1597 last_pos = this_pos;
1601 result = build_constructor (type, list);
1602 TREE_CONSTANT (result) = allconstant;
1603 TREE_STATIC (result) = allconstant;
1604 TREE_SIDE_EFFECTS (result) = side_effects;
1605 TREE_READONLY (result) = TREE_READONLY (type);
1610 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1611 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1614 We also handle the fact that we might have been passed a pointer to the
1615 actual record and know how to look for fields in variant parts. */
/* Return a COMPONENT_REF for the field named COMPONENT (an
   IDENTIFIER_NODE) or given directly by FIELD (a FIELD_DECL) in
   RECORD_VARIABLE, or a null tree if the field cannot be found.
   Exactly one of COMPONENT / FIELD must be supplied.  A FIELD that
   belongs to another view of the record is matched up through
   DECL_ORIGINAL_FIELD, searching variant parts if necessary.  */
1618 build_simple_component_ref (record_variable, component, field)
1619 tree record_variable;
1623 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1626 if ((TREE_CODE (record_type) != RECORD_TYPE
1627 && TREE_CODE (record_type) != UNION_TYPE
1628 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1629 || TYPE_SIZE (record_type) == 0)
1632 /* Either COMPONENT or FIELD must be specified, but not both. */
1633 if ((component != 0) == (field != 0))
1636 /* If no field was specified, look for a field with the specified name
1637 in the current record only. */
1639 for (field = TYPE_FIELDS (record_type); field;
1640 field = TREE_CHAIN (field))
1641 if (DECL_NAME (field) == component)
1647 /* If this field is not in the specified record, see if we can find
1648 something in the record whose original field is the same as this one. */
1649 if (DECL_CONTEXT (field) != record_type)
1650 /* Check if there is a field with name COMPONENT in the record. */
1654 /* First loop thru normal components. */
1656 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1657 new_field = TREE_CHAIN (new_field))
1658 if (DECL_ORIGINAL_FIELD (new_field) == field
1659 || new_field == DECL_ORIGINAL_FIELD (field)
1660 || (DECL_ORIGINAL_FIELD (field) != 0
1661 && (DECL_ORIGINAL_FIELD (field)
1662 == DECL_ORIGINAL_FIELD (new_field))))
1665 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1666 the component in the first search. Doing this search in 2 steps
1667 is required to avoid hidden homonymous fields in the
1671 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1672 new_field = TREE_CHAIN (new_field))
1673 if (DECL_INTERNAL_P (new_field))
1676 = build_simple_component_ref (record_variable,
1677 NULL_TREE, new_field);
1678 ref = build_simple_component_ref (field_ref, NULL_TREE, field);
1690 /* It would be nice to call "fold" here, but that can lose a type
1691 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1692 ref = build (COMPONENT_REF, TREE_TYPE (field), record_variable, field);
1694 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1695 TREE_READONLY (ref) = 1;
1696 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1697 || TYPE_VOLATILE (record_type))
1698 TREE_THIS_VOLATILE (ref) = 1;
1703 /* Like build_simple_component_ref, except that we give an error if the
1704 reference could not be found. */
/* Like build_simple_component_ref, but when the reference cannot be
   built and FIELD was given, return a NULL_EXPR raising
   Constraint_Error (discriminant check failed) instead of failing.  */
1707 build_component_ref (record_variable, component, field)
1708 tree record_variable;
1712 tree ref = build_simple_component_ref (record_variable, component, field);
1717 /* If FIELD was specified, assume this is an invalid user field so
1718 raise constraint error. Otherwise, we can't find the type to return, so
1721 else if (field != 0)
1722 return build1 (NULL_EXPR, TREE_TYPE (field),
1723 build_call_raise (CE_Discriminant_Check_Failed));
1728 /* Build a GCC tree to call an allocation or deallocation function.
1729 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1730 generate an allocator.
1732 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1733 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1734 storage pool to use. If not preset, malloc and free will be used except
1735 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1736 object dynamically on the stack frame. */
/* Build a tree calling an allocation or deallocation routine.  GNU_OBJ,
   if nonzero, is the object to deallocate; otherwise we allocate.
   GNU_SIZE is the size in bytes and ALIGN the alignment in bits.
   GNAT_PROC, when Present, is the user-specified pool routine; a
   GNAT_POOL of -1 selects stack allocation; otherwise malloc/free.  */
1739 build_call_alloc_dealloc
1740 (gnu_obj, gnu_size, align, gnat_proc, gnat_pool, gnat_node)
1744 Entity_Id gnat_proc;
1745 Entity_Id gnat_pool;
1748 tree gnu_align = size_int (align / BITS_PER_UNIT);
1750 if (CONTAINS_PLACEHOLDER_P (gnu_size))
1751 gnu_size = build (WITH_RECORD_EXPR, sizetype, gnu_size,
1752 build_unary_op (INDIRECT_REF, NULL_TREE, gnu_obj));
1754 if (Present (gnat_proc))
1756 /* The storage pools are obviously always tagged types, but the
1757 secondary stack uses the same mechanism and is not tagged.  */
1758 if (Is_Tagged_Type (Etype (gnat_pool)))
1760 /* The size is the third parameter; the alignment is the
1762 Entity_Id gnat_size_type
1763 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1764 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1765 tree gnu_proc = gnat_to_gnu (gnat_proc);
1766 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1767 tree gnu_pool = gnat_to_gnu (gnat_pool);
1768 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1769 tree gnu_args = NULL_TREE;
1772 /* The first arg is always the address of the storage pool; next
1773 comes the address of the object, for a deallocator, then the
1774 size and alignment. */
1776 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1780 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1783 = chainon (gnu_args,
1784 build_tree_list (NULL_TREE,
1785 convert (gnu_size_type, gnu_size)));
1787 = chainon (gnu_args,
1788 build_tree_list (NULL_TREE,
1789 convert (gnu_size_type, gnu_align)));
1791 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1792 gnu_proc_addr, gnu_args, NULL_TREE);
1793 TREE_SIDE_EFFECTS (gnu_call) = 1;
1797 /* Secondary stack case. */
1800 /* The size is the second parameter */
1801 Entity_Id gnat_size_type
1802 = Etype (Next_Formal (First_Formal (gnat_proc)));
1803 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1804 tree gnu_proc = gnat_to_gnu (gnat_proc);
1805 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1806 tree gnu_args = NULL_TREE;
1809 /* The first arg is the address of the object, for a
1810 deallocator, then the size */
1813 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1816 = chainon (gnu_args,
1817 build_tree_list (NULL_TREE,
1818 convert (gnu_size_type, gnu_size)));
1820 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1821 gnu_proc_addr, gnu_args, NULL_TREE);
1822 TREE_SIDE_EFFECTS (gnu_call) = 1;
1828 return build_call_1_expr (free_decl, gnu_obj);
1829 else if (gnat_pool == -1)
1831 /* If the size is a constant, we can put it in the fixed portion of
1832 the stack frame to avoid the need to adjust the stack pointer. */
1833 if (TREE_CODE (gnu_size) == INTEGER_CST && ! flag_stack_check)
1836 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1837 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1839 create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1840 gnu_array_type, NULL_TREE, 0, 0, 0, 0, 0);
1842 return convert (ptr_void_type_node,
1843 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1846 return build (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
1850 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1851 Check_No_Implicit_Heap_Alloc (gnat_node);
1852 return build_call_1_expr (malloc_decl, gnu_size);
1856 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1857 initial value is INIT, if INIT is nonzero. Convert the expression to
1858 RESULT_TYPE, which must be some type of pointer. Return the tree.
1859 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1860 the storage pool to use. */
/* Build a tree allocating an object of TYPE, initialized from INIT when
   nonzero, converted to RESULT_TYPE (a pointer type).  GNAT_PROC and
   GNAT_POOL select the allocation routine; fat/thin pointer results get
   a combined template+data object allocated and filled in.  */
1863 build_allocator (type, init, result_type, gnat_proc, gnat_pool, gnat_node)
1867 Entity_Id gnat_proc;
1868 Entity_Id gnat_pool;
1871 tree size = TYPE_SIZE_UNIT (type);
1874 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1875 if (init != 0 && TREE_CODE (init) == NULL_EXPR)
1876 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1878 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1879 sizes of the object and its template. Allocate the whole thing and
1880 fill in the parts that are known. */
1881 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1884 = (TYPE_FAT_POINTER_P (result_type)
1885 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1886 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1888 = build_unc_object_type (template_type, type,
1889 get_identifier ("ALLOC"));
1890 tree storage_ptr_type = build_pointer_type (storage_type);
1892 tree template_cons = NULL_TREE;
1894 size = TYPE_SIZE_UNIT (storage_type);
1896 if (CONTAINS_PLACEHOLDER_P (size))
1897 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1899 /* If the size overflows, pass -1 so the allocator will raise
1901 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1902 size = ssize_int (-1);
1904 storage = build_call_alloc_dealloc (NULL_TREE, size,
1905 TYPE_ALIGN (storage_type),
1906 gnat_proc, gnat_pool, gnat_node);
/* protect_multiple_eval keeps the allocated pointer from being
   re-evaluated when it is used more than once below.  */
1907 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
1909 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1911 type = TREE_TYPE (TYPE_FIELDS (type));
1914 init = convert (type, init);
1917 /* If there is an initializing expression, make a constructor for
1918 the entire object including the bounds and copy it into the
1919 object. If there is no initializing expression, just set the
1923 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1925 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1926 build_template (template_type, type,
1932 build (COMPOUND_EXPR, storage_ptr_type,
1934 (MODIFY_EXPR, storage_type,
1935 build_unary_op (INDIRECT_REF, NULL_TREE,
1936 convert (storage_ptr_type, storage)),
1937 gnat_build_constructor (storage_type, template_cons)),
1938 convert (storage_ptr_type, storage)));
1942 (COMPOUND_EXPR, result_type,
1944 (MODIFY_EXPR, template_type,
1946 (build_unary_op (INDIRECT_REF, NULL_TREE,
1947 convert (storage_ptr_type, storage)),
1948 NULL_TREE, TYPE_FIELDS (storage_type)),
1949 build_template (template_type, type, NULL_TREE)),
1950 convert (result_type, convert (storage_ptr_type, storage)));
1953 /* If we have an initializing expression, see if its size is simpler
1954 than the size from the type. */
1955 if (init != 0 && TYPE_SIZE_UNIT (TREE_TYPE (init)) != 0
1956 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1957 || CONTAINS_PLACEHOLDER_P (size)))
1958 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1960 /* If the size is still self-referential, reference the initializing
1961 expression, if it is present. If not, this must have been a
1962 call to allocate a library-level object, in which case we use
1963 the maximum size. */
1964 if (CONTAINS_PLACEHOLDER_P (size))
1967 size = max_size (size, 1);
1969 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1972 /* If the size overflows, pass -1 so the allocator will raise
1974 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1975 size = ssize_int (-1);
1977 /* If this is a type whose alignment is larger than the
1978 biggest we support in normal alignment and this is in
1979 the default storage pool, make an "aligning type", allocate
1980 it, point to the field we need, and return that. */
1981 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1984 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1986 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1987 BIGGEST_ALIGNMENT, Empty,
1989 result = save_expr (result);
1990 result = convert (build_pointer_type (new_type), result);
1991 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1992 result = build_component_ref (result, NULL_TREE,
1993 TYPE_FIELDS (new_type));
1994 result = convert (result_type,
1995 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1998 result = convert (result_type,
1999 build_call_alloc_dealloc (NULL_TREE, size,
2005 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
2006 the value, and return the address. Do this with a COMPOUND_EXPR. */
2010 result = save_expr (result);
2012 = build (COMPOUND_EXPR, TREE_TYPE (result),
2014 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (result)),
2015 build_unary_op (INDIRECT_REF, TREE_TYPE (TREE_TYPE (result)),
2021 return convert (result_type, result);
2024 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
2025 GNAT_FORMAL is how we find the descriptor record. */
/* Fill in a VMS descriptor for EXPR and return a constructor for it;
   GNAT_FORMAL supplies the descriptor record type.  Each descriptor
   field is initialized from its DECL_INITIAL, resolving placeholders
   against EXPR.  */
2028 fill_vms_descriptor (expr, gnat_formal)
2030 Entity_Id gnat_formal;
2032 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
2034 tree const_list = 0;
2036 expr = maybe_unconstrained_array (expr);
2037 gnat_mark_addressable (expr);
2039 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
2041 tree init = DECL_INITIAL (field);
2043 if (CONTAINS_PLACEHOLDER_P (init))
2044 init = build (WITH_RECORD_EXPR, TREE_TYPE (init), init, expr);
2046 const_list = tree_cons (field, convert (TREE_TYPE (field), init),
/* The list was consed front-to-back, so reverse it for the constructor.  */
2050 return gnat_build_constructor (record_type, nreverse (const_list));
2053 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2054 should not be allocated in a register. Returns true if successful. */
2057 gnat_mark_addressable (expr_node)
2061 switch (TREE_CODE (expr_node))
2066 case ARRAY_RANGE_REF:
2069 case VIEW_CONVERT_EXPR:
2071 case NON_LVALUE_EXPR:
2074 expr_node = TREE_OPERAND (expr_node, 0);
2078 TREE_ADDRESSABLE (expr_node) = 1;
2084 put_var_into_stack (expr_node, true);
2085 TREE_ADDRESSABLE (expr_node) = 1;
2089 TREE_ADDRESSABLE (expr_node) = 1;
2093 return (DECL_CONST_CORRESPONDING_VAR (expr_node) != 0
2094 && (gnat_mark_addressable
2095 (DECL_CONST_CORRESPONDING_VAR (expr_node))));