1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
34 #include "tree-inline.h"
/* Forward declarations for the file-local helper routines defined below.
   NOTE(review): this SOURCE is an elided extraction of GNAT's utils2.c --
   the number at the start of each line is the original file line number,
   and the gaps between them show that many lines are missing.  Do not
   treat any block here as compilable as-is.  */
51 static tree find_common_type (tree, tree);
52 static bool contains_save_expr_p (tree);
53 static tree contains_null_expr (tree);
54 static tree compare_arrays (tree, tree, tree);
55 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
56 static tree build_simple_component_ref (tree, tree, tree, bool);
58 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
61 This preparation consists of taking the ordinary representation of
62 an expression expr and producing a valid tree boolean expression
63 describing whether expr is nonzero. We could simply always do
65 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
67 but we optimize comparisons, &&, ||, and !.
69 The resulting type should always be the same as the input type.
70 This function is simpler than the corresponding C version since
71 the only possible operands will be things of Boolean type. */
/* NOTE(review): body elided -- return type line, braces and several case
   labels between the original lines shown here are missing.  */
74 gnat_truthvalue_conversion (tree expr)
76 tree type = TREE_TYPE (expr);
78 switch (TREE_CODE (expr))
80 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
81 case LT_EXPR: case GT_EXPR:
82 case TRUTH_ANDIF_EXPR:
/* Presumably the INTEGER_CST case (label elided): fold a constant to the
   canonical 0/1 of TYPE -- TODO confirm against the full source.  */
91 return (integer_zerop (expr)
92 ? build_int_cst (type, 0)
93 : build_int_cst (type, 1));
/* Presumably the REAL_CST case (label elided), by real_zerop usage.  */
96 return (real_zerop (expr)
97 ? fold_convert (type, integer_zero_node)
98 : fold_convert (type, integer_one_node));
101 /* Distribute the conversion into the arms of a COND_EXPR. */
103 tree arg1 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 1));
104 tree arg2 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 2));
105 return fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
/* Default fallback: explicit comparison against zero in TYPE.  */
110 return build_binary_op (NE_EXPR, type, expr,
111 fold_convert (type, integer_zero_node));
115 /* Return the base type of TYPE. */
/* NOTE(review): body elided (return type line, braces and the final
   return are missing).  Visible logic: look through a justified modular
   record wrapper, then walk TREE_TYPE links while the type is an
   integer or real subtype.  */
118 get_base_type (tree type)
120 if (TREE_CODE (type) == RECORD_TYPE
121 && TYPE_JUSTIFIED_MODULAR_P (type))
122 type = TREE_TYPE (TYPE_FIELDS (type));
124 while (TREE_TYPE (type)
125 && (TREE_CODE (type) == INTEGER_TYPE
126 || TREE_CODE (type) == REAL_TYPE))
127 type = TREE_TYPE (type);
132 /* EXP is a GCC tree representing an address. See if we can find how
133 strictly the object at that address is aligned. Return that alignment
134 in bits. If we don't know anything about the alignment, return 0. */
/* NOTE(review): body elided -- most switch case labels, `break's and
   braces are missing here; the comments preceding each fragment identify
   the intended tree code where the label itself was dropped.  */
137 known_alignment (tree exp)
139 unsigned int this_alignment;
140 unsigned int lhs, rhs;
142 switch (TREE_CODE (exp))
145 case VIEW_CONVERT_EXPR:
146 case NON_LVALUE_EXPR:
147 /* Conversions between pointers and integers don't change the alignment
148 of the underlying object. */
149 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
153 /* The value of a COMPOUND_EXPR is that of it's second operand. */
154 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
159 /* If two address are added, the alignment of the result is the
160 minimum of the two alignments. */
161 lhs = known_alignment (TREE_OPERAND (exp, 0));
162 rhs = known_alignment (TREE_OPERAND (exp, 1));
163 this_alignment = MIN (lhs, rhs);
166 case POINTER_PLUS_EXPR:
167 lhs = known_alignment (TREE_OPERAND (exp, 0));
168 rhs = known_alignment (TREE_OPERAND (exp, 1));
169 /* If we don't know the alignment of the offset, we assume that
172 this_alignment = lhs;
174 this_alignment = MIN (lhs, rhs);
178 /* If there is a choice between two values, use the smallest one. */
179 lhs = known_alignment (TREE_OPERAND (exp, 1));
180 rhs = known_alignment (TREE_OPERAND (exp, 2));
181 this_alignment = MIN (lhs, rhs);
/* Presumably the INTEGER_CST case (label elided) -- TODO confirm.  */
186 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
187 /* The first part of this represents the lowest bit in the constant,
188 but it is originally in bytes, not bits. */
189 this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
/* Presumably the MULT_EXPR case (label elided) -- TODO confirm.  */
194 /* If we know the alignment of just one side, use it. Otherwise,
195 use the product of the alignments. */
196 lhs = known_alignment (TREE_OPERAND (exp, 0));
197 rhs = known_alignment (TREE_OPERAND (exp, 1));
200 this_alignment = rhs;
202 this_alignment = lhs;
204 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
208 /* A bit-and expression is as aligned as the maximum alignment of the
209 operands. We typically get here for a complex lhs and a constant
210 negative power of two on the rhs to force an explicit alignment, so
211 don't bother looking at the lhs. */
212 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
/* Presumably the ADDR_EXPR case (label elided) -- TODO confirm.  */
216 this_alignment = expr_align (TREE_OPERAND (exp, 0));
/* Presumably the CALL_EXPR case: try inlining the call to learn more.  */
221 tree t = maybe_inline_call_in_expr (exp);
223 return known_alignment (t);
226 /* Fall through... */
229 /* For other pointer expressions, we assume that the pointed-to object
230 is at least as aligned as the pointed-to type. Beware that we can
231 have a dummy type here (e.g. a Taft Amendment type), for which the
232 alignment is meaningless and should be ignored. */
233 if (POINTER_TYPE_P (TREE_TYPE (exp))
234 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
235 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
241 return this_alignment;
244 /* We have a comparison or assignment operation on two types, T1 and T2, which
245 are either both array types or both record types. T1 is assumed to be for
246 the left hand side operand, and T2 for the right hand side. Return the
247 type that both operands should be converted to for the operation, if any.
248 Otherwise return zero. */
/* NOTE(review): body elided -- the `return t1;'/`return t2;' statements
   that should follow each `if' below are missing from this extraction.  */
251 find_common_type (tree t1, tree t2)
253 /* ??? As of today, various constructs lead here with types of different
254 sizes even when both constants (e.g. tagged types, packable vs regular
255 component types, padded vs unpadded types, ...). While some of these
256 would better be handled upstream (types should be made consistent before
257 calling into build_binary_op), some others are really expected and we
258 have to be careful. */
260 /* We must prevent writing more than what the target may hold if this is for
261 an assignment and the case of tagged types is handled in build_binary_op
262 so use the lhs type if it is known to be smaller, or of constant size and
263 the rhs type is not, whatever the modes. We also force t1 in case of
264 constant size equality to minimize occurrences of view conversions on the
265 lhs of assignments. */
266 if (TREE_CONSTANT (TYPE_SIZE (t1))
267 && (!TREE_CONSTANT (TYPE_SIZE (t2))
268 || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
271 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
272 that we will not have any alignment problems since, if we did, the
273 non-BLKmode type could not have been used. */
274 if (TYPE_MODE (t1) != BLKmode)
277 /* If the rhs type is of constant size, use it whatever the modes. At
278 this point it is known to be smaller, or of constant size and the
280 if (TREE_CONSTANT (TYPE_SIZE (t2)))
283 /* Otherwise, if the rhs type is non-BLKmode, use it. */
284 if (TYPE_MODE (t2) != BLKmode)
287 /* In this case, both types have variable size and BLKmode. It's
288 probably best to leave the "type mismatch" because changing it
289 could cause a bad self-referential reference. */
293 /* See if EXP contains a SAVE_EXPR in a position where we would
296 ??? This is a real kludge, but is probably the best approach short
297 of some very general solution. */
/* NOTE(review): body elided -- the SAVE_EXPR base case, the CONSTRUCTOR
   case label, the default return and all braces are missing here.  */
300 contains_save_expr_p (tree exp)
302 switch (TREE_CODE (exp))
307 case ADDR_EXPR: case INDIRECT_REF:
309 CASE_CONVERT: case VIEW_CONVERT_EXPR:
310 return contains_save_expr_p (TREE_OPERAND (exp, 0));
/* Presumably the CONSTRUCTOR case: recurse into every element value.  */
315 unsigned HOST_WIDE_INT ix;
317 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
318 if (contains_save_expr_p (value))
328 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
329 it if so. This is used to detect types whose sizes involve computations
330 that are known to raise Constraint_Error. */
/* NOTE(review): body elided -- the declaration of `tem', the class-case
   labels, the `if (tem) return tem;' sequencing between the recursive
   probes, and the default return are all missing from this extraction.  */
333 contains_null_expr (tree exp)
337 if (TREE_CODE (exp) == NULL_EXPR)
340 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
343 return contains_null_expr (TREE_OPERAND (exp, 0));
347 tem = contains_null_expr (TREE_OPERAND (exp, 0));
351 return contains_null_expr (TREE_OPERAND (exp, 1));
354 switch (TREE_CODE (exp))
357 return contains_null_expr (TREE_OPERAND (exp, 0));
360 tem = contains_null_expr (TREE_OPERAND (exp, 0));
364 tem = contains_null_expr (TREE_OPERAND (exp, 1));
368 return contains_null_expr (TREE_OPERAND (exp, 2));
379 /* Return an expression tree representing an equality comparison of
380 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
381 be of type RESULT_TYPE
383 Two arrays are equal in one of two ways: (1) if both have zero length
384 in some dimension (not necessarily the same dimension) or (2) if the
385 lengths in each dimension are equal and the data is equal. We perform the
386 length tests in as efficient a manner as possible. */
/* NOTE(review): body elided -- braces, the declarations of `tem' and
   `nbt', several assignment left-hand sides (lines beginning with `='),
   the per-dimension loop tail and the final `return result;' are missing
   from this extraction; only the visible statements are documented.  */
389 compare_arrays (tree result_type, tree a1, tree a2)
391 tree t1 = TREE_TYPE (a1);
392 tree t2 = TREE_TYPE (a2);
393 tree result = convert (result_type, integer_one_node);
394 tree a1_is_null = convert (result_type, integer_zero_node);
395 tree a2_is_null = convert (result_type, integer_zero_node);
396 bool length_zero_p = false;
398 /* Process each dimension separately and compare the lengths. If any
399 dimension has a size known to be zero, set SIZE_ZERO_P to 1 to
400 suppress the comparison of the data. */
401 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
403 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
404 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
405 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
406 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
407 tree bt = get_base_type (TREE_TYPE (lb1));
408 tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
409 tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
412 tree comparison, this_a1_is_null, this_a2_is_null;
414 /* If the length of the first array is a constant, swap our operands
415 unless the length of the second array is the constant zero.
416 Note that we have set the `length' values to the length - 1. */
417 if (TREE_CODE (length1) == INTEGER_CST
418 && !integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
419 convert (bt, integer_one_node))))
421 tem = a1, a1 = a2, a2 = tem;
422 tem = t1, t1 = t2, t2 = tem;
423 tem = lb1, lb1 = lb2, lb2 = tem;
424 tem = ub1, ub1 = ub2, ub2 = tem;
425 tem = length1, length1 = length2, length2 = tem;
426 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
429 /* If the length of this dimension in the second array is the constant
430 zero, we can just go inside the original bounds for the first
431 array and see if last < first. */
432 if (integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
433 convert (bt, integer_one_node))))
435 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
436 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
438 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
439 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
440 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
442 length_zero_p = true;
443 this_a1_is_null = comparison;
444 this_a2_is_null = convert (result_type, integer_one_node);
447 /* If the length is some other constant value, we know that the
448 this dimension in the first array cannot be superflat, so we
449 can just use its length from the actual stored bounds. */
450 else if (TREE_CODE (length2) == INTEGER_CST)
452 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
453 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
454 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
455 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
456 nbt = get_base_type (TREE_TYPE (ub1));
459 = build_binary_op (EQ_EXPR, result_type,
460 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
461 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
463 /* Note that we know that UB2 and LB2 are constant and hence
464 cannot contain a PLACEHOLDER_EXPR. */
466 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
467 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
469 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
470 this_a2_is_null = convert (result_type, integer_zero_node);
473 /* Otherwise compare the computed lengths. */
476 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
477 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
480 = build_binary_op (EQ_EXPR, result_type, length1, length2);
483 = build_binary_op (LT_EXPR, result_type, length1,
484 convert (bt, integer_zero_node));
486 = build_binary_op (LT_EXPR, result_type, length2,
487 convert (bt, integer_zero_node));
490 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
493 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
494 this_a1_is_null, a1_is_null);
495 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
496 this_a2_is_null, a2_is_null);
502 /* Unless the size of some bound is known to be zero, compare the
503 data in the array. */
506 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
509 a1 = convert (type, a1), a2 = convert (type, a2);
511 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
512 fold_build2 (EQ_EXPR, result_type, a1, a2));
516 /* The result is also true if both sizes are zero. */
517 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
518 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
519 a1_is_null, a2_is_null),
522 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
523 starting the comparison above since the place it would be otherwise
524 evaluated would be wrong. */
526 if (contains_save_expr_p (a1))
527 result = build2 (COMPOUND_EXPR, result_type, a1, result);
529 if (contains_save_expr_p (a2))
530 result = build2 (COMPOUND_EXPR, result_type, a2, result);
535 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
536 type TYPE. We know that TYPE is a modular type with a nonbinary
/* NOTE(review): body elided -- the trailing parameter line (`tree rhs'),
   the declarations of `op_type' and `result', braces, and several COND_EXPR
   arms are missing from this extraction.  */
540 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
543 tree modulus = TYPE_MODULUS (type);
544 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
545 unsigned int precision;
546 bool unsignedp = true;
550 /* If this is an addition of a constant, convert it to a subtraction
551 of a constant since we can do that faster. */
552 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
554 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
555 op_code = MINUS_EXPR;
558 /* For the logical operations, we only need PRECISION bits. For
559 addition and subtraction, we need one more and for multiplication we
560 need twice as many. But we never want to make a size smaller than
562 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
563 needed_precision += 1;
564 else if (op_code == MULT_EXPR)
565 needed_precision *= 2;
567 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
569 /* Unsigned will do for everything but subtraction. */
570 if (op_code == MINUS_EXPR)
/* The assignment clearing `unsignedp' for MINUS_EXPR is elided here.  */
573 /* If our type is the wrong signedness or isn't wide enough, make a new
574 type and convert both our operands to it. */
575 if (TYPE_PRECISION (op_type) < precision
576 || TYPE_UNSIGNED (op_type) != unsignedp)
578 /* Copy the node so we ensure it can be modified to make it modular. */
579 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
580 modulus = convert (op_type, modulus);
581 SET_TYPE_MODULUS (op_type, modulus);
582 TYPE_MODULAR_P (op_type) = 1;
583 lhs = convert (op_type, lhs);
584 rhs = convert (op_type, rhs);
587 /* Do the operation, then we'll fix it up. */
588 result = fold_build2 (op_code, op_type, lhs, rhs);
590 /* For multiplication, we have no choice but to do a full modulus
591 operation. However, we want to do this in the narrowest
593 if (op_code == MULT_EXPR)
595 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
596 modulus = convert (div_type, modulus);
597 SET_TYPE_MODULUS (div_type, modulus);
598 TYPE_MODULAR_P (div_type) = 1;
599 result = convert (op_type,
600 fold_build2 (TRUNC_MOD_EXPR, div_type,
601 convert (div_type, result), modulus));
604 /* For subtraction, add the modulus back if we are negative. */
605 else if (op_code == MINUS_EXPR)
607 result = save_expr (result);
608 result = fold_build3 (COND_EXPR, op_type,
609 fold_build2 (LT_EXPR, integer_type_node, result,
610 convert (op_type, integer_zero_node)),
611 fold_build2 (PLUS_EXPR, op_type, result, modulus),
615 /* For the other operations, subtract the modulus if we are >= it. */
618 result = save_expr (result);
619 result = fold_build3 (COND_EXPR, op_type,
620 fold_build2 (GE_EXPR, integer_type_node,
622 fold_build2 (MINUS_EXPR, op_type,
627 return convert (type, result);
630 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
631 desired for the result. Usually the operation is to be performed
632 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
633 in which case the type to be used will be derived from the operands.
635 This function is very much unlike the ones for C and C++ since we
636 have already done any type conversion and matching required. All we
637 have to do here is validate the work done by SEM and handle subtypes. */
/* NOTE(review): body heavily elided -- the return type line, braces, many
   case labels (MODIFY_EXPR, ARRAY_REF, comparison operators, shifts, etc.),
   `break' statements and connective conditions are missing from this
   extraction; only the visible statements can be relied on.  */
640 build_binary_op (enum tree_code op_code, tree result_type,
641 tree left_operand, tree right_operand)
643 tree left_type = TREE_TYPE (left_operand);
644 tree right_type = TREE_TYPE (right_operand);
645 tree left_base_type = get_base_type (left_type);
646 tree right_base_type = get_base_type (right_type);
647 tree operation_type = result_type;
648 tree best_type = NULL_TREE;
649 tree modulus, result;
650 bool has_side_effects = false;
653 && TREE_CODE (operation_type) == RECORD_TYPE
654 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
655 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
658 && !AGGREGATE_TYPE_P (operation_type)
659 && TYPE_EXTRA_SUBTYPE_P (operation_type))
660 operation_type = get_base_type (operation_type);
662 modulus = (operation_type
663 && TREE_CODE (operation_type) == INTEGER_TYPE
664 && TYPE_MODULAR_P (operation_type)
665 ? TYPE_MODULUS (operation_type) : NULL_TREE);
/* Presumably the MODIFY_EXPR case of the main switch begins here
   (label elided) -- TODO confirm against the full source.  */
670 /* If there were integral or pointer conversions on the LHS, remove
671 them; we'll be putting them back below if needed. Likewise for
672 conversions between array and record types, except for justified
673 modular types. But don't do this if the right operand is not
674 BLKmode (for packed arrays) unless we are not changing the mode. */
675 while ((CONVERT_EXPR_P (left_operand)
676 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
677 && (((INTEGRAL_TYPE_P (left_type)
678 || POINTER_TYPE_P (left_type))
679 && (INTEGRAL_TYPE_P (TREE_TYPE
680 (TREE_OPERAND (left_operand, 0)))
681 || POINTER_TYPE_P (TREE_TYPE
682 (TREE_OPERAND (left_operand, 0)))))
683 || (((TREE_CODE (left_type) == RECORD_TYPE
684 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
685 || TREE_CODE (left_type) == ARRAY_TYPE)
686 && ((TREE_CODE (TREE_TYPE
687 (TREE_OPERAND (left_operand, 0)))
689 || (TREE_CODE (TREE_TYPE
690 (TREE_OPERAND (left_operand, 0)))
692 && (TYPE_MODE (right_type) == BLKmode
693 || (TYPE_MODE (left_type)
694 == TYPE_MODE (TREE_TYPE
696 (left_operand, 0))))))))
698 left_operand = TREE_OPERAND (left_operand, 0);
699 left_type = TREE_TYPE (left_operand);
702 /* If a class-wide type may be involved, force use of the RHS type. */
703 if ((TREE_CODE (right_type) == RECORD_TYPE
704 || TREE_CODE (right_type) == UNION_TYPE)
705 && TYPE_ALIGN_OK (right_type))
706 operation_type = right_type;
708 /* If we are copying between padded objects with compatible types, use
709 the padded view of the objects, this is very likely more efficient.
710 Likewise for a padded that is assigned a constructor, in order to
711 avoid putting a VIEW_CONVERT_EXPR on the LHS. But don't do this if
712 we wouldn't have actually copied anything. */
713 else if (TREE_CODE (left_type) == RECORD_TYPE
714 && TYPE_IS_PADDING_P (left_type)
715 && TREE_CONSTANT (TYPE_SIZE (left_type))
716 && ((TREE_CODE (right_operand) == COMPONENT_REF
717 && TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
720 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
721 && gnat_types_compatible_p
723 TREE_TYPE (TREE_OPERAND (right_operand, 0))))
724 || TREE_CODE (right_operand) == CONSTRUCTOR)
725 && !integer_zerop (TYPE_SIZE (right_type)))
726 operation_type = left_type;
728 /* Find the best type to use for copying between aggregate types. */
729 else if (((TREE_CODE (left_type) == ARRAY_TYPE
730 && TREE_CODE (right_type) == ARRAY_TYPE)
731 || (TREE_CODE (left_type) == RECORD_TYPE
732 && TREE_CODE (right_type) == RECORD_TYPE))
733 && (best_type = find_common_type (left_type, right_type)))
734 operation_type = best_type;
736 /* Otherwise use the LHS type. */
737 else if (!operation_type)
738 operation_type = left_type;
740 /* Ensure everything on the LHS is valid. If we have a field reference,
741 strip anything that get_inner_reference can handle. Then remove any
742 conversions between types having the same code and mode. And mark
743 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
744 either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
745 result = left_operand;
748 tree restype = TREE_TYPE (result);
750 if (TREE_CODE (result) == COMPONENT_REF
751 || TREE_CODE (result) == ARRAY_REF
752 || TREE_CODE (result) == ARRAY_RANGE_REF)
753 while (handled_component_p (result))
754 result = TREE_OPERAND (result, 0);
755 else if (TREE_CODE (result) == REALPART_EXPR
756 || TREE_CODE (result) == IMAGPART_EXPR
757 || (CONVERT_EXPR_P (result)
758 && (((TREE_CODE (restype)
759 == TREE_CODE (TREE_TYPE
760 (TREE_OPERAND (result, 0))))
761 && (TYPE_MODE (TREE_TYPE
762 (TREE_OPERAND (result, 0)))
763 == TYPE_MODE (restype)))
764 || TYPE_ALIGN_OK (restype))))
765 result = TREE_OPERAND (result, 0);
766 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
768 TREE_ADDRESSABLE (result) = 1;
769 result = TREE_OPERAND (result, 0);
775 gcc_assert (TREE_CODE (result) == INDIRECT_REF
776 || TREE_CODE (result) == NULL_EXPR
779 /* Convert the right operand to the operation type unless it is
780 either already of the correct type or if the type involves a
781 placeholder, since the RHS may not have the same record type. */
782 if (operation_type != right_type
783 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
785 right_operand = convert (operation_type, right_operand);
786 right_type = operation_type;
789 /* If the left operand is not of the same type as the operation
790 type, wrap it up in a VIEW_CONVERT_EXPR. */
791 if (left_type != operation_type)
792 left_operand = unchecked_convert (operation_type, left_operand, false);
794 has_side_effects = true;
/* Presumably the ARRAY_REF case begins near here (label elided).  */
800 operation_type = TREE_TYPE (left_type);
802 /* ... fall through ... */
804 case ARRAY_RANGE_REF:
805 /* First look through conversion between type variants. Note that
806 this changes neither the operation type nor the type domain. */
807 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
808 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
809 == TYPE_MAIN_VARIANT (left_type))
811 left_operand = TREE_OPERAND (left_operand, 0);
812 left_type = TREE_TYPE (left_operand);
815 /* Then convert the right operand to its base type. This will prevent
816 unneeded sign conversions when sizetype is wider than integer. */
817 right_operand = convert (right_base_type, right_operand);
818 right_operand = convert (sizetype, right_operand);
820 if (!TREE_CONSTANT (right_operand)
821 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
822 gnat_mark_addressable (left_operand);
/* Presumably the equality-comparison cases begin near here.  */
831 gcc_assert (!POINTER_TYPE_P (left_type));
833 /* ... fall through ... */
837 /* If either operand is a NULL_EXPR, just return a new one. */
838 if (TREE_CODE (left_operand) == NULL_EXPR)
839 return build2 (op_code, result_type,
840 build1 (NULL_EXPR, integer_type_node,
841 TREE_OPERAND (left_operand, 0)),
844 else if (TREE_CODE (right_operand) == NULL_EXPR)
845 return build2 (op_code, result_type,
846 build1 (NULL_EXPR, integer_type_node,
847 TREE_OPERAND (right_operand, 0)),
850 /* If either object is a justified modular types, get the
851 fields from within. */
852 if (TREE_CODE (left_type) == RECORD_TYPE
853 && TYPE_JUSTIFIED_MODULAR_P (left_type))
855 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
857 left_type = TREE_TYPE (left_operand);
858 left_base_type = get_base_type (left_type);
861 if (TREE_CODE (right_type) == RECORD_TYPE
862 && TYPE_JUSTIFIED_MODULAR_P (right_type))
864 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
866 right_type = TREE_TYPE (right_operand);
867 right_base_type = get_base_type (right_type);
870 /* If both objects are arrays, compare them specially. */
871 if ((TREE_CODE (left_type) == ARRAY_TYPE
872 || (TREE_CODE (left_type) == INTEGER_TYPE
873 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
874 && (TREE_CODE (right_type) == ARRAY_TYPE
875 || (TREE_CODE (right_type) == INTEGER_TYPE
876 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
878 result = compare_arrays (result_type, left_operand, right_operand);
880 if (op_code == NE_EXPR)
881 result = invert_truthvalue (result);
883 gcc_assert (op_code == EQ_EXPR);
888 /* Otherwise, the base types must be the same unless the objects are
889 fat pointers or records. If we have records, use the best type and
890 convert both operands to that type. */
891 if (left_base_type != right_base_type)
893 if (TYPE_FAT_POINTER_P (left_base_type)
894 && TYPE_FAT_POINTER_P (right_base_type)
895 && TYPE_MAIN_VARIANT (left_base_type)
896 == TYPE_MAIN_VARIANT (right_base_type))
897 best_type = left_base_type;
898 else if (TREE_CODE (left_base_type) == RECORD_TYPE
899 && TREE_CODE (right_base_type) == RECORD_TYPE)
901 /* The only way these are permitted to be the same is if both
902 types have the same name. In that case, one of them must
903 not be self-referential. Use that one as the best type.
904 Even better is if one is of fixed size. */
905 gcc_assert (TYPE_NAME (left_base_type)
906 && (TYPE_NAME (left_base_type)
907 == TYPE_NAME (right_base_type)));
909 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
910 best_type = left_base_type;
911 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
912 best_type = right_base_type;
913 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
914 best_type = left_base_type;
915 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
916 best_type = right_base_type;
923 left_operand = convert (best_type, left_operand);
924 right_operand = convert (best_type, right_operand);
927 /* If we are comparing a fat pointer against zero, we need to
928 just compare the data pointer. */
929 else if (TYPE_FAT_POINTER_P (left_base_type)
930 && TREE_CODE (right_operand) == CONSTRUCTOR
931 && integer_zerop (VEC_index (constructor_elt,
932 CONSTRUCTOR_ELTS (right_operand),
936 right_operand = build_component_ref (left_operand, NULL_TREE,
937 TYPE_FIELDS (left_base_type),
939 left_operand = convert (TREE_TYPE (right_operand),
944 left_operand = convert (left_base_type, left_operand);
945 right_operand = convert (right_base_type, right_operand);
951 case PREINCREMENT_EXPR:
952 case PREDECREMENT_EXPR:
953 case POSTINCREMENT_EXPR:
954 case POSTDECREMENT_EXPR:
955 /* These operations are not used anymore. */
/* Presumably gcc_unreachable () here (elided); shift cases follow.  */
962 /* The RHS of a shift can be any type. Also, ignore any modulus
963 (we used to abort, but this is needed for unchecked conversion
964 to modular types). Otherwise, processing is the same as normal. */
965 gcc_assert (operation_type == left_base_type);
967 left_operand = convert (operation_type, left_operand);
970 case TRUTH_ANDIF_EXPR:
971 case TRUTH_ORIF_EXPR:
975 left_operand = gnat_truthvalue_conversion (left_operand);
976 right_operand = gnat_truthvalue_conversion (right_operand);
982 /* For binary modulus, if the inputs are in range, so are the
984 if (modulus && integer_pow2p (modulus))
/* Presumably `modulus = NULL_TREE;' here (elided); ordering comparisons
   follow -- TODO confirm.  */
989 gcc_assert (TREE_TYPE (result_type) == left_base_type
990 && TREE_TYPE (result_type) == right_base_type);
991 left_operand = convert (left_base_type, left_operand);
992 right_operand = convert (right_base_type, right_operand);
995 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
996 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
997 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
998 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
999 /* These always produce results lower than either operand. */
1000 modulus = NULL_TREE;
1003 case POINTER_PLUS_EXPR:
1004 gcc_assert (operation_type == left_base_type
1005 && sizetype == right_base_type);
1006 left_operand = convert (operation_type, left_operand);
1007 right_operand = convert (sizetype, right_operand);
1010 case PLUS_NOMOD_EXPR:
1011 case MINUS_NOMOD_EXPR:
1012 if (op_code == PLUS_NOMOD_EXPR)
1013 op_code = PLUS_EXPR;
1015 op_code = MINUS_EXPR;
1016 modulus = NULL_TREE;
1018 /* ... fall through ... */
1022 /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1023 other compilers. Contrary to C, Ada doesn't allow arithmetics in
1024 these types but can generate addition/subtraction for Succ/Pred. */
1026 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1027 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1028 operation_type = left_base_type = right_base_type
1029 = gnat_type_for_mode (TYPE_MODE (operation_type),
1030 TYPE_UNSIGNED (operation_type));
1032 /* ... fall through ... */
1036 /* The result type should be the same as the base types of the
1037 both operands (and they should be the same). Convert
1038 everything to the result type. */
1040 gcc_assert (operation_type == left_base_type
1041 && left_base_type == right_base_type);
1042 left_operand = convert (operation_type, left_operand);
1043 right_operand = convert (operation_type, right_operand);
1046 if (modulus && !integer_pow2p (modulus))
1048 result = nonbinary_modular_operation (op_code, operation_type,
1049 left_operand, right_operand);
1050 modulus = NULL_TREE;
1052 /* If either operand is a NULL_EXPR, just return a new one. */
1053 else if (TREE_CODE (left_operand) == NULL_EXPR)
1054 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1055 else if (TREE_CODE (right_operand) == NULL_EXPR)
1056 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1057 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1058 result = fold (build4 (op_code, operation_type, left_operand,
1059 right_operand, NULL_TREE, NULL_TREE));
1062 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1064 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1065 TREE_CONSTANT (result)
1066 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1067 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1069 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1070 && TYPE_VOLATILE (operation_type))
1071 TREE_THIS_VOLATILE (result) = 1;
1073 /* If we are working with modular types, perform the MOD operation
1074 if something above hasn't eliminated the need for it. */
1076 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1077 convert (operation_type, modulus));
1079 if (result_type && result_type != operation_type)
1080 result = convert (result_type, result);
/* Build a unary operation OP_CODE on OPERAND, producing RESULT_TYPE when
   given (NULL_TREE lets the function derive the type).  Beyond plain
   folding, this handles the Ada-specific cases visible below: justified
   modular records, 'Address / ADDR_EXPR on component and array
   references, dereference of fat/thin pointers to unconstrained arrays,
   and NEGATE/NOT on modular integer types.

   NOTE(review): this is a sampled extract -- intervening original lines
   (braces, else arms, case labels, returns) are absent.  The code is
   kept verbatim; comments describe only what the visible lines show.  */
1085 /* Similar, but for unary operations. */
1088 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1090 tree type = TREE_TYPE (operand);
1091 tree base_type = get_base_type (type);
1092 tree operation_type = result_type;
1094 bool side_effects = false;
/* A justified modular record is operated on via its single field's
   type (same unwrapping as in build_binary_op).  */
1097 && TREE_CODE (operation_type) == RECORD_TYPE
1098 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1099 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
/* Strip extra subtypes of scalar types down to their base type.  */
1102 && !AGGREGATE_TYPE_P (operation_type)
1103 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1104 operation_type = get_base_type (operation_type);
1110 if (!operation_type)
1111 result_type = operation_type = TREE_TYPE (type);
1113 gcc_assert (result_type == TREE_TYPE (type));
1115 result = fold_build1 (op_code, operation_type, operand);
1118 case TRUTH_NOT_EXPR:
1119 gcc_assert (result_type == base_type);
1120 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
/* ADDR_EXPR and the 'Address attribute share most of this handling;
   the operand's own tree code selects the strategy.  */
1123 case ATTR_ADDR_EXPR:
1125 switch (TREE_CODE (operand))
1128 case UNCONSTRAINED_ARRAY_REF:
/* Address of an unconstrained array ref: reuse the underlying
   (fat/thin) pointer rather than materializing the reference.  */
1129 result = TREE_OPERAND (operand, 0);
1131 /* Make sure the type here is a pointer, not a reference.
1132 GCC wants pointer types for function addresses. */
1134 result_type = build_pointer_type (type);
1136 /* If the underlying object can alias everything, propagate the
1137 property since we are effectively retrieving the object. */
1138 if (POINTER_TYPE_P (TREE_TYPE (result))
1139 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1141 if (TREE_CODE (result_type) == POINTER_TYPE
1142 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1144 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1145 TYPE_MODE (result_type),
1147 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1148 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1150 = build_reference_type_for_mode (TREE_TYPE (result_type),
1151 TYPE_MODE (result_type),
1158 TREE_TYPE (result) = type = build_pointer_type (type);
1162 case ARRAY_RANGE_REF:
1165 /* If this is for 'Address, find the address of the prefix and
1166 add the offset to the field. Otherwise, do this the normal
1168 if (op_code == ATTR_ADDR_EXPR)
1170 HOST_WIDE_INT bitsize;
1171 HOST_WIDE_INT bitpos;
1173 enum machine_mode mode;
1174 int unsignedp, volatilep;
1176 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1177 &mode, &unsignedp, &volatilep,
1180 /* If INNER is a padding type whose field has a self-referential
1181 size, convert to that inner type. We know the offset is zero
1182 and we need to have that type visible. */
1183 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1184 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1185 && (CONTAINS_PLACEHOLDER_P
1186 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1187 (TREE_TYPE (inner)))))))
1188 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1191 /* Compute the offset as a byte offset from INNER. */
1193 offset = size_zero_node;
/* 'Address of a bit-misaligned object is dubious; warn (message text
   below) instead of silently rounding the byte offset down.  */
1195 if (bitpos % BITS_PER_UNIT != 0)
1197 ("taking address of object not aligned on storage unit?",
1200 offset = size_binop (PLUS_EXPR, offset,
1201 size_int (bitpos / BITS_PER_UNIT));
1203 /* Take the address of INNER, convert the offset to void *, and
1204 add then. It will later be converted to the desired result
1206 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1207 inner = convert (ptr_void_type_node, inner);
1208 result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1210 result = convert (build_pointer_type (TREE_TYPE (operand)),
1217 /* If this is just a constructor for a padded record, we can
1218 just take the address of the single field and convert it to
1219 a pointer to our type. */
1220 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1222 result = (VEC_index (constructor_elt,
1223 CONSTRUCTOR_ELTS (operand),
1227 result = convert (build_pointer_type (TREE_TYPE (operand)),
1228 build_unary_op (ADDR_EXPR, NULL_TREE, result));
/* Aggregate-to-aggregate conversions: take the address of the
   underlying object directly, since the representation is shared.  */
1235 if (AGGREGATE_TYPE_P (type)
1236 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1237 return build_unary_op (ADDR_EXPR, result_type,
1238 TREE_OPERAND (operand, 0));
1240 /* ... fallthru ... */
1242 case VIEW_CONVERT_EXPR:
1243 /* If this just a variant conversion or if the conversion doesn't
1244 change the mode, get the result type from this type and go down.
1245 This is needed for conversions of CONST_DECLs, to eventually get
1246 to the address of their CORRESPONDING_VARs. */
1247 if ((TYPE_MAIN_VARIANT (type)
1248 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1249 || (TYPE_MODE (type) != BLKmode
1250 && (TYPE_MODE (type)
1251 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1252 return build_unary_op (ADDR_EXPR,
1253 (result_type ? result_type
1254 : build_pointer_type (type)),
1255 TREE_OPERAND (operand, 0));
/* A CONST_DECL has no address of its own; use its corresponding
   variable instead.  */
1259 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1261 /* ... fall through ... */
1266 /* If we are taking the address of a padded record whose field is
1267 contains a template, take the address of the template. */
1268 if (TREE_CODE (type) == RECORD_TYPE
1269 && TYPE_IS_PADDING_P (type)
1270 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1271 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1273 type = TREE_TYPE (TYPE_FIELDS (type));
1274 operand = convert (type, operand);
1277 if (type != error_mark_node)
1278 operation_type = build_pointer_type (type);
1280 gnat_mark_addressable (operand);
1281 result = fold_build1 (ADDR_EXPR, operation_type, operand);
1284 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1288 /* If we want to refer to an entire unconstrained array,
1289 make up an expression to do so. This will never survive to
1290 the backend. If TYPE is a thin pointer, first convert the
1291 operand to a fat pointer. */
1292 if (TYPE_THIN_POINTER_P (type)
1293 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1296 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1298 type = TREE_TYPE (operand);
1301 if (TYPE_FAT_POINTER_P (type))
1303 result = build1 (UNCONSTRAINED_ARRAY_REF,
1304 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1305 TREE_READONLY (result) = TREE_STATIC (result)
1306 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
/* Dereference of an ADDR_EXPR cancels out to the original object.  */
1308 else if (TREE_CODE (operand) == ADDR_EXPR)
1309 result = TREE_OPERAND (operand, 0);
1313 result = fold_build1 (op_code, TREE_TYPE (type), operand);
1314 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1318 = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
/* Arithmetic on modular types: MODULUS is the Ada modulus if the
   operation type is a modular integer type, NULL_TREE otherwise.  */
1324 tree modulus = ((operation_type
1325 && TREE_CODE (operation_type) == INTEGER_TYPE
1326 && TYPE_MODULAR_P (operation_type))
1327 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1328 int mod_pow2 = modulus && integer_pow2p (modulus);
1330 /* If this is a modular type, there are various possibilities
1331 depending on the operation and whether the modulus is a
1332 power of two or not. */
1336 gcc_assert (operation_type == base_type);
1337 operand = convert (operation_type, operand);
1339 /* The fastest in the negate case for binary modulus is
1340 the straightforward code; the TRUNC_MOD_EXPR below
1341 is an AND operation. */
1342 if (op_code == NEGATE_EXPR && mod_pow2)
1343 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1344 fold_build1 (NEGATE_EXPR, operation_type,
1348 /* For nonbinary negate case, return zero for zero operand,
1349 else return the modulus minus the operand. If the modulus
1350 is a power of two minus one, we can do the subtraction
1351 as an XOR since it is equivalent and faster on most machines. */
1352 else if (op_code == NEGATE_EXPR && !mod_pow2)
1354 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1356 convert (operation_type,
1357 integer_one_node))))
1358 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1361 result = fold_build2 (MINUS_EXPR, operation_type,
/* Guard the subtraction with a zero test so that -0 stays 0.  */
1364 result = fold_build3 (COND_EXPR, operation_type,
1365 fold_build2 (NE_EXPR,
1370 integer_zero_node)),
1375 /* For the NOT cases, we need a constant equal to
1376 the modulus minus one. For a binary modulus, we
1377 XOR against the constant and subtract the operand from
1378 that constant for nonbinary modulus. */
1380 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1381 convert (operation_type,
1385 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1388 result = fold_build2 (MINUS_EXPR, operation_type,
1396 /* ... fall through ... */
/* Default arithmetic case: convert to the base type and fold.  */
1399 gcc_assert (operation_type == base_type);
1400 result = fold_build1 (op_code, operation_type,
1401 convert (operation_type, operand));
1406 TREE_SIDE_EFFECTS (result) = 1;
1407 if (TREE_CODE (result) == INDIRECT_REF)
1408 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
/* Finally convert to the requested RESULT_TYPE if it differs.  */
1411 if (result_type && TREE_TYPE (result) != result_type)
1412 result = convert (result_type, result);
/* Build a COND_EXPR selecting TRUE_OPERAND or FALSE_OPERAND per
   CONDITION_OPERAND, with both arms converted to RESULT_TYPE.
   Unconstrained/self-referential result types are handled by taking
   the arms' addresses and dereferencing the chosen pointer.

   NOTE(review): sampled extract; some original lines (braces, returns)
   are missing -- code kept verbatim.  */
1417 /* Similar, but for COND_EXPR. */
1420 build_cond_expr (tree result_type, tree condition_operand,
1421 tree true_operand, tree false_operand)
1423 bool addr_p = false;
1426 /* The front-end verified that result, true and false operands have
1427 same base type. Convert everything to the result type. */
1428 true_operand = convert (result_type, true_operand);
1429 false_operand = convert (result_type, false_operand);
1431 /* If the result type is unconstrained, take the address of the operands
1432 and then dereference our result. */
1433 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1434 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1436 result_type = build_pointer_type (result_type);
1437 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1438 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1442 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1443 true_operand, false_operand);
1445 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1446 in both arms, make sure it gets evaluated by moving it ahead of the
1447 conditional expression. This is necessary because it is evaluated
1448 in only one place at run time and would otherwise be uninitialized
1449 in one of the arms. */
1450 true_operand = skip_simple_arithmetic (true_operand);
1451 false_operand = skip_simple_arithmetic (false_operand);
1453 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1454 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
/* Undo the address-taking done above in the unconstrained case.  */
1457 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
/* Build a RETURN_EXPR.  With a RESULT_DECL, wrap the assignment of
   RET_VAL into RESULT_DECL; otherwise build a bare return.

   NOTE(review): sampled extract; some original lines are missing --
   code kept verbatim.  */
1462 /* Similar, but for RETURN_EXPR. If RESULT_DECL is non-zero, build
1463 a RETURN_EXPR around the assignment of RET_VAL to RESULT_DECL.
1464 If RESULT_DECL is zero, build a bare RETURN_EXPR. */
1467 build_return_expr (tree result_decl, tree ret_val)
1473 /* The gimplifier explicitly enforces the following invariant:
1482 As a consequence, type-homogeneity dictates that we use the type
1483 of the RESULT_DECL as the operation type. */
1485 tree operation_type = TREE_TYPE (result_decl);
1487 /* Convert the right operand to the operation type. Note that
1488 it's the same transformation as in the MODIFY_EXPR case of
1489 build_binary_op with the additional guarantee that the type
1490 cannot involve a placeholder, since otherwise the function
1491 would use the "target pointer" return mechanism. */
1493 if (operation_type != TREE_TYPE (ret_val))
1494 ret_val = convert (operation_type, ret_val);
1497 = build2 (MODIFY_EXPR, operation_type, result_decl, ret_val);
/* Bare return: no assignment underneath the RETURN_EXPR.  */
1500 result_expr = NULL_TREE;
1502 return build1 (RETURN_EXPR, void_type_node, result_expr);
/* Build a one-argument CALL_EXPR to FUNDECL and force side effects on
   the call node.  NOTE(review): sampled extract; the argument-count
   line and the return are missing -- code kept verbatim.  */
1505 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1509 build_call_1_expr (tree fundecl, tree arg)
1511 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1512 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1514 TREE_SIDE_EFFECTS (call) = 1;
/* Build a two-argument CALL_EXPR to FUNDECL and force side effects on
   the call node.  NOTE(review): sampled extract; the argument-count
   line and the return are missing -- code kept verbatim.  */
1518 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1522 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1524 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1525 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1527 TREE_SIDE_EFFECTS (call) = 1;
/* Build a zero-argument CALL_EXPR to FUNDECL.  Unlike the 1/2-argument
   variants, TREE_SIDE_EFFECTS is deliberately left to build_call_nary
   so purity of parameterless functions can propagate.
   NOTE(review): sampled extract; the return is missing -- code kept
   verbatim.  */
1531 /* Likewise to call FUNDECL with no arguments. */
1534 build_call_0_expr (tree fundecl)
1536 /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS. This makes
1537 it possible to propagate DECL_IS_PURE on parameterless functions. */
1538 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1539 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
/* Build a call to the exception-raising routine selected by MSG,
   passing source file name and line number derived from GNAT_NODE.
   When an exception label exists for KIND, emit a local GOTO (plus an
   optional Local_Raise call) instead of the run-time call.

   NOTE(review): sampled extract; several original lines (declarations,
   braces, parts of conditionals) are missing -- code kept verbatim.  */
1544 /* Call a function that raises an exception and pass the line number and file
1545 name, if requested. MSG says which exception function to call.
1547 GNAT_NODE is the gnat node conveying the source location for which the
1548 error should be signaled, or Empty in which case the error is signaled on
1549 the current ref_file_name/input_line.
1551 KIND says which kind of exception this is for
1552 (N_Raise_{Constraint,Storage,Program}_Error). */
1555 build_call_raise (int msg, Node_Id gnat_node, char kind)
1557 tree fndecl = gnat_raise_decls[msg];
1558 tree label = get_exception_label (kind);
1564 /* If this is to be done as a goto, handle that case. */
1567 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1568 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1570 /* If Local_Raise is present, generate
1571 Local_Raise (exception'Identity); */
1572 if (Present (local_raise))
1574 tree gnu_local_raise
1575 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1576 tree gnu_exception_entity
1577 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1579 = build_call_1_expr (gnu_local_raise,
1580 build_unary_op (ADDR_EXPR, NULL_TREE,
1581 gnu_exception_entity));
1583 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1584 gnu_call, gnu_result);}
/* Pick the file name string: suppressed locations get a placeholder,
   otherwise use GNAT_NODE's source file when it has a location.  */
1590 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1592 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1593 ? IDENTIFIER_POINTER
1594 (get_identifier (Get_Name_String
1596 (Get_Source_File_Index (Sloc (gnat_node))))))
1600 filename = build_string (len, str);
1602 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1603 ? Get_Logical_Line_Number (Sloc(gnat_node)) : input_line;
1605 TREE_TYPE (filename)
1606 = build_array_type (char_type_node, build_index_type (size_int (len)));
/* The raise routine takes (file-name address, line number).  */
1609 build_call_2_expr (fndecl,
1610 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1612 build_int_cst (NULL_TREE, line_number));
/* qsort callback: order two constructor elements by the bit position
   of their FIELD_DECLs, breaking ties by DECL_UID for a stable,
   deterministic order.  NOTE(review): sampled extract; declaration of
   `ret' is on a missing line -- code kept verbatim.  */
1615 /* qsort comparer for the bit positions of two constructor elements
1616 for record components. */
1619 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1621 const_tree const elmt1 = * (const_tree const *) rt1;
1622 const_tree const elmt2 = * (const_tree const *) rt2;
1623 const_tree const field1 = TREE_PURPOSE (elmt1);
1624 const_tree const field2 = TREE_PURPOSE (elmt2);
1626 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1628 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
/* Build a CONSTRUCTOR of TYPE from the TREE_LIST LIST, computing the
   CONSTANT/STATIC/SIDE_EFFECTS/READONLY flags from the elements and,
   for all-constant record constructors, sorting fields by bit position
   so the result can be emitted as static data.

   NOTE(review): sampled extract; some original lines (declarations,
   braces, array-fill statement) are missing -- code kept verbatim.  */
1631 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1634 gnat_build_constructor (tree type, tree list)
1636 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1637 bool side_effects = false;
1641 /* Scan the elements to see if they are all constant or if any has side
1642 effects, to let us set global flags on the resulting constructor. Count
1643 the elements along the way for possible sorting purposes below. */
1644 for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts ++)
1646 tree obj = TREE_PURPOSE (elmt);
1647 tree val = TREE_VALUE (elmt);
1649 /* The predicate must be in keeping with output_constructor. */
1650 if (!TREE_CONSTANT (val)
1651 || (TREE_CODE (type) == RECORD_TYPE
1652 && CONSTRUCTOR_BITFIELD_P (obj)
1653 && !initializer_constant_valid_for_bitfield_p (val))
1654 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1655 allconstant = false;
1657 if (TREE_SIDE_EFFECTS (val))
1658 side_effects = true;
1660 /* Propagate an NULL_EXPR from the size of the type. We won't ever
1661 be executing the code we generate here in that case, but handle it
1662 specially to avoid the compiler blowing up. */
1663 if (TREE_CODE (type) == RECORD_TYPE
1664 && (result = contains_null_expr (DECL_SIZE (obj))) != NULL_TREE)
1665 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1668 /* For record types with constant components only, sort field list
1669 by increasing bit position. This is necessary to ensure the
1670 constructor can be output as static data. */
1671 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1673 /* Fill an array with an element tree per index, and ask qsort to order
1674 them according to what a bitpos comparison function says. */
1675 tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
1678 for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
1681 qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);
1683 /* Then reconstruct the list from the sorted array contents. */
/* Walk backwards so each element is prepended in sorted order.  */
1685 for (i = n_elmts - 1; i >= 0; i--)
1687 TREE_CHAIN (gnu_arr[i]) = list;
1692 result = build_constructor_from_list (type, list);
1693 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1694 TREE_SIDE_EFFECTS (result) = side_effects;
1695 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
/* Build a COMPONENT_REF for COMPONENT (a field name) or FIELD (a
   FIELD_DECL) in RECORD_VARIABLE, searching variant parts when the
   field is not directly in the record's type.  Returns the reference,
   or (from visible code paths) NULL on overflowed field offsets; the
   exact failure returns are on missing lines.

   NOTE(review): sampled extract; many original lines (braces, else
   arms, returns, some declarations) are missing -- code kept
   verbatim.  */
1699 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1700 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1701 for the field. Don't fold the result if NO_FOLD_P is true.
1703 We also handle the fact that we might have been passed a pointer to the
1704 actual record and know how to look for fields in variant parts. */
1707 build_simple_component_ref (tree record_variable, tree component,
1708 tree field, bool no_fold_p)
1710 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1711 tree ref, inner_variable;
/* Exactly one of COMPONENT and FIELD must be supplied.  */
1713 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1714 || TREE_CODE (record_type) == UNION_TYPE
1715 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1716 && TYPE_SIZE (record_type)
1717 && (component != 0) != (field != 0));
1719 /* If no field was specified, look for a field with the specified name
1720 in the current record only. */
1722 for (field = TYPE_FIELDS (record_type); field;
1723 field = TREE_CHAIN (field))
1724 if (DECL_NAME (field) == component)
1730 /* If this field is not in the specified record, see if we can find
1731 something in the record whose original field is the same as this one. */
1732 if (DECL_CONTEXT (field) != record_type)
1733 /* Check if there is a field with name COMPONENT in the record. */
1737 /* First loop thru normal components. */
1739 for (new_field = TYPE_FIELDS (record_type); new_field;
1740 new_field = TREE_CHAIN (new_field))
1741 if (field == new_field
1742 || DECL_ORIGINAL_FIELD (new_field) == field
1743 || new_field == DECL_ORIGINAL_FIELD (field)
1744 || (DECL_ORIGINAL_FIELD (field)
1745 && (DECL_ORIGINAL_FIELD (field)
1746 == DECL_ORIGINAL_FIELD (new_field))))
1749 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1750 the component in the first search. Doing this search in 2 steps
1751 is required to avoiding hidden homonymous fields in the
1755 for (new_field = TYPE_FIELDS (record_type); new_field;
1756 new_field = TREE_CHAIN (new_field))
1757 if (DECL_INTERNAL_P (new_field))
/* Recurse through the internal (variant-part) field, then look up
   FIELD inside that sub-record.  */
1760 = build_simple_component_ref (record_variable,
1761 NULL_TREE, new_field, no_fold_p)
1762 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1775 /* If the field's offset has overflowed, do not attempt to access it
1776 as doing so may trigger sanity checks deeper in the back-end.
1777 Note that we don't need to warn since this will be done on trying
1778 to declare the object. */
1779 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1780 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1783 /* Look through conversion between type variants. Note that this
1784 is transparent as far as the field is concerned. */
1785 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1786 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1788 inner_variable = TREE_OPERAND (record_variable, 0);
1790 inner_variable = record_variable;
1792 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
/* Propagate readonly/volatile from both the record object and the
   field declaration onto the reference.  */
1795 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1796 TREE_READONLY (ref) = 1;
1797 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1798 || TYPE_VOLATILE (record_type))
1799 TREE_THIS_VOLATILE (ref) = 1;
1804 /* The generic folder may punt in this case because the inner array type
1805 can be self-referential, but folding is in fact not problematic. */
1806 else if (TREE_CODE (record_variable) == CONSTRUCTOR
1807 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1809 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1810 unsigned HOST_WIDE_INT idx;
1812 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
/* Wrapper over build_simple_component_ref that, when the reference
   cannot be built and FIELD was given, returns a NULL_EXPR raising
   Constraint_Error (discriminant check failure).

   NOTE(review): sampled extract; the success-return and the
   no-type-to-return abort path are on missing lines -- code kept
   verbatim.  */
1822 /* Like build_simple_component_ref, except that we give an error if the
1823 reference could not be found. */
1826 build_component_ref (tree record_variable, tree component,
1827 tree field, bool no_fold_p)
1829 tree ref = build_simple_component_ref (record_variable, component, field,
1835 /* If FIELD was specified, assume this is an invalid user field so raise
1836 Constraint_Error. Otherwise, we have no type to return so abort. */
1838 return build1 (NULL_EXPR, TREE_TYPE (field),
1839 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1840 N_Raise_Constraint_Error));
/* Build a call to the user-supplied storage-pool procedure GNAT_PROC
   for allocation or deallocation.  Tagged pools take (pool address,
   [object address,] size, alignment); the untagged secondary-stack
   variant takes ([object address,] size).

   NOTE(review): sampled extract; the allocator/deallocator branch
   conditions and braces are on missing lines -- code kept verbatim.  */
1843 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
1844 identically. Process the case where a GNAT_PROC to call is provided. */
1847 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
1848 Entity_Id gnat_proc, Entity_Id gnat_pool)
1850 tree gnu_proc = gnat_to_gnu (gnat_proc);
1851 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1854 /* The storage pools are obviously always tagged types, but the
1855 secondary stack uses the same mechanism and is not tagged. */
1856 if (Is_Tagged_Type (Etype (gnat_pool)))
1858 /* The size is the third parameter; the alignment is the
1860 Entity_Id gnat_size_type
1861 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1862 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1864 tree gnu_pool = gnat_to_gnu (gnat_pool);
1865 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1866 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
/* Size and alignment are converted to the pool procedure's own
   formal size type before the call.  */
1868 gnu_size = convert (gnu_size_type, gnu_size);
1869 gnu_align = convert (gnu_size_type, gnu_align);
1871 /* The first arg is always the address of the storage pool; next
1872 comes the address of the object, for a deallocator, then the
1873 size and alignment. */
1875 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1876 gnu_proc_addr, 4, gnu_pool_addr,
1877 gnu_obj, gnu_size, gnu_align);
1879 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1880 gnu_proc_addr, 3, gnu_pool_addr,
1881 gnu_size, gnu_align);
1884 /* Secondary stack case. */
1887 /* The size is the second parameter. */
1888 Entity_Id gnat_size_type
1889 = Etype (Next_Formal (First_Formal (gnat_proc)));
1890 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1892 gnu_size = convert (gnu_size_type, gnu_size);
1894 /* The first arg is the address of the object, for a deallocator,
1897 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1898 gnu_proc_addr, 2, gnu_obj, gnu_size);
1900 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1901 gnu_proc_addr, 1, gnu_size);
1904 TREE_SIDE_EFFECTS (gnu_call) = 1;
/* Build a __gnat_malloc allocation of DATA_SIZE bytes for a DATA_TYPE
   object.  When DATA_TYPE needs stricter alignment than the default
   allocator provides, allocate an oversized "aligning" wrapper, return
   the aligned field's address, and stash malloc's raw pointer one
   pointer-size in front so maybe_wrap_free can recover it.

   NOTE(review): sampled extract; declarations of malloc_ptr etc. and
   some braces/returns are on missing lines -- code kept verbatim.  */
1908 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
1909 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
1910 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
1914 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
1916 /* When the DATA_TYPE alignment is stricter than what malloc offers
1917 (super-aligned case), we allocate an "aligning" wrapper type and return
1918 the address of its single data field with the malloc's return value
1919 stored just in front. */
1921 unsigned int data_align = TYPE_ALIGN (data_type);
1922 unsigned int default_allocator_alignment
1923 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
1926 = ((data_align > default_allocator_alignment)
1927 ? make_aligning_type (data_type, data_align, data_size,
1928 default_allocator_alignment,
1929 POINTER_SIZE / BITS_PER_UNIT)
/* In the aligning case the allocation size is the wrapper type's
   size, not DATA_SIZE.  */
1933 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
1937 /* On VMS, if 64-bit memory is disabled or pointers are 64-bit and the
1938 allocator size is 32-bit or Convention C, allocate 32-bit memory. */
1939 if (TARGET_ABI_OPEN_VMS
1940 && (!TARGET_MALLOC64
1941 || (POINTER_SIZE == 64
1942 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
1943 || Convention (Etype (gnat_node)) == Convention_C))))
1944 malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
1946 malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);
1950 /* Latch malloc's return value and get a pointer to the aligning field
1952 tree storage_ptr = save_expr (malloc_ptr);
1954 tree aligning_record_addr
1955 = convert (build_pointer_type (aligning_type), storage_ptr);
1957 tree aligning_record
1958 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
1961 = build_component_ref (aligning_record, NULL_TREE,
1962 TYPE_FIELDS (aligning_type), 0);
1964 tree aligning_field_addr
1965 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
1967 /* Then arrange to store the allocator's return value ahead
1969 tree storage_ptr_slot_addr
1970 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1971 convert (ptr_void_type_node, aligning_field_addr),
1972 size_int (-POINTER_SIZE/BITS_PER_UNIT));
1974 tree storage_ptr_slot
1975 = build_unary_op (INDIRECT_REF, NULL_TREE,
1976 convert (build_pointer_type (ptr_void_type_node),
1977 storage_ptr_slot_addr));
/* Value of the whole expression: store raw pointer, then yield the
   aligned field's address.  */
1980 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
1981 build_binary_op (MODIFY_EXPR, NULL_TREE,
1982 storage_ptr_slot, storage_ptr),
1983 aligning_field_addr);
/* Build a __gnat_free call for DATA_PTR.  In the super-aligned case
   the real allocator pointer is fetched from the slot stored one
   pointer-size in front of the data (mirror of maybe_wrap_malloc).

   NOTE(review): sampled extract; declarations of free_ptr and
   data_front_ptr are on missing lines -- code kept verbatim.  */
1989 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
1990 designated by DATA_PTR using the __gnat_free entry point. */
1993 maybe_wrap_free (tree data_ptr, tree data_type)
1995 /* In the regular alignment case, we pass the data pointer straight to free.
1996 In the superaligned case, we need to retrieve the initial allocator
1997 return value, stored in front of the data block at allocation time. */
1999 unsigned int data_align = TYPE_ALIGN (data_type);
2000 unsigned int default_allocator_alignment
2001 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
2005 if (data_align > default_allocator_alignment)
2007 /* DATA_FRONT_PTR (void *)
2008 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2011 (POINTER_PLUS_EXPR, ptr_void_type_node,
2012 convert (ptr_void_type_node, data_ptr),
2013 size_int (-POINTER_SIZE/BITS_PER_UNIT));
2015 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2018 (INDIRECT_REF, NULL_TREE,
2019 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2022 free_ptr = data_ptr;
2024 return build_call_1_expr (free_decl, free_ptr);
/* Dispatch allocation/deallocation: a user pool procedure if
   GNAT_PROC is present, otherwise malloc/free wrappers that honor
   strict alignments.  GNU_OBJ nonzero means deallocate.

   NOTE(review): sampled extract; the gnu_obj branch condition and the
   last parameter's declaration line are missing -- code kept
   verbatim.  */
2027 /* Build a GCC tree to call an allocation or deallocation function.
2028 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2029 generate an allocator.
2031 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2032 object type, used to determine the to-be-honored address alignment.
2033 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2034 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2035 to provide an error location for restriction violation messages. */
2038 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2039 Entity_Id gnat_proc, Entity_Id gnat_pool,
/* GNU_SIZE may reference the object via a placeholder; resolve it.  */
2042 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2044 /* Explicit proc to call ? This one is assumed to deal with the type
2045 alignment constraints. */
2046 if (Present (gnat_proc))
2047 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2048 gnat_proc, gnat_pool);
2050 /* Otherwise, object to "free" or "malloc" with possible special processing
2051 for alignments stricter than what the default allocator honors. */
2053 return maybe_wrap_free (gnu_obj, gnu_type);
2056 /* Assert that we no longer can be called with this special pool. */
2057 gcc_assert (gnat_pool != -1);
2059 /* Check that we aren't violating the associated restriction. */
2060 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2061 Check_No_Implicit_Heap_Alloc (gnat_node);
2063 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
/* Build an allocator for a TYPE object, optionally initialized with
   INIT, returning the result as RESULT_TYPE (a pointer type).  Fat or
   thin pointer results allocate object + bounds template together and
   fill in the known parts.

   NOTE(review): sampled extract; numerous original lines (braces,
   else arms, some operands of the built trees) are missing -- code
   kept verbatim.  */
2067 /* Build a GCC tree to correspond to allocating an object of TYPE whose
2068 initial value is INIT, if INIT is nonzero. Convert the expression to
2069 RESULT_TYPE, which must be some type of pointer. Return the tree.
2071 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2072 the storage pool to use. GNAT_NODE is used to provide an error
2073 location for restriction violation messages. If IGNORE_INIT_TYPE is
2074 true, ignore the type of INIT for the purpose of determining the size;
2075 this will cause the maximum size to be allocated if TYPE is of
2076 self-referential size. */
2079 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2080 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2082 tree size = TYPE_SIZE_UNIT (type);
2085 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2086 if (init && TREE_CODE (init) == NULL_EXPR)
2087 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2089 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2090 sizes of the object and its template. Allocate the whole thing and
2091 fill in the parts that are known. */
2092 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
2095 = build_unc_object_type_from_ptr (result_type, type,
2096 get_identifier ("ALLOC"));
2097 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2098 tree storage_ptr_type = build_pointer_type (storage_type);
2100 tree template_cons = NULL_TREE;
2102 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2105 /* If the size overflows, pass -1 so the allocator will raise
2107 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2108 size = ssize_int (-1);
2110 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2111 gnat_proc, gnat_pool, gnat_node);
2112 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
/* Unwrap a padded record so INIT is converted to the inner type.  */
2114 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2116 type = TREE_TYPE (TYPE_FIELDS (type));
2119 init = convert (type, init);
2122 /* If there is an initializing expression, make a constructor for
2123 the entire object including the bounds and copy it into the
2124 object. If there is no initializing expression, just set the
2128 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
2130 template_cons = tree_cons (TYPE_FIELDS (storage_type),
2131 build_template (template_type, type,
2137 build2 (COMPOUND_EXPR, storage_ptr_type,
2139 (MODIFY_EXPR, storage_type,
2140 build_unary_op (INDIRECT_REF, NULL_TREE,
2141 convert (storage_ptr_type, storage)),
2142 gnat_build_constructor (storage_type, template_cons)),
2143 convert (storage_ptr_type, storage)));
/* No initializer: only the bounds template is stored.  */
2147 (COMPOUND_EXPR, result_type,
2149 (MODIFY_EXPR, template_type,
2151 (build_unary_op (INDIRECT_REF, NULL_TREE,
2152 convert (storage_ptr_type, storage)),
2153 NULL_TREE, TYPE_FIELDS (storage_type), 0),
2154 build_template (template_type, type, NULL_TREE)),
2155 convert (result_type, convert (storage_ptr_type, storage)));
2158 /* If we have an initializing expression, see if its size is simpler
2159 than the size from the type. */
2160 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2161 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2162 || CONTAINS_PLACEHOLDER_P (size)))
2163 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2165 /* If the size is still self-referential, reference the initializing
2166 expression, if it is present. If not, this must have been a
2167 call to allocate a library-level object, in which case we use
2168 the maximum size. */
2169 if (CONTAINS_PLACEHOLDER_P (size))
2171 if (!ignore_init_type && init)
2172 size = substitute_placeholder_in_expr (size, init);
2174 size = max_size (size, true);
2177 /* If the size overflows, pass -1 so the allocator will raise
2179 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2180 size = ssize_int (-1);
2182 result = convert (result_type,
2183 build_call_alloc_dealloc (NULL_TREE, size, type,
2184 gnat_proc, gnat_pool,
2187 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
2188 the value, and return the address. Do this with a COMPOUND_EXPR. */
2192 result = save_expr (result);
2194 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
2196 (MODIFY_EXPR, NULL_TREE,
2197 build_unary_op (INDIRECT_REF,
2198 TREE_TYPE (TREE_TYPE (result)), result),
2203 return convert (result_type, result);
2206 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
2207 GNAT_FORMAL is how we find the descriptor record. GNAT_ACTUAL is
2208 how we derive the source location to raise C_E on an out of range
     pointer value.

     NOTE(review): this listing is gapped -- the fused original line
     numbers skip values (2209-2211, 2213-2214, 2219, 2221, 2224, 2226,
     2230, 2235, 2239, 2241, 2245-2246, 2249-2250, 2252-2253), so the
     function's return type, the declaration of FIELD, several braces
     and parts of some expressions are not visible here.  Verify any
     change against the complete source.  */
2212 fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
     /* The descriptor record type is what the formal's pointer type
	points to (two TREE_TYPE hops from the PARM_DECL).  */
2215 tree parm_decl = get_gnu_tree (gnat_formal);
2216 tree const_list = NULL_TREE;
2217 tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
     /* DO_RANGE_CHECK is derived from the name of the record's first
	field; the comparison itself falls in a listing gap -- TODO
	confirm which field names enable the 32-bit pointer check.  */
2218 int do_range_check =
2220 IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));
     /* The descriptor describes EXPR itself, so EXPR must live in
	memory: peel any unconstrained-array wrapper and mark it
	addressable before taking its address below.  */
2222 expr = maybe_unconstrained_array (expr);
2223 gnat_mark_addressable (expr);
     /* Build one constructor element per field: each field's
	DECL_INITIAL is a template with a placeholder that we replace
	by EXPR, then convert to the field type.  */
2225 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
2227 tree conexpr = convert (TREE_TYPE (field),
2228 SUBSTITUTE_PLACEHOLDER_IN_EXPR
2229 (DECL_INITIAL (field), expr));
2231 /* Check to ensure that only 32bit pointers are passed in
2232 32bit descriptors */
2233 if (do_range_check &&
2234 strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
     /* Take EXPR's address as a 64-bit pointer and, if it is >=
	0x80000000 (i.e. does not fit in 32 bits), emit a conditional
	raise of Constraint_Error located at GNAT_ACTUAL.  Parts of the
	COND_EXPR's operands fall in listing gaps.  */
2236 tree pointer64type =
2237 build_pointer_type_for_mode (void_type_node, DImode, false);
2238 tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
2240 build_int_cstu (long_integer_type_node, 0x80000000);
2242 add_stmt (build3 (COND_EXPR, void_type_node,
2243 build_binary_op (GE_EXPR, long_integer_type_node,
2244 convert (long_integer_type_node,
2247 build_call_raise (CE_Range_Check_Failed, gnat_actual,
2248 N_Raise_Constraint_Error),
     /* Accumulate (field, value) pairs in reverse; NREVERSE below
	restores declaration order for the constructor.  */
2251 const_list = tree_cons (field, conexpr, const_list);
2254 return gnat_build_constructor (record_type, nreverse (const_list));
2257 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2258 should not be allocated in a register. Returns true if successful. */
2261 gnat_mark_addressable (tree expr_node)
2264 switch (TREE_CODE (expr_node))
2269 case ARRAY_RANGE_REF:
2272 case VIEW_CONVERT_EXPR:
2273 case NON_LVALUE_EXPR:
2275 expr_node = TREE_OPERAND (expr_node, 0);
2279 TREE_ADDRESSABLE (expr_node) = 1;
2285 TREE_ADDRESSABLE (expr_node) = 1;
2289 TREE_ADDRESSABLE (expr_node) = 1;
2293 return (DECL_CONST_CORRESPONDING_VAR (expr_node)
2294 && (gnat_mark_addressable
2295 (DECL_CONST_CORRESPONDING_VAR (expr_node))));