/****************************************************************************
 *                         GNAT COMPILER COMPONENTS                         *
 *                          C Implementation File                           *
 *         Copyright (C) 1992-2010, Free Software Foundation, Inc.          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not, see       *
 * <http://www.gnu.org/licenses/>.                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 ****************************************************************************/

#include "coretypes.h"
#include "tree-inline.h"

static tree find_common_type (tree, tree);
static tree compare_arrays (tree, tree, tree);
static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
static tree build_simple_component_ref (tree, tree, tree, bool);

/* Return the base type of TYPE.  */

get_base_type (tree type)
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

known_alignment (tree exp)
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    case VIEW_CONVERT_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));

      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));

      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
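      /* For instance (illustrative values): an address that is a multiple
         of 8 bytes plus an offset that is a multiple of 4 bytes is only
         guaranteed to be a multiple of 4 bytes, i.e. 32-bit aligned.  */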
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      this_alignment = lhs;
      this_alignment = MIN (lhs, rhs);

      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);

        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
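        /* Worked example (not from the original source): c = 24, i.e. binary
           11000, gives c & -c = 8, so the value is a multiple of 8 bytes,
           i.e. 64-bit aligned with 8-bit storage units.  */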
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
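      /* E.g. (illustrative values) a value that is a multiple of 4 bytes
         times one that is a multiple of 2 bytes yields a multiple of
         8 bytes, whence the product below.  */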
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = rhs;
      this_alignment = lhs;
      this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);

      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
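      /* Typical illustrative pattern: EXP of the form ADDR & -16, where the
         mask clears the low 4 bits and thus guarantees 16-byte, i.e.
         128-bit, alignment.  */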
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));

      this_alignment = expr_align (TREE_OPERAND (exp, 0));

        tree t = maybe_inline_call_in_expr (exp);
          return known_alignment (t);

      /* Fall through...  */

      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));

  return this_alignment;

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

find_common_type (tree t1, tree t2)
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must prevent writing more than what the target may hold if this is for
     an assignment and the case of tagged types is handled in build_binary_op,
     so use the lhs type if it is known to be smaller, or of constant size and
     the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
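/* For example, under these Ada semantics a String (1 .. 0) and a
   String (5 .. 2) are both null arrays and therefore compare equal, even
   though their bounds differ.  */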
compare_arrays (tree result_type, tree a1, tree a2)
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, it has to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
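      /* ("Superflat" describes an array whose upper bound is less than its
         lower bound minus 1, so that the length Last - First + 1 computed
         from the stored bounds would wrap around; a nonzero constant length
         rules that out.)  */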
      else if (TREE_CODE (length2) == INTEGER_CST)
          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          if (EXPR_P (this_a1_is_null))
            SET_EXPR_LOCATION (this_a1_is_null, input_location);

          this_a2_is_null = convert (result_type, boolean_false_node);

      /* Otherwise, compare the computed lengths.  */
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

            = build_binary_op (EQ_EXPR, result_type, length1, length2);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
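          /* E.g. a length of the form (First <= Last ? Last - First + 1 : 0)
             yields (First <= Last) as the non-null test; this exact shape is
             only illustrative here.  */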
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null = invert_truthvalue (TREE_OPERAND (length1, 0));
            this_a1_is_null = build_binary_op (EQ_EXPR, result_type, length1,
          if (EXPR_P (this_a1_is_null))
            SET_EXPR_LOCATION (this_a1_is_null, input_location);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null = invert_truthvalue (TREE_OPERAND (length2, 0));
            this_a2_is_null = build_binary_op (EQ_EXPR, result_type, length2,
          if (EXPR_P (this_a2_is_null))
            SET_EXPR_LOCATION (this_a2_is_null, input_location);

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      a1 = convert (type, a1);
      a2 = convert (type, a2);

      comparison = fold_build2 (EQ_EXPR, result_type, a1, a2);
      if (EXPR_P (comparison))
        SET_EXPR_LOCATION (comparison, input_location);

        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),

  /* If either operand has side-effects, it has to be evaluated before
     starting the comparison above since the place it would otherwise be
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
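  /* E.g. with a hypothetical modulus of 7: (x + 3) mod 7 is rewritten as
     (x - 4) mod 7, since -4 and +3 are congruent modulo 7.  */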
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;
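  /* Worked example (hypothetical modulus 100): the base precision is 7 bits;
     an addition can reach 99 + 99 = 198, needing 8 bits; a product can reach
     99 * 99 = 9801, needing 14 bits, i.e. twice the base precision.  */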
  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     precision possible.  */
  if (op_code == MULT_EXPR)
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),

  /* For the other operations, subtract the modulus if we are >= it.  */
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                            fold_build2 (MINUS_EXPR, op_type,

  return convert (type, result);

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          || (TREE_CODE (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))
                          && (TYPE_MODE (right_type) == BLKmode
                              || (TYPE_MODE (left_type)
                                  == TYPE_MODE (TREE_TYPE
                                                (left_operand, 0))))))))
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                    TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;

        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);

#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
            gcc_assert (op_code == EQ_EXPR);

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;

              left_operand = convert (best_type, left_operand);
              right_operand = convert (best_type, right_operand);

          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);

      /* If we are comparing a fat pointer against zero, we just need to
         compare the data pointer.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type)
          && TREE_CODE (right_operand) == CONSTRUCTOR
          && integer_zerop (VEC_index (constructor_elt,
                                       CONSTRUCTOR_ELTS (right_operand),
            = build_component_ref (left_operand, NULL_TREE,
                                   TYPE_FIELDS (left_base_type), false);
            = convert (TREE_TYPE (left_operand), integer_zero_node);

      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      left_operand = convert (operation_type, left_operand);

      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))

      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = MINUS_EXPR;

      /* ... fall through ... */

      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

      /* The result type should be the same as the base types of
         both operands (and they should be the same).  Convert
         everything to the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);

  if (modulus && !integer_pow2p (modulus))
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);

  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

/* Similar, but for unary operations.  */

build_unary_op (enum tree_code op_code, tree result_type, tree operand)
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  bool side_effects = false;

      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);

    case ATTR_ADDR_EXPR:
      switch (TREE_CODE (operand))
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),

          TREE_TYPE (result) = type = build_pointer_type (type);

          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);

        case ARRAY_RANGE_REF:

          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),

              /* Compute the offset as a byte offset from INNER.  */
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
              result = convert (build_pointer_type (TREE_TYPE (operand)),

          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));

          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));

          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);

      /* If we want to refer to an unconstrained array, use the appropriate
         expression to do so.  This will never survive down to the back-end.
         But if TYPE is a thin pointer, first convert to a fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
          type = TREE_TYPE (operand);

      if (TYPE_IS_FAT_POINTER_P (type))
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));

      /* If we are dereferencing an ADDR_EXPR, return its operand.  */
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      /* Otherwise, build and fold the indirect reference.  */
          result = build_fold_indirect_ref (operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));

        = (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));

        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
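            /* E.g. with the hypothetical modulus 255 = 2**8 - 1, the values
               255 - x and 255 ^ x coincide for every x in 0 .. 255, since
               XOR with all-ones just flips the low 8 bits.  */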
            else if (op_code == NEGATE_EXPR && !mod_pow2)
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                  result = fold_build2 (MINUS_EXPR, operation_type,

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   integer_zero_node)),

            /* For the NOT cases, we need a constant equal to the modulus
               minus one.  For a binary modulus, we XOR against that
               constant; for a nonbinary modulus, we subtract the operand
               from it.  */
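            /* E.g. for a hypothetical binary modulus 16, not x = 15 ^ x;
               for a nonbinary modulus 10, not x = 9 - x.  */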
              tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                       convert (operation_type,

              result = fold_build2 (BIT_XOR_EXPR, operation_type,
              result = fold_build2 (MINUS_EXPR, operation_type,

      /* ... fall through ... */

      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));

    TREE_SIDE_EFFECTS (result) = 1;
  if (TREE_CODE (result) == INDIRECT_REF)
    TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

/* Similar, but for COND_EXPR.  */

build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
  bool addr_p = false;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

/* Similar, but for RETURN_EXPR.  If RET_VAL is non-null, build a RETURN_EXPR
   around the assignment of RET_VAL to RET_OBJ.  Otherwise just build a bare
   RETURN_EXPR around RET_OBJ, which may be null in this case.  */

build_return_expr (tree ret_obj, tree ret_val)
      /* The gimplifier explicitly enforces the following invariant:

         As a consequence, type consistency dictates that we use the type
         of the RET_OBJ as the operation type.  */
      tree operation_type = TREE_TYPE (ret_obj);

      /* Convert the right operand to the operation type.  Note that it's the
         same transformation as in the MODIFY_EXPR case of build_binary_op,
         with the assumption that the type cannot involve a placeholder.  */
      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr = build2 (MODIFY_EXPR, operation_type, ret_obj, ret_val);

    result_expr = ret_obj;

  return build1 (RETURN_EXPR, void_type_node, result_expr);

/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

build_call_1_expr (tree fundecl, tree arg)
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
  TREE_SIDE_EFFECTS (call) = 1;

/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

build_call_2_expr (tree fundecl, tree arg1, tree arg2)
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
  TREE_SIDE_EFFECTS (call) = 1;

/* Likewise to call FUNDECL with no arguments.  */

build_call_0_expr (tree fundecl)
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),

/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

build_call_raise (int msg, Node_Id gnat_node, char kind)
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);

  /* If this is to be done as a goto, handle that case.  */
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Get_Source_File_Index (Sloc (gnat_node))))))

  filename = build_string (len, str);
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

    build_call_2_expr (fndecl,
                       build_pointer_type (unsigned_char_type_node),
                       build_int_cst (NULL_TREE, line_number));

/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

compare_elmt_bitpos (const PTR rt1, const PTR rt2)
  const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
  const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
  const_tree const field1 = elmt1->index;
  const_tree const field2 = elmt2->index;

    = tree_int_cst_compare (bit_position (field1), bit_position (field2));
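  /* Ties on the bit position (e.g. a zero-sized field followed by another
     field at the same offset) fall back to DECL_UID, which reflects the
     creation order of the FIELD_DECLs, so the outcome is deterministic.  */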
  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));

/* Return a CONSTRUCTOR of TYPE whose elements are V.  */

gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree result, obj, val;
  unsigned int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
      /* The predicate must be in keeping with output_constructor.  */
      if (!TREE_CONSTANT (val)
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;

  /* For record types with constant components only, sort the field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    qsort (VEC_address (constructor_elt, v), n_elmts,
           sizeof (constructor_elt), compare_elmt_bitpos);

  result = build_constructor (type, v);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;

/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
               || TREE_CODE (record_type) == UNION_TYPE
               || TREE_CODE (record_type) == QUAL_UNION_TYPE)
              && TYPE_SIZE (record_type)
              && (component != 0) != (field != 0));

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
    for (field = TYPE_FIELDS (record_type); field;
         field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)

  /* If this field is not in the specified record, see if we can find a field
     in the specified record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
      /* First loop thru normal components.  */
      for (new_field = TYPE_FIELDS (record_type); new_field;
           new_field = DECL_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))

      /* Next, see if we're looking for an inherited component in an
         extension.  If so, look thru the extension directly.  */
          && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
          && TYPE_ALIGN_OK (record_type)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
          && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
          ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
                                            NULL_TREE, field, no_fold_p);

      /* Next, loop thru DECL_INTERNAL_P components if we haven't found
         the component in the first search.  Doing this search in two steps
         is required to avoid hidden homonymous fields in the
         _Parent field.  */
        for (new_field = TYPE_FIELDS (record_type); new_field;
             new_field = DECL_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
              = build_simple_component_ref (record_variable,
                                            NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,

  /* If the field's offset has overflowed, do not attempt to access it,
     as doing so may trigger sanity checks deeper in the back-end.
     Note that we don't need to warn since this will be done on trying
     to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))

  /* Look through conversion between type variants.  Note that this
     is transparent as far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
    inner_variable = TREE_OPERAND (record_variable, 0);
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,

  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  else if (TREE_CODE (record_variable) == CONSTRUCTOR
           && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;

      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)

/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */

build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
  tree ref = build_simple_component_ref (record_variable, component, field,

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));

/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);

  /* The storage pools are obviously always tagged types, but the
     secondary stack uses the same mechanism and is not tagged.  */
  if (Is_Tagged_Type (Etype (gnat_pool)))
      /* The size is the third parameter; the alignment is the
         fourth parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 4, gnu_pool_addr,
                                    gnu_obj, gnu_size, gnu_align);
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 3, gnu_pool_addr,
                                    gnu_size, gnu_align);

  /* Secondary stack case.  */
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 2, gnu_obj, gnu_size);
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 1, gnu_size);

  TREE_SIDE_EFFECTS (gnu_call) = 1;

/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what
   the latter offers.  */

maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */
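  /* Memory layout sketch for the super-aligned case:

         storage_ptr (malloc's return value)
         |
         v
         +---------------+-------------+------------------------+
         |    padding    | storage_ptr |    data field ...      |
         +---------------+-------------+------------------------+
                                       ^
                                       aligned address returned
                                       to the caller

     The STORAGE_PTR saved in front is what maybe_wrap_free retrieves to
     release the whole block.  */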
  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

    = ((data_align > default_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             default_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT)

    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
     Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && (POINTER_SIZE == 64
          && (UI_To_Int (Esize (Etype (gnat_node))) == 32
              || Convention (Etype (gnat_node)) == Convention_C)))
    malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
    malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);

      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (MODIFY_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);

/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

maybe_wrap_free (tree data_ptr, tree data_type)
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the super-aligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */
1882 unsigned int data_align = TYPE_ALIGN (data_type);
1883 unsigned int default_allocator_alignment
1884 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
1888 if (data_align > default_allocator_alignment)
1890 /* DATA_FRONT_PTR (void *)
1891 = (void *)DATA_PTR - (void *)sizeof (void *)) */
1894 (POINTER_PLUS_EXPR, ptr_void_type_node,
1895 convert (ptr_void_type_node, data_ptr),
1896 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
1898 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
1901 (INDIRECT_REF, NULL_TREE,
1902 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
1905 free_ptr = data_ptr;
1907 return build_call_1_expr (free_decl, free_ptr);
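/* Illustrative equivalent of the call built above (an assumption, not
   compiled code): in the super-aligned case

     __gnat_free (*((void **) data_ptr - 1));

   and in the regular case simply __gnat_free (data_ptr).  */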
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit proc to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we no longer can be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
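/* Dispatch summary for the routine above (illustrative):

     Present (gnat_proc)   ->  storage-pool or secondary-stack routine
     gnu_obj != NULL_TREE  ->  maybe_wrap_free
     otherwise             ->  restriction check, then maybe_wrap_malloc  */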
/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"), false);
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so the allocator will raise
         storage error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));

      if (TYPE_IS_PADDING_P (type))
        {
          type = TREE_TYPE (TYPE_FIELDS (type));
          if (init)
            init = convert (type, init);
        }

      /* If there is an initializing expression, make a constructor for
         the entire object including the bounds and copy it into the
         object.  If there is no initializing expression, just set the
         bounds.  */
      if (init)
        {
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
                                  build_template (template_type, type, init));
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
                                  init);

          return convert
            (result_type,
             build2 (COMPOUND_EXPR, storage_ptr_type,
                     build_binary_op
                     (MODIFY_EXPR, storage_type,
                      build_unary_op (INDIRECT_REF, NULL_TREE,
                                      convert (storage_ptr_type, storage)),
                      gnat_build_constructor (storage_type, v)),
                     convert (storage_ptr_type, storage)));
        }
      else
        return build2
          (COMPOUND_EXPR, result_type,
           build_binary_op
           (MODIFY_EXPR, template_type,
            build_component_ref
            (build_unary_op (INDIRECT_REF, NULL_TREE,
                             convert (storage_ptr_type, storage)),
             NULL_TREE, TYPE_FIELDS (storage_type), false),
            build_template (template_type, type, NULL_TREE)),
           convert (result_type, convert (storage_ptr_type, storage)));
    }

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     storage error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  result = convert (result_type,
                    build_call_alloc_dealloc (NULL_TREE, size, type,
                                              gnat_proc, gnat_pool,
                                              gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      result = gnat_protect_expr (result);
      result
        = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                  build_binary_op
                  (MODIFY_EXPR, NULL_TREE,
                   build_unary_op (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (result)), result),
                   init),
                  result);
    }

  return convert (result_type, result);
}
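/* Shape of the tree returned above for the fat/thin pointer case with an
   initializer (illustrative, not compiled code):

     (*(storage_type *) storage = {template, init}, (result_type) storage)

   i.e. allocate SIZE bytes, assemble the bounds template and the data in a
   single constructor, store it through the protected STORAGE pointer, and
   yield that pointer converted to RESULT_TYPE.  */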
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  GNAT_ACTUAL is
   how we derive the source location to raise C_E on an out-of-range
   pointer to 32bit.  */

tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
{
  tree parm_decl = get_gnu_tree (gnat_formal);
  tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
  tree field;
  const bool do_range_check
    = strcmp ("MBO",
              IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));
  VEC(constructor_elt,gc) *v = NULL;

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      tree conexpr = convert (TREE_TYPE (field),
                              SUBSTITUTE_PLACEHOLDER_IN_EXPR
                              (DECL_INITIAL (field), expr));

      /* Check to ensure that only 32-bit pointers are passed in
         32-bit descriptors.  */
      if (do_range_check
          && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
        {
          tree pointer64type
            = build_pointer_type_for_mode (void_type_node, DImode, false);
          tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
          tree malloc64low
            = build_int_cstu (long_integer_type_node, 0x80000000);

          add_stmt (build3 (COND_EXPR, void_type_node,
                            build_binary_op (GE_EXPR, boolean_type_node,
                                             convert (long_integer_type_node,
                                                      addr64expr),
                                             malloc64low),
                            build_call_raise (CE_Range_Check_Failed,
                                              gnat_actual,
                                              N_Raise_Constraint_Error),
                            NULL_TREE));
        }
      CONSTRUCTOR_APPEND_ELT (v, field, conexpr);
    }

  return gnat_build_constructor (record_type, v);
}
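/* The conditional raise built above behaves like (illustrative):

     if ((long) &EXPR >= 0x80000000L)
       raise Constraint_Error;

   i.e. the call fails when the actual's 64-bit address cannot be stored
   in the 32-bit POINTER field of the descriptor.  */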
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
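/* For instance, for a reference like X.A (I), the loop above peels the
   COMPONENT_REF and ARRAY_REF nodes and ends up setting TREE_ADDRESSABLE
   on the underlying VAR_DECL for X (illustrative).  */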
/* Save EXP for later use or reuse.  This is equivalent to save_expr in
   tree.c but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}
/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise reference, protect the address and dereference.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
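/* The fallback above is equivalent to *SAVE_EXPR <&EXP> (illustrative):
   the address is computed and saved once, and each reuse dereferences it,
   which is safe because the designated data cannot change here.  */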
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constants or that already have been
     handled by this function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize the operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
                                                   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
          tree value
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ...  fall through to failure ...  */

      /* If arg isn't a kind of lvalue we recognize, make no change.
         Caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the one via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
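/* Example (illustrative): stabilizing a reference like A (F (I)) yields
   A (SAVE_EXPR <F (I)>); the array base is walked recursively while the
   side-effecting index is wrapped by gnat_stabilize_reference_1, so later
   reuses of the stabilized reference do not call F again.  */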