1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2010, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
34 #include "tree-inline.h"
51 static tree find_common_type (tree, tree);
52 static tree compare_arrays (tree, tree, tree);
53 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
54 static tree build_simple_component_ref (tree, tree, tree, bool);
56 /* Return the base type of TYPE. */
59 get_base_type (tree type)
61 if (TREE_CODE (type) == RECORD_TYPE
62 && TYPE_JUSTIFIED_MODULAR_P (type))
63 type = TREE_TYPE (TYPE_FIELDS (type));
65 while (TREE_TYPE (type)
66 && (TREE_CODE (type) == INTEGER_TYPE
67 || TREE_CODE (type) == REAL_TYPE))
68 type = TREE_TYPE (type);
73 /* EXP is a GCC tree representing an address. See if we can find how
74 strictly the object at that address is aligned. Return that alignment
75 in bits. If we don't know anything about the alignment, return 0. */
78 known_alignment (tree exp)
80 unsigned int this_alignment;
81 unsigned int lhs, rhs;
83 switch (TREE_CODE (exp))
86 case VIEW_CONVERT_EXPR:
88 /* Conversions between pointers and integers don't change the alignment
89 of the underlying object. */
90 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
94 /* The value of a COMPOUND_EXPR is that of its second operand. */
95 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
100 /* If two addresses are added, the alignment of the result is the
101 minimum of the two alignments. */
102 lhs = known_alignment (TREE_OPERAND (exp, 0));
103 rhs = known_alignment (TREE_OPERAND (exp, 1));
104 this_alignment = MIN (lhs, rhs);
107 case POINTER_PLUS_EXPR:
108 lhs = known_alignment (TREE_OPERAND (exp, 0));
109 rhs = known_alignment (TREE_OPERAND (exp, 1));
110 /* If we don't know the alignment of the offset, we assume that of the base. */
113 this_alignment = lhs;
115 this_alignment = MIN (lhs, rhs);
119 /* If there is a choice between two values, use the smallest one. */
120 lhs = known_alignment (TREE_OPERAND (exp, 1));
121 rhs = known_alignment (TREE_OPERAND (exp, 2));
122 this_alignment = MIN (lhs, rhs);
127 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
128 /* The first part of this represents the lowest bit in the constant,
129 but it is originally in bytes, not bits. */
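/* As a rough worked example of the computation below: for the constant 24 the
lowest set bit is 8, so the address is a multiple of 8 bytes and, with the usual
BITS_PER_UNIT of 8, this yields 64 bits, capped at BIGGEST_ALIGNMENT. */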
130 this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
135 /* If we know the alignment of just one side, use it. Otherwise,
136 use the product of the alignments. */
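/* For instance, multiplying an expression of unknown alignment by the constant 4
still yields a value that is a multiple of 4 bytes, so the constant's 32-bit
alignment is used on its own when the other side is unknown. */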
137 lhs = known_alignment (TREE_OPERAND (exp, 0));
138 rhs = known_alignment (TREE_OPERAND (exp, 1));
141 this_alignment = rhs;
143 this_alignment = lhs;
145 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
149 /* A bit-and expression is as aligned as the maximum alignment of the
150 operands. We typically get here for a complex lhs and a constant
151 negative power of two on the rhs to force an explicit alignment, so
152 don't bother looking at the lhs. */
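/* A typical case is an expression of the form ADDR & -8: the constant -8 has 8 as
its lowest set bit, so the result is known to be 8-byte (64-bit) aligned whatever
the alignment of ADDR itself. */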
153 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
157 this_alignment = expr_align (TREE_OPERAND (exp, 0));
162 tree t = maybe_inline_call_in_expr (exp);
164 return known_alignment (t);
167 /* Fall through... */
170 /* For other pointer expressions, we assume that the pointed-to object
171 is at least as aligned as the pointed-to type. Beware that we can
172 have a dummy type here (e.g. a Taft Amendment type), for which the
173 alignment is meaningless and should be ignored. */
174 if (POINTER_TYPE_P (TREE_TYPE (exp))
175 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
176 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
182 return this_alignment;
185 /* We have a comparison or assignment operation on two types, T1 and T2, which
186 are either both array types or both record types. T1 is assumed to be for
187 the left hand side operand, and T2 for the right hand side. Return the
188 type that both operands should be converted to for the operation, if any.
189 Otherwise return zero. */
192 find_common_type (tree t1, tree t2)
194 /* ??? As of today, various constructs lead here with types of different
195 sizes even when both are constant (e.g. tagged types, packable vs regular
196 component types, padded vs unpadded types, ...). While some of these
197 would better be handled upstream (types should be made consistent before
198 calling into build_binary_op), some others are really expected and we
199 have to be careful. */
201 /* If this is for an assignment, we must prevent writing more than what the
202 target may hold; the case of tagged types is handled in build_binary_op. So
203 use the lhs type if it is known to be smaller, or if it is of constant size
204 and the rhs type is not, whatever the modes. We also force t1 when both
205 constant sizes are equal, to minimize occurrences of view conversions on the
206 lhs of assignments. */
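/* For instance, if the lhs type has a constant size of 4 bytes and the rhs type a
constant size of 4 or 8 bytes, the first test below selects the lhs type, so an
assignment never writes past the target object. */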
207 if (TREE_CONSTANT (TYPE_SIZE (t1))
208 && (!TREE_CONSTANT (TYPE_SIZE (t2))
209 || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
212 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
213 that we will not have any alignment problems since, if we did, the
214 non-BLKmode type could not have been used. */
215 if (TYPE_MODE (t1) != BLKmode)
218 /* If the rhs type is of constant size, use it whatever the modes. At
219 this point it is known to be smaller, or of constant size and the lhs type is not. */
221 if (TREE_CONSTANT (TYPE_SIZE (t2)))
224 /* Otherwise, if the rhs type is non-BLKmode, use it. */
225 if (TYPE_MODE (t2) != BLKmode)
228 /* In this case, both types have variable size and BLKmode. It's
229 probably best to leave the "type mismatch" because changing it
230 could cause a bad self-referential reference. */
234 /* Return an expression tree representing an equality comparison of A1 and A2,
235 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
237 Two arrays are equal in one of two ways: (1) if both have zero length in
238 some dimension (not necessarily the same dimension) or (2) if the lengths
239 in each dimension are equal and the data is equal. We perform the length
240 tests in as efficient a manner as possible. */
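/* For instance, when one of two one-dimensional arrays is known to be empty (say
with bounds 1 .. 0), the comparison reduces to checking that the other array is
empty as well; otherwise the lengths are compared first and the comparison of
the data is short-circuited when they differ. */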
243 compare_arrays (tree result_type, tree a1, tree a2)
245 tree result = convert (result_type, boolean_true_node);
246 tree a1_is_null = convert (result_type, boolean_false_node);
247 tree a2_is_null = convert (result_type, boolean_false_node);
248 tree t1 = TREE_TYPE (a1);
249 tree t2 = TREE_TYPE (a2);
253 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
254 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
255 bool length_zero_p = false;
257 /* If either operand has side-effects, they have to be evaluated only once
258 in spite of the multiple references to the operand in the comparison. */
259 if (a1_side_effects_p)
260 a1 = gnat_protect_expr (a1);
262 if (a2_side_effects_p)
263 a2 = gnat_protect_expr (a2);
265 /* Process each dimension separately and compare the lengths. If any
266 dimension has a length known to be zero, set LENGTH_ZERO_P to true
267 in order to suppress the comparison of the data at the end. */
268 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
270 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
271 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
272 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
273 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
274 tree bt = get_base_type (TREE_TYPE (lb1));
275 tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
276 tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
277 tree comparison, this_a1_is_null, this_a2_is_null;
281 /* If the length of the first array is a constant, swap our operands
282 unless the length of the second array is the constant zero. */
283 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
288 tem = a1, a1 = a2, a2 = tem;
289 tem = t1, t1 = t2, t2 = tem;
290 tem = lb1, lb1 = lb2, lb2 = tem;
291 tem = ub1, ub1 = ub2, ub2 = tem;
292 tem = length1, length1 = length2, length2 = tem;
293 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
294 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
295 a2_side_effects_p = btem;
298 /* If the length of the second array is the constant zero, we can just
299 use the original stored bounds for the first array and see whether
300 last < first holds. */
301 if (integer_zerop (length2))
303 length_zero_p = true;
305 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
306 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
308 comparison = build_binary_op (LT_EXPR, result_type, ub1, lb1);
309 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
310 if (EXPR_P (comparison))
311 SET_EXPR_LOCATION (comparison, input_location);
313 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
315 this_a1_is_null = comparison;
316 this_a2_is_null = convert (result_type, boolean_true_node);
319 /* Otherwise, if the length is some other constant value, we know that
320 this dimension in the second array cannot be superflat, so we can
321 just use its length computed from the actual stored bounds. */
322 else if (TREE_CODE (length2) == INTEGER_CST)
326 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
327 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
328 /* Note that we know that UB2 and LB2 are constant and hence
329 cannot contain a PLACEHOLDER_EXPR. */
330 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
331 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
332 tree nbt = get_base_type (TREE_TYPE (ub1));
335 = build_binary_op (EQ_EXPR, result_type,
336 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
337 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
338 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
339 if (EXPR_P (comparison))
340 SET_EXPR_LOCATION (comparison, input_location);
342 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
344 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
345 if (EXPR_P (this_a1_is_null))
346 SET_EXPR_LOCATION (this_a1_is_null, input_location);
347 this_a2_is_null = convert (result_type, boolean_false_node);
350 /* Otherwise, compare the computed lengths. */
353 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
354 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
357 = build_binary_op (EQ_EXPR, result_type, length1, length2);
358 if (EXPR_P (comparison))
359 SET_EXPR_LOCATION (comparison, input_location);
362 = build_binary_op (LT_EXPR, result_type, length1,
363 convert (bt, integer_zero_node));
364 if (EXPR_P (this_a1_is_null))
365 SET_EXPR_LOCATION (this_a1_is_null, input_location);
368 = build_binary_op (LT_EXPR, result_type, length2,
369 convert (bt, integer_zero_node));
370 if (EXPR_P (this_a2_is_null))
371 SET_EXPR_LOCATION (this_a2_is_null, input_location);
374 /* Append expressions for this dimension to the final expressions. */
375 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
378 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
379 this_a1_is_null, a1_is_null);
381 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
382 this_a2_is_null, a2_is_null);
388 /* Unless the length of some dimension is known to be zero, compare the
389 data in the array. */
392 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
397 a1 = convert (type, a1),
398 a2 = convert (type, a2);
401 comparison = fold_build2 (EQ_EXPR, result_type, a1, a2);
402 if (EXPR_P (comparison))
403 SET_EXPR_LOCATION (comparison, input_location);
406 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
409 /* The result is also true if both sizes are zero. */
410 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
411 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
412 a1_is_null, a2_is_null),
415 /* If either operand has side-effects, they have to be evaluated before
416 starting the comparison above since the place where they would otherwise be
417 evaluated could be wrong. */
418 if (a1_side_effects_p)
419 result = build2 (COMPOUND_EXPR, result_type, a1, result);
421 if (a2_side_effects_p)
422 result = build2 (COMPOUND_EXPR, result_type, a2, result);
427 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
428 type TYPE. We know that TYPE is a modular type with a nonbinary modulus. */
432 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
435 tree modulus = TYPE_MODULUS (type);
436 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
437 unsigned int precision;
438 bool unsignedp = true;
442 /* If this is an addition of a constant, convert it to a subtraction
443 of a constant since we can do that faster. */
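/* For instance, with a modulus of 7, X + 5 becomes X - 2; the MINUS_EXPR handling
below then adds the modulus back whenever the difference is negative, so the
result stays in the range 0 .. 6. */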
444 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
446 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
447 op_code = MINUS_EXPR;
450 /* For the logical operations, we only need PRECISION bits. For
451 addition and subtraction, we need one more and for multiplication we
452 need twice as many. But we never want to make a size smaller than our size. */
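/* With a modulus of 7, for instance, the needed precision starts at 3 bits; it
becomes 4 bits for addition and subtraction (sums can reach 12) and 6 bits for
multiplication (products can reach 36). */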
454 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
455 needed_precision += 1;
456 else if (op_code == MULT_EXPR)
457 needed_precision *= 2;
459 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
461 /* Unsigned will do for everything but subtraction. */
462 if (op_code == MINUS_EXPR)
465 /* If our type is the wrong signedness or isn't wide enough, make a new
466 type and convert both our operands to it. */
467 if (TYPE_PRECISION (op_type) < precision
468 || TYPE_UNSIGNED (op_type) != unsignedp)
470 /* Copy the node so we ensure it can be modified to make it modular. */
471 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
472 modulus = convert (op_type, modulus);
473 SET_TYPE_MODULUS (op_type, modulus);
474 TYPE_MODULAR_P (op_type) = 1;
475 lhs = convert (op_type, lhs);
476 rhs = convert (op_type, rhs);
479 /* Do the operation, then we'll fix it up. */
480 result = fold_build2 (op_code, op_type, lhs, rhs);
482 /* For multiplication, we have no choice but to do a full modulus
483 operation. However, we want to do this in the narrowest possible size. */
485 if (op_code == MULT_EXPR)
487 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
488 modulus = convert (div_type, modulus);
489 SET_TYPE_MODULUS (div_type, modulus);
490 TYPE_MODULAR_P (div_type) = 1;
491 result = convert (op_type,
492 fold_build2 (TRUNC_MOD_EXPR, div_type,
493 convert (div_type, result), modulus));
496 /* For subtraction, add the modulus back if we are negative. */
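/* For instance, with a modulus of 7, 2 - 5 gives -3 and adding the modulus back
yields the expected result 4. */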
497 else if (op_code == MINUS_EXPR)
499 result = gnat_protect_expr (result);
500 result = fold_build3 (COND_EXPR, op_type,
501 fold_build2 (LT_EXPR, boolean_type_node, result,
502 convert (op_type, integer_zero_node)),
503 fold_build2 (PLUS_EXPR, op_type, result, modulus),
507 /* For the other operations, subtract the modulus if we are >= it. */
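/* Since both operands are in range, the result stays below twice the modulus, so a
single conditional subtraction suffices: with a modulus of 7, 5 + 4 gives 9,
which is reduced to 2. */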
510 result = gnat_protect_expr (result);
511 result = fold_build3 (COND_EXPR, op_type,
512 fold_build2 (GE_EXPR, boolean_type_node,
514 fold_build2 (MINUS_EXPR, op_type,
519 return convert (type, result);
522 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
523 desired for the result. Usually the operation is to be performed
524 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
525 in which case the type to be used will be derived from the operands.
527 This function is very much unlike the ones for C and C++ since we
528 have already done any type conversion and matching required. All we
529 have to do here is validate the work done by SEM and handle subtypes. */
532 build_binary_op (enum tree_code op_code, tree result_type,
533 tree left_operand, tree right_operand)
535 tree left_type = TREE_TYPE (left_operand);
536 tree right_type = TREE_TYPE (right_operand);
537 tree left_base_type = get_base_type (left_type);
538 tree right_base_type = get_base_type (right_type);
539 tree operation_type = result_type;
540 tree best_type = NULL_TREE;
541 tree modulus, result;
542 bool has_side_effects = false;
545 && TREE_CODE (operation_type) == RECORD_TYPE
546 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
547 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
550 && !AGGREGATE_TYPE_P (operation_type)
551 && TYPE_EXTRA_SUBTYPE_P (operation_type))
552 operation_type = get_base_type (operation_type);
554 modulus = (operation_type
555 && TREE_CODE (operation_type) == INTEGER_TYPE
556 && TYPE_MODULAR_P (operation_type)
557 ? TYPE_MODULUS (operation_type) : NULL_TREE);
563 /* If there were integral or pointer conversions on the LHS, remove
564 them; we'll be putting them back below if needed. Likewise for
565 conversions between array and record types, except for justified
566 modular types. But don't do this if the right operand is not
567 BLKmode (for packed arrays) unless we are not changing the mode. */
568 while ((CONVERT_EXPR_P (left_operand)
569 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
570 && (((INTEGRAL_TYPE_P (left_type)
571 || POINTER_TYPE_P (left_type))
572 && (INTEGRAL_TYPE_P (TREE_TYPE
573 (TREE_OPERAND (left_operand, 0)))
574 || POINTER_TYPE_P (TREE_TYPE
575 (TREE_OPERAND (left_operand, 0)))))
576 || (((TREE_CODE (left_type) == RECORD_TYPE
577 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
578 || TREE_CODE (left_type) == ARRAY_TYPE)
579 && ((TREE_CODE (TREE_TYPE
580 (TREE_OPERAND (left_operand, 0)))
582 || (TREE_CODE (TREE_TYPE
583 (TREE_OPERAND (left_operand, 0)))
585 && (TYPE_MODE (right_type) == BLKmode
586 || (TYPE_MODE (left_type)
587 == TYPE_MODE (TREE_TYPE
589 (left_operand, 0))))))))
591 left_operand = TREE_OPERAND (left_operand, 0);
592 left_type = TREE_TYPE (left_operand);
595 /* If a class-wide type may be involved, force use of the RHS type. */
596 if ((TREE_CODE (right_type) == RECORD_TYPE
597 || TREE_CODE (right_type) == UNION_TYPE)
598 && TYPE_ALIGN_OK (right_type))
599 operation_type = right_type;
601 /* If we are copying between padded objects with compatible types, use
602 the padded view of the objects; this is very likely more efficient.
603 Likewise for a padded object that is assigned a constructor, if we
604 can convert the constructor to the inner type, to avoid putting a
605 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
606 actually copied anything. */
607 else if (TYPE_IS_PADDING_P (left_type)
608 && TREE_CONSTANT (TYPE_SIZE (left_type))
609 && ((TREE_CODE (right_operand) == COMPONENT_REF
611 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
612 && gnat_types_compatible_p
614 TREE_TYPE (TREE_OPERAND (right_operand, 0))))
615 || (TREE_CODE (right_operand) == CONSTRUCTOR
616 && !CONTAINS_PLACEHOLDER_P
617 (DECL_SIZE (TYPE_FIELDS (left_type)))))
618 && !integer_zerop (TYPE_SIZE (right_type)))
619 operation_type = left_type;
621 /* Find the best type to use for copying between aggregate types. */
622 else if (((TREE_CODE (left_type) == ARRAY_TYPE
623 && TREE_CODE (right_type) == ARRAY_TYPE)
624 || (TREE_CODE (left_type) == RECORD_TYPE
625 && TREE_CODE (right_type) == RECORD_TYPE))
626 && (best_type = find_common_type (left_type, right_type)))
627 operation_type = best_type;
629 /* Otherwise use the LHS type. */
630 else if (!operation_type)
631 operation_type = left_type;
633 /* Ensure everything on the LHS is valid. If we have a field reference,
634 strip anything that get_inner_reference can handle. Then remove any
635 conversions between types having the same code and mode. And mark
636 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
637 either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
638 result = left_operand;
641 tree restype = TREE_TYPE (result);
643 if (TREE_CODE (result) == COMPONENT_REF
644 || TREE_CODE (result) == ARRAY_REF
645 || TREE_CODE (result) == ARRAY_RANGE_REF)
646 while (handled_component_p (result))
647 result = TREE_OPERAND (result, 0);
648 else if (TREE_CODE (result) == REALPART_EXPR
649 || TREE_CODE (result) == IMAGPART_EXPR
650 || (CONVERT_EXPR_P (result)
651 && (((TREE_CODE (restype)
652 == TREE_CODE (TREE_TYPE
653 (TREE_OPERAND (result, 0))))
654 && (TYPE_MODE (TREE_TYPE
655 (TREE_OPERAND (result, 0)))
656 == TYPE_MODE (restype)))
657 || TYPE_ALIGN_OK (restype))))
658 result = TREE_OPERAND (result, 0);
659 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
661 TREE_ADDRESSABLE (result) = 1;
662 result = TREE_OPERAND (result, 0);
668 gcc_assert (TREE_CODE (result) == INDIRECT_REF
669 || TREE_CODE (result) == NULL_EXPR
672 /* Convert the right operand to the operation type unless it is
673 already of the correct type or the type involves a
674 placeholder, since the RHS may not have the same record type. */
675 if (operation_type != right_type
676 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
678 right_operand = convert (operation_type, right_operand);
679 right_type = operation_type;
682 /* If the left operand is not of the same type as the operation
683 type, wrap it up in a VIEW_CONVERT_EXPR. */
684 if (left_type != operation_type)
685 left_operand = unchecked_convert (operation_type, left_operand, false);
687 has_side_effects = true;
693 operation_type = TREE_TYPE (left_type);
695 /* ... fall through ... */
697 case ARRAY_RANGE_REF:
698 /* First look through conversion between type variants. Note that
699 this changes neither the operation type nor the type domain. */
700 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
701 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
702 == TYPE_MAIN_VARIANT (left_type))
704 left_operand = TREE_OPERAND (left_operand, 0);
705 left_type = TREE_TYPE (left_operand);
708 /* For a range, make sure the element type is consistent. */
709 if (op_code == ARRAY_RANGE_REF
710 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
711 operation_type = build_array_type (TREE_TYPE (left_type),
712 TYPE_DOMAIN (operation_type));
714 /* Then convert the right operand to its base type. This will prevent
715 unneeded sign conversions when sizetype is wider than integer. */
716 right_operand = convert (right_base_type, right_operand);
717 right_operand = convert (sizetype, right_operand);
719 if (!TREE_CONSTANT (right_operand)
720 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
721 gnat_mark_addressable (left_operand);
726 case TRUTH_ANDIF_EXPR:
727 case TRUTH_ORIF_EXPR:
731 #ifdef ENABLE_CHECKING
732 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
734 operation_type = left_base_type;
735 left_operand = convert (operation_type, left_operand);
736 right_operand = convert (operation_type, right_operand);
745 #ifdef ENABLE_CHECKING
746 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
748 /* If either operand is a NULL_EXPR, just return a new one. */
749 if (TREE_CODE (left_operand) == NULL_EXPR)
750 return build2 (op_code, result_type,
751 build1 (NULL_EXPR, integer_type_node,
752 TREE_OPERAND (left_operand, 0)),
755 else if (TREE_CODE (right_operand) == NULL_EXPR)
756 return build2 (op_code, result_type,
757 build1 (NULL_EXPR, integer_type_node,
758 TREE_OPERAND (right_operand, 0)),
761 /* If either object is a justified modular type, get the
762 fields from within. */
763 if (TREE_CODE (left_type) == RECORD_TYPE
764 && TYPE_JUSTIFIED_MODULAR_P (left_type))
766 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
768 left_type = TREE_TYPE (left_operand);
769 left_base_type = get_base_type (left_type);
772 if (TREE_CODE (right_type) == RECORD_TYPE
773 && TYPE_JUSTIFIED_MODULAR_P (right_type))
775 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
777 right_type = TREE_TYPE (right_operand);
778 right_base_type = get_base_type (right_type);
781 /* If both objects are arrays, compare them specially. */
782 if ((TREE_CODE (left_type) == ARRAY_TYPE
783 || (TREE_CODE (left_type) == INTEGER_TYPE
784 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
785 && (TREE_CODE (right_type) == ARRAY_TYPE
786 || (TREE_CODE (right_type) == INTEGER_TYPE
787 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
789 result = compare_arrays (result_type, left_operand, right_operand);
791 if (op_code == NE_EXPR)
792 result = invert_truthvalue (result);
794 gcc_assert (op_code == EQ_EXPR);
799 /* Otherwise, the base types must be the same, unless they are both fat
800 pointer types or record types. In the latter case, use the best type
801 and convert both operands to that type. */
802 if (left_base_type != right_base_type)
804 if (TYPE_IS_FAT_POINTER_P (left_base_type)
805 && TYPE_IS_FAT_POINTER_P (right_base_type))
807 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
808 == TYPE_MAIN_VARIANT (right_base_type));
809 best_type = left_base_type;
812 else if (TREE_CODE (left_base_type) == RECORD_TYPE
813 && TREE_CODE (right_base_type) == RECORD_TYPE)
815 /* The only way this is permitted is if both types have the same
816 name. In that case, one of them must not be self-referential.
817 Use it as the best type. Even better with a fixed size. */
818 gcc_assert (TYPE_NAME (left_base_type)
819 && TYPE_NAME (left_base_type)
820 == TYPE_NAME (right_base_type));
822 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
823 best_type = left_base_type;
824 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
825 best_type = right_base_type;
826 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
827 best_type = left_base_type;
828 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
829 best_type = right_base_type;
837 left_operand = convert (best_type, left_operand);
838 right_operand = convert (best_type, right_operand);
842 left_operand = convert (left_base_type, left_operand);
843 right_operand = convert (right_base_type, right_operand);
846 /* If we are comparing a fat pointer against zero, we just need to
847 compare the data pointer. */
848 if (TYPE_IS_FAT_POINTER_P (left_base_type)
849 && TREE_CODE (right_operand) == CONSTRUCTOR
850 && integer_zerop (VEC_index (constructor_elt,
851 CONSTRUCTOR_ELTS (right_operand),
855 = build_component_ref (left_operand, NULL_TREE,
856 TYPE_FIELDS (left_base_type), false);
858 = convert (TREE_TYPE (left_operand), integer_zero_node);
868 /* The RHS of a shift can be any type. Also, ignore any modulus
869 (we used to abort, but this is needed for unchecked conversion
870 to modular types). Otherwise, processing is the same as normal. */
871 gcc_assert (operation_type == left_base_type);
873 left_operand = convert (operation_type, left_operand);
879 /* For binary modulus, if the inputs are in range, so are the outputs. */
881 if (modulus && integer_pow2p (modulus))
886 gcc_assert (TREE_TYPE (result_type) == left_base_type
887 && TREE_TYPE (result_type) == right_base_type);
888 left_operand = convert (left_base_type, left_operand);
889 right_operand = convert (right_base_type, right_operand);
892 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
893 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
894 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
895 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
896 /* These always produce results lower than either operand. */
900 case POINTER_PLUS_EXPR:
901 gcc_assert (operation_type == left_base_type
902 && sizetype == right_base_type);
903 left_operand = convert (operation_type, left_operand);
904 right_operand = convert (sizetype, right_operand);
907 case PLUS_NOMOD_EXPR:
908 case MINUS_NOMOD_EXPR:
909 if (op_code == PLUS_NOMOD_EXPR)
912 op_code = MINUS_EXPR;
915 /* ... fall through ... */
919 /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
920 other compilers. Contrary to C, Ada doesn't allow arithmetic in
921 these types but can generate addition/subtraction for Succ/Pred. */
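/* For instance, an expansion of T'Succ (X) on an enumeration type may reach here
as an addition of 1; the operands are then converted to the integer type with the
same mode, so the arithmetic itself is not done in the ENUMERAL_TYPE. */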
923 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
924 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
925 operation_type = left_base_type = right_base_type
926 = gnat_type_for_mode (TYPE_MODE (operation_type),
927 TYPE_UNSIGNED (operation_type));
929 /* ... fall through ... */
933 /* The result type should be the same as the base types of
934 both operands (and they should be the same). Convert
935 everything to the result type. */
937 gcc_assert (operation_type == left_base_type
938 && left_base_type == right_base_type);
939 left_operand = convert (operation_type, left_operand);
940 right_operand = convert (operation_type, right_operand);
943 if (modulus && !integer_pow2p (modulus))
945 result = nonbinary_modular_operation (op_code, operation_type,
946 left_operand, right_operand);
949 /* If either operand is a NULL_EXPR, just return a new one. */
950 else if (TREE_CODE (left_operand) == NULL_EXPR)
951 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
952 else if (TREE_CODE (right_operand) == NULL_EXPR)
953 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
954 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
955 result = fold (build4 (op_code, operation_type, left_operand,
956 right_operand, NULL_TREE, NULL_TREE));
959 = fold_build2 (op_code, operation_type, left_operand, right_operand);
961 TREE_SIDE_EFFECTS (result) |= has_side_effects;
962 TREE_CONSTANT (result)
963 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
964 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
966 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
967 && TYPE_VOLATILE (operation_type))
968 TREE_THIS_VOLATILE (result) = 1;
970 /* If we are working with modular types, perform the MOD operation
971 if something above hasn't eliminated the need for it. */
973 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
974 convert (operation_type, modulus));
976 if (result_type && result_type != operation_type)
977 result = convert (result_type, result);
982 /* Similar, but for unary operations. */
985 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
987 tree type = TREE_TYPE (operand);
988 tree base_type = get_base_type (type);
989 tree operation_type = result_type;
991 bool side_effects = false;
994 && TREE_CODE (operation_type) == RECORD_TYPE
995 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
996 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
999 && !AGGREGATE_TYPE_P (operation_type)
1000 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1001 operation_type = get_base_type (operation_type);
1007 if (!operation_type)
1008 result_type = operation_type = TREE_TYPE (type);
1010 gcc_assert (result_type == TREE_TYPE (type));
1012 result = fold_build1 (op_code, operation_type, operand);
1015 case TRUTH_NOT_EXPR:
1016 #ifdef ENABLE_CHECKING
1017 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1019 result = invert_truthvalue (operand);
1022 case ATTR_ADDR_EXPR:
1024 switch (TREE_CODE (operand))
1027 case UNCONSTRAINED_ARRAY_REF:
1028 result = TREE_OPERAND (operand, 0);
1030 /* Make sure the type here is a pointer, not a reference.
1031 GCC wants pointer types for function addresses. */
1033 result_type = build_pointer_type (type);
1035 /* If the underlying object can alias everything, propagate the
1036 property since we are effectively retrieving the object. */
1037 if (POINTER_TYPE_P (TREE_TYPE (result))
1038 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1040 if (TREE_CODE (result_type) == POINTER_TYPE
1041 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1043 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1044 TYPE_MODE (result_type),
1046 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1047 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1049 = build_reference_type_for_mode (TREE_TYPE (result_type),
1050 TYPE_MODE (result_type),
1057 TREE_TYPE (result) = type = build_pointer_type (type);
1061 /* Fold a compound expression if it has unconstrained array type
1062 since the middle-end cannot handle it. But we don't do it in the
1063 general case because it may introduce aliasing issues if the
1064 first operand is an indirect assignment and the second operand
1065 the corresponding address, e.g. for an allocator. */
1066 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1068 result = build_unary_op (ADDR_EXPR, result_type,
1069 TREE_OPERAND (operand, 1));
1070 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1071 TREE_OPERAND (operand, 0), result);
1077 case ARRAY_RANGE_REF:
1080 /* If this is for 'Address, find the address of the prefix and add
1081 the offset to the field. Otherwise, do this the normal way. */
1082 if (op_code == ATTR_ADDR_EXPR)
1084 HOST_WIDE_INT bitsize;
1085 HOST_WIDE_INT bitpos;
1087 enum machine_mode mode;
1088 int unsignedp, volatilep;
1090 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1091 &mode, &unsignedp, &volatilep,
1094 /* If INNER is a padding type whose field has a self-referential
1095 size, convert to that inner type. We know the offset is zero
1096 and we need to have that type visible. */
1097 if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
1098 && CONTAINS_PLACEHOLDER_P
1099 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1100 (TREE_TYPE (inner))))))
1101 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1104 /* Compute the offset as a byte offset from INNER. */
1106 offset = size_zero_node;
1108 offset = size_binop (PLUS_EXPR, offset,
1109 size_int (bitpos / BITS_PER_UNIT));
1111 /* Take the address of INNER, convert the offset to void *, and
1112 add them. It will later be converted to the desired result type, if any. */
1114 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1115 inner = convert (ptr_void_type_node, inner);
1116 result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1118 result = convert (build_pointer_type (TREE_TYPE (operand)),
1125 /* If this is just a constructor for a padded record, we can
1126 just take the address of the single field and convert it to
1127 a pointer to our type. */
1128 if (TYPE_IS_PADDING_P (type))
1130 result = VEC_index (constructor_elt,
1131 CONSTRUCTOR_ELTS (operand),
1133 result = convert (build_pointer_type (TREE_TYPE (operand)),
1134 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1141 if (AGGREGATE_TYPE_P (type)
1142 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1143 return build_unary_op (ADDR_EXPR, result_type,
1144 TREE_OPERAND (operand, 0));
1146 /* ... fallthru ... */
1148 case VIEW_CONVERT_EXPR:
1149 /* If this is just a variant conversion or if the conversion doesn't
1150 change the mode, get the result type from this type and go down.
1151 This is needed for conversions of CONST_DECLs, to eventually get
1152 to the address of their CORRESPONDING_VARs. */
1153 if ((TYPE_MAIN_VARIANT (type)
1154 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1155 || (TYPE_MODE (type) != BLKmode
1156 && (TYPE_MODE (type)
1157 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1158 return build_unary_op (ADDR_EXPR,
1159 (result_type ? result_type
1160 : build_pointer_type (type)),
1161 TREE_OPERAND (operand, 0));
1165 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1167 /* ... fall through ... */
1172 /* If we are taking the address of a padded record whose field
1173 contains a template, take the address of the template. */
1174 if (TYPE_IS_PADDING_P (type)
1175 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1176 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1178 type = TREE_TYPE (TYPE_FIELDS (type));
1179 operand = convert (type, operand);
1182 gnat_mark_addressable (operand);
1183 result = build_fold_addr_expr (operand);
1186 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1190 /* If we want to refer to an unconstrained array, use the appropriate
1191 expression to do so. This will never survive down to the back-end.
1192 But if TYPE is a thin pointer, first convert to a fat pointer. */
1193 if (TYPE_IS_THIN_POINTER_P (type)
1194 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1197 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1199 type = TREE_TYPE (operand);
1202 if (TYPE_IS_FAT_POINTER_P (type))
1204 result = build1 (UNCONSTRAINED_ARRAY_REF,
1205 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1206 TREE_READONLY (result)
1207 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1210 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1211 else if (TREE_CODE (operand) == ADDR_EXPR)
1212 result = TREE_OPERAND (operand, 0);
1214 /* Otherwise, build and fold the indirect reference. */
1217 result = build_fold_indirect_ref (operand);
1218 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1222 = (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
1228 tree modulus = ((operation_type
1229 && TREE_CODE (operation_type) == INTEGER_TYPE
1230 && TYPE_MODULAR_P (operation_type))
1231 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1232 int mod_pow2 = modulus && integer_pow2p (modulus);
1234 /* If this is a modular type, there are various possibilities
1235 depending on the operation and whether the modulus is a
1236 power of two or not. */
1240 gcc_assert (operation_type == base_type);
1241 operand = convert (operation_type, operand);
1243 /* The fastest in the negate case for binary modulus is
1244 the straightforward code; the TRUNC_MOD_EXPR below
1245 is an AND operation. */
1246 if (op_code == NEGATE_EXPR && mod_pow2)
1247 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1248 fold_build1 (NEGATE_EXPR, operation_type,
1252 /* For the nonbinary negate case, return zero for zero operand,
1253 else return the modulus minus the operand. If the modulus
1254 is a power of two minus one, we can do the subtraction
1255 as an XOR since it is equivalent and faster on most machines. */
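/* For instance, with a modulus of 7 (that is, 2**3 - 1), negating 3 gives
7 - 3 = 4, which is also 3 XOR 7, so the cheaper XOR form is used; the COND_EXPR
built below leaves a zero operand unchanged. */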
1256 else if (op_code == NEGATE_EXPR && !mod_pow2)
1258 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1260 convert (operation_type,
1261 integer_one_node))))
1262 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1265 result = fold_build2 (MINUS_EXPR, operation_type,
1268 result = fold_build3 (COND_EXPR, operation_type,
1269 fold_build2 (NE_EXPR,
1274 integer_zero_node)),
1279 /* For the NOT cases, we need a constant equal to
1280 the modulus minus one. For a binary modulus, we
1281 XOR the operand against that constant; for a nonbinary
1282 modulus, we subtract the operand from that constant. */
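/* For instance, with a binary modulus of 8 the constant is 7 and NOT X is computed
as X XOR 7; with a nonbinary modulus of 7 the constant is 6 and NOT X is computed
as 6 - X. */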
1284 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1285 convert (operation_type,
1289 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1292 result = fold_build2 (MINUS_EXPR, operation_type,
1300 /* ... fall through ... */
1303 gcc_assert (operation_type == base_type);
1304 result = fold_build1 (op_code, operation_type,
1305 convert (operation_type, operand));
1310 TREE_SIDE_EFFECTS (result) = 1;
1311 if (TREE_CODE (result) == INDIRECT_REF)
1312 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1315 if (result_type && TREE_TYPE (result) != result_type)
1316 result = convert (result_type, result);
1321 /* Similar, but for COND_EXPR. */
1324 build_cond_expr (tree result_type, tree condition_operand,
1325 tree true_operand, tree false_operand)
1327 bool addr_p = false;
1330 /* The front-end verified that result, true and false operands have
1331 the same base type. Convert everything to the result type. */
1332 true_operand = convert (result_type, true_operand);
1333 false_operand = convert (result_type, false_operand);
1335 /* If the result type is unconstrained, take the address of the operands and
1336 then dereference the result. Likewise if the result type is passed by
1337 reference, but this is natively handled in the gimplifier. */
1338 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1339 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1341 result_type = build_pointer_type (result_type);
1342 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1343 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1347 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1348 true_operand, false_operand);
1350 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1351 in both arms, make sure it gets evaluated by moving it ahead of the
1352 conditional expression. This is necessary because it is evaluated
1353 in only one place at run time and would otherwise be uninitialized
1354 in one of the arms. */
1355 true_operand = skip_simple_arithmetic (true_operand);
1356 false_operand = skip_simple_arithmetic (false_operand);
1358 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1359 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1362 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1367 /* Similar, but for RETURN_EXPR. If RET_VAL is non-null, build a RETURN_EXPR
1368 around the assignment of RET_VAL to RET_OBJ. Otherwise just build a bare
1369 RETURN_EXPR around RET_OBJ, which may be null in this case. */
1372 build_return_expr (tree ret_obj, tree ret_val)
1378 /* The gimplifier explicitly enforces the following invariant:
1387 As a consequence, type consistency dictates that we use the type
1388 of the RET_OBJ as the operation type. */
1389 tree operation_type = TREE_TYPE (ret_obj);
1391 /* Convert the right operand to the operation type. Note that it's the
1392 same transformation as in the MODIFY_EXPR case of build_binary_op,
1393 with the assumption that the type cannot involve a placeholder. */
1394 if (operation_type != TREE_TYPE (ret_val))
1395 ret_val = convert (operation_type, ret_val);
1397 result_expr = build2 (MODIFY_EXPR, operation_type, ret_obj, ret_val);
1400 result_expr = ret_obj;
1402 return build1 (RETURN_EXPR, void_type_node, result_expr);
1405 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return the CALL_EXPR. */
1409 build_call_1_expr (tree fundecl, tree arg)
1411 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1412 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1414 TREE_SIDE_EFFECTS (call) = 1;
1418 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return the CALL_EXPR. */
1422 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1424 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1425 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1427 TREE_SIDE_EFFECTS (call) = 1;
1431 /* Likewise to call FUNDECL with no arguments. */
1434 build_call_0_expr (tree fundecl)
1436 /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS. This makes
1437 it possible to propagate DECL_IS_PURE on parameterless functions. */
1438 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1439 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1444 /* Call a function that raises an exception and pass the line number and file
1445 name, if requested. MSG says which exception function to call.
1447 GNAT_NODE is the gnat node conveying the source location for which the
1448 error should be signaled, or Empty in which case the error is signaled on
1449 the current ref_file_name/input_line.
1451 KIND says which kind of exception this is for
1452 (N_Raise_{Constraint,Storage,Program}_Error). */
1455 build_call_raise (int msg, Node_Id gnat_node, char kind)
1457 tree fndecl = gnat_raise_decls[msg];
1458 tree label = get_exception_label (kind);
1464 /* If this is to be done as a goto, handle that case. */
1467 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1468 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1470 /* If Local_Raise is present, generate
1471 Local_Raise (exception'Identity); */
1472 if (Present (local_raise))
1474 tree gnu_local_raise
1475 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1476 tree gnu_exception_entity
1477 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1479 = build_call_1_expr (gnu_local_raise,
1480 build_unary_op (ADDR_EXPR, NULL_TREE,
1481 gnu_exception_entity));
1483 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1484 gnu_call, gnu_result);
}
1490 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1492 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1493 ? IDENTIFIER_POINTER
1494 (get_identifier (Get_Name_String
1496 (Get_Source_File_Index (Sloc (gnat_node))))))
1500 filename = build_string (len, str);
1502 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1503 ? Get_Logical_Line_Number (Sloc(gnat_node)) : input_line;
1505 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1506 build_index_type (size_int (len)));
1509 build_call_2_expr (fndecl,
1511 build_pointer_type (unsigned_char_type_node),
1513 build_int_cst (NULL_TREE, line_number));
1516 /* qsort comparer for the bit positions of two constructor elements
1517 for record components. */
1520 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1522 const_tree const elmt1 = * (const_tree const *) rt1;
1523 const_tree const elmt2 = * (const_tree const *) rt2;
1524 const_tree const field1 = TREE_PURPOSE (elmt1);
1525 const_tree const field2 = TREE_PURPOSE (elmt2);
1527 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1529 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1532 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1535 gnat_build_constructor (tree type, tree list)
1537 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1538 bool side_effects = false;
1542 /* Scan the elements to see if they are all constant or if any has side
1543 effects, to let us set global flags on the resulting constructor. Count
1544 the elements along the way for possible sorting purposes below. */
1545 for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts ++)
1547 tree obj = TREE_PURPOSE (elmt);
1548 tree val = TREE_VALUE (elmt);
1550 /* The predicate must be in keeping with output_constructor. */
1551 if (!TREE_CONSTANT (val)
1552 || (TREE_CODE (type) == RECORD_TYPE
1553 && CONSTRUCTOR_BITFIELD_P (obj)
1554 && !initializer_constant_valid_for_bitfield_p (val))
1555 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1556 allconstant = false;
1558 if (TREE_SIDE_EFFECTS (val))
1559 side_effects = true;
1562 /* For record types with constant components only, sort field list
1563 by increasing bit position. This is necessary to ensure the
1564 constructor can be output as static data. */
1565 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1567 /* Fill an array with an element tree per index, and ask qsort to order
1568 them according to what a bitpos comparison function says. */
1569 tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
1572 for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
1575 qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);
1577 /* Then reconstruct the list from the sorted array contents. */
1579 for (i = n_elmts - 1; i >= 0; i--)
1581 TREE_CHAIN (gnu_arr[i]) = list;
1586 result = build_constructor_from_list (type, list);
1587 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1588 TREE_SIDE_EFFECTS (result) = side_effects;
1589 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1593 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1594 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1595 for the field. Don't fold the result if NO_FOLD_P is true.
1597 We also handle the fact that we might have been passed a pointer to the
1598 actual record and know how to look for fields in variant parts. */
1601 build_simple_component_ref (tree record_variable, tree component,
1602 tree field, bool no_fold_p)
1604 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1605 tree ref, inner_variable;
1607 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1608 || TREE_CODE (record_type) == UNION_TYPE
1609 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1610 && TYPE_SIZE (record_type)
1611 && (component != 0) != (field != 0));
1613 /* If no field was specified, look for a field with the specified name
1614 in the current record only. */
1616 for (field = TYPE_FIELDS (record_type); field;
1617 field = TREE_CHAIN (field))
1618 if (DECL_NAME (field) == component)
1624 /* If this field is not in the specified record, see if we can find
1625 something in the record whose original field is the same as this one. */
1626 if (DECL_CONTEXT (field) != record_type)
1627 /* Check if there is a field with name COMPONENT in the record. */
1631 /* First loop thru normal components. */
1632 for (new_field = TYPE_FIELDS (record_type); new_field;
1633 new_field = TREE_CHAIN (new_field))
1634 if (SAME_FIELD_P (field, new_field))
1637 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1638 the component in the first search. Doing this search in 2 steps
1639 is required to avoid hidden homonymous fields in the record type. */
1642 for (new_field = TYPE_FIELDS (record_type); new_field;
1643 new_field = TREE_CHAIN (new_field))
1644 if (DECL_INTERNAL_P (new_field))
1647 = build_simple_component_ref (record_variable,
1648 NULL_TREE, new_field, no_fold_p);
1649 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1662 /* If the field's offset has overflowed, do not attempt to access it
1663 as doing so may trigger sanity checks deeper in the back-end.
1664 Note that we don't need to warn since this will be done on trying
1665 to declare the object. */
1666 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1667 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1670 /* Look through conversion between type variants. Note that this
1671 is transparent as far as the field is concerned. */
1672 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1673 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1675 inner_variable = TREE_OPERAND (record_variable, 0);
1677 inner_variable = record_variable;
1679 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1682 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1683 TREE_READONLY (ref) = 1;
1684 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1685 || TYPE_VOLATILE (record_type))
1686 TREE_THIS_VOLATILE (ref) = 1;
1691 /* The generic folder may punt in this case because the inner array type
1692 can be self-referential, but folding is in fact not problematic. */
1693 else if (TREE_CODE (record_variable) == CONSTRUCTOR
1694 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1696 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1697 unsigned HOST_WIDE_INT idx;
1699 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1709 /* Like build_simple_component_ref, except that we give an error if the
1710 reference could not be found. */
1713 build_component_ref (tree record_variable, tree component,
1714 tree field, bool no_fold_p)
1716 tree ref = build_simple_component_ref (record_variable, component, field,
1722 /* If FIELD was specified, assume this is an invalid user field so raise
1723 Constraint_Error. Otherwise, we have no type to return so abort. */
1725 return build1 (NULL_EXPR, TREE_TYPE (field),
1726 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1727 N_Raise_Constraint_Error));
1730 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
1731 identically. Process the case where a GNAT_PROC to call is provided. */
1734 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
1735 Entity_Id gnat_proc, Entity_Id gnat_pool)
1737 tree gnu_proc = gnat_to_gnu (gnat_proc);
1738 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1741 /* The storage pools are obviously always tagged types, but the
1742 secondary stack uses the same mechanism and is not tagged. */
1743 if (Is_Tagged_Type (Etype (gnat_pool)))
1745 /* The size is the third parameter; the alignment is the fourth parameter. */
1747 Entity_Id gnat_size_type
1748 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1749 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1751 tree gnu_pool = gnat_to_gnu (gnat_pool);
1752 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1753 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
1755 gnu_size = convert (gnu_size_type, gnu_size);
1756 gnu_align = convert (gnu_size_type, gnu_align);
1758 /* The first arg is always the address of the storage pool; next
1759 comes the address of the object, for a deallocator, then the
1760 size and alignment. */
1762 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1763 gnu_proc_addr, 4, gnu_pool_addr,
1764 gnu_obj, gnu_size, gnu_align);
1766 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1767 gnu_proc_addr, 3, gnu_pool_addr,
1768 gnu_size, gnu_align);
1771 /* Secondary stack case. */
1774 /* The size is the second parameter. */
1775 Entity_Id gnat_size_type
1776 = Etype (Next_Formal (First_Formal (gnat_proc)));
1777 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1779 gnu_size = convert (gnu_size_type, gnu_size);
1781 /* The first arg is the address of the object, for a deallocator, then the size. */
1784 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1785 gnu_proc_addr, 2, gnu_obj, gnu_size);
1787 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1788 gnu_proc_addr, 1, gnu_size);
1791 TREE_SIDE_EFFECTS (gnu_call) = 1;
1795 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
1796 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
1797 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the latter offers. */
1801 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
1803 /* When the DATA_TYPE alignment is stricter than what malloc offers
1804 (super-aligned case), we allocate an "aligning" wrapper type and return
1805 the address of its single data field with the malloc's return value
1806 stored just in front. */
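/* Roughly, if DATA_TYPE requires 32-byte alignment while malloc only guarantees
16, the allocated block looks like:
malloc result --> [ padding | saved malloc pointer | data field ]
with the data field placed on a 32-byte boundary and the malloc pointer kept in
the pointer-sized slot just before it, where maybe_wrap_free will look for it. */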
1808 unsigned int data_align = TYPE_ALIGN (data_type);
1809 unsigned int default_allocator_alignment
1810 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
1813 = ((data_align > default_allocator_alignment)
1814 ? make_aligning_type (data_type, data_align, data_size,
1815 default_allocator_alignment,
1816 POINTER_SIZE / BITS_PER_UNIT)
1820 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
1824 /* On VMS, if 64-bit memory is disabled or pointers are 64-bit and the
1825 allocator size is 32-bit or Convention C, allocate 32-bit memory. */
1826 if (TARGET_ABI_OPEN_VMS
1827 && (!TARGET_MALLOC64
1828 || (POINTER_SIZE == 64
1829 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
1830 || Convention (Etype (gnat_node)) == Convention_C))))
1831 malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
1833 malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);
1837 /* Latch malloc's return value and get a pointer to the aligning field first. */
1839 tree storage_ptr = gnat_protect_expr (malloc_ptr);
1841 tree aligning_record_addr
1842 = convert (build_pointer_type (aligning_type), storage_ptr);
1844 tree aligning_record
1845 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
1848 = build_component_ref (aligning_record, NULL_TREE,
1849 TYPE_FIELDS (aligning_type), false);
1851 tree aligning_field_addr
1852 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
1854 /* Then arrange to store the allocator's return value ahead and return. */
1856 tree storage_ptr_slot_addr
1857 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1858 convert (ptr_void_type_node, aligning_field_addr),
1859 size_int (-(HOST_WIDE_INT) POINTER_SIZE
1862 tree storage_ptr_slot
1863 = build_unary_op (INDIRECT_REF, NULL_TREE,
1864 convert (build_pointer_type (ptr_void_type_node),
1865 storage_ptr_slot_addr));
1868 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
1869 build_binary_op (MODIFY_EXPR, NULL_TREE,
1870 storage_ptr_slot, storage_ptr),
1871 aligning_field_addr);
/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > default_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *)
         = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_void_type_node,
           convert (ptr_void_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_1_expr (free_decl, free_ptr);
}
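
/* Added note, not in the original source: the negative POINTER_PLUS_EXPR
   offset above points at the slot written by maybe_wrap_malloc, so the
   pointer handed to free is always the one malloc originally returned.  */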
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit procedure to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we no longer can be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
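
/* Dispatch summary (added comment, not part of the original source): a
   user-specified GNAT_PROC wins and goes through
   build_call_alloc_dealloc_proc; otherwise a nonzero GNU_OBJ means a
   deallocation via maybe_wrap_free, and anything else is an allocation via
   maybe_wrap_malloc, preceded by the implicit-heap-allocation restriction
   check for allocators not coming from source.  */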
/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"), false);
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;
      tree template_cons = NULL_TREE;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so the allocator will raise
         Storage_Error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));

      if (TYPE_IS_PADDING_P (type))
        {
          type = TREE_TYPE (TYPE_FIELDS (type));
          if (init)
            init = convert (type, init);
        }
      /* If there is an initializing expression, make a constructor for
         the entire object including the bounds and copy it into the
         object.  If there is no initializing expression, just set the
         bounds.  */
      if (init)
        {
          template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
                                     init, NULL_TREE);
          template_cons = tree_cons (TYPE_FIELDS (storage_type),
                                     build_template (template_type, type,
                                                     init),
                                     template_cons);

          return convert
            (result_type,
             build2 (COMPOUND_EXPR, storage_ptr_type,
                     build_binary_op
                     (MODIFY_EXPR, storage_type,
                      build_unary_op (INDIRECT_REF, NULL_TREE,
                                      convert (storage_ptr_type, storage)),
                      gnat_build_constructor (storage_type, template_cons)),
                     convert (storage_ptr_type, storage)));
        }
      else
        return build2
          (COMPOUND_EXPR, result_type,
           build_binary_op
           (MODIFY_EXPR, template_type,
            build_component_ref
            (build_unary_op (INDIRECT_REF, NULL_TREE,
                             convert (storage_ptr_type, storage)),
             NULL_TREE, TYPE_FIELDS (storage_type), false),
            build_template (template_type, type, NULL_TREE)),
           convert (result_type, convert (storage_ptr_type, storage)));
    }
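
  /* Illustrative sketch (added comment, not in the original source): the
     "ALLOC" storage type built above is essentially

       struct { <template/bounds field>; <array field>; };

     with the template field first, which is why the code above chains
     TYPE_FIELDS for the bounds and TREE_CHAIN (TYPE_FIELDS ...) for the
     initializing data before converting the address to RESULT_TYPE.  */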
  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     Storage_Error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  result = convert (result_type,
                    build_call_alloc_dealloc (NULL_TREE, size, type,
                                              gnat_proc, gnat_pool,
                                              gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      result = gnat_protect_expr (result);
      result
        = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                  build_binary_op
                  (MODIFY_EXPR, NULL_TREE,
                   build_unary_op (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (result)), result),
                   init),
                  result);
    }

  return convert (result_type, result);
}
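
/* Illustrative note (added comment, not in the original source): in the
   common non-fat case with an initializer, the tree built above amounts to

     (TMP = <allocator call>, *TMP = INIT, TMP)

   expressed with COMPOUND_EXPR and MODIFY_EXPR nodes, so the allocation and
   the initial assignment are evaluated exactly once, in that order, before
   the address is returned.  */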
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  GNAT_ACTUAL is
   how we derive the source location to raise C_E on an out-of-range
   pointer.  */

tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
{
  tree parm_decl = get_gnu_tree (gnat_formal);
  tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
  tree const_list = NULL_TREE, field;
  const bool do_range_check
    = strcmp ("MBO",
              IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
    {
      tree conexpr = convert (TREE_TYPE (field),
                              SUBSTITUTE_PLACEHOLDER_IN_EXPR
                              (DECL_INITIAL (field), expr));

      /* Check to ensure that only 32-bit pointers are passed in
         32-bit descriptors.  */
      if (do_range_check
          && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
        {
          tree pointer64type
            = build_pointer_type_for_mode (void_type_node, DImode, false);
          tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
          tree malloc64low
            = build_int_cstu (long_integer_type_node, 0x80000000);

          add_stmt (build3 (COND_EXPR, void_type_node,
                            build_binary_op (GE_EXPR, boolean_type_node,
                                             convert (long_integer_type_node,
                                                      addr64expr),
                                             malloc64low),
                            build_call_raise (CE_Range_Check_Failed,
                                              gnat_actual,
                                              N_Raise_Constraint_Error),
                            NULL_TREE));
        }
      const_list = tree_cons (field, conexpr, const_list);
    }

  return gnat_build_constructor (record_type, nreverse (const_list));
}
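
/* Added note, not in the original source: each descriptor field carries its
   fill-in expression in DECL_INITIAL with the described object represented
   by a PLACEHOLDER_EXPR, so the loop above only needs one
   SUBSTITUTE_PLACEHOLDER_IN_EXPR and one conversion per field.  */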
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
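
/* Usage sketch (added comment, not in the original source): marking an
   expression such as COMPONENT_REF <ARRAY_REF <VAR_DECL X>> addressable
   walks down through the reference nodes and sets TREE_ADDRESSABLE on X
   itself, which prevents X from being allocated to a register.  */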
/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
   but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}
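
/* Added note, not in the original source: for UNCONSTRAINED_ARRAY_REF and
   fat-pointer COMPONENT_REF nodes, the reference is rebuilt on top of a
   saved inner operand rather than saved as a whole, so the result is still
   usable as a reference while the underlying fat pointer is evaluated only
   once.  */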
/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp)
      && !EXPRESSION_CLASS_P (skip_simple_arithmetic (exp)))
    return exp;

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, protect the address of EXP and dereference it.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
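
/* Worked example (added comment, not in the original source): protecting a
   BLKmode component reference OBJ.FIELD that falls through to the last case
   yields roughly

     *(SAVE_EXPR <&OBJ.FIELD>)

   i.e. only the address is latched in a temporary; each reuse rereads the
   object through that saved address instead of copying the aggregate.  */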
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because they might be references to
     const arrays whose index contains side effects.  But we can ignore
     things that are actual constants or that have already been handled by
     this function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize the operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
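
/* Added note, not in the original source: the FORCE argument is what
   distinguishes this from the generic stabilize_reference_1; with FORCE set,
   an index or offset operand with no side effects is still wrapped in a
   SAVE_EXPR (unless it is constant), so both occurrences of a stabilized
   reference are guaranteed to use the same value.  */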
/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
                                                   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
          tree value
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ...  fall through to failure ...  */

      /* If REF isn't a kind of lvalue we recognize, make no change.
         The caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the one via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);