/****************************************************************************
 *                         GNAT COMPILER COMPONENTS                         *
 *                          C Implementation File                           *
 *          Copyright (C) 1992-2010, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have  received  a copy of the GNU General *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 ****************************************************************************/
28 #include "coretypes.h"
34 #include "tree-inline.h"
51 static tree find_common_type (tree, tree);
52 static tree compare_arrays (tree, tree, tree);
53 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
54 static tree build_simple_component_ref (tree, tree, tree, bool);
/* Return the base type of TYPE.  */
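/* Illustrative note (not from the original sources): for an Ada subtype
   chain such as  subtype S2 is S1 range ...;  the GCC tree for S2 links to
   S1 and then to the fundamental type through TREE_TYPE, so this walks
   INTEGER_TYPE/REAL_TYPE links until none is left.  */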
tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */
unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
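        /* Illustrative example (not in the original sources): for c = 12
           (binary 1100), c & -c = 4, so the address is a multiple of 4
           bytes and the known alignment is 4 * BITS_PER_UNIT = 32 bits on
           a typical 8-bit-byte target.  */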
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */
static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must prevent writing more than what the target may hold if this is
     for an assignment (the case of tagged types is handled in
     build_binary_op), so use the lhs type if it is known to be smaller, or
     of constant size and the rhs type is not, whatever the modes.  We also
     force t1 in case of constant size equality to minimize occurrences of
     view conversions on the lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
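/* For illustration (not from the original sources): in Ada, an array with
   bounds (1 .. 0) compares equal to one with bounds (5 .. 2), since a zero
   length in any dimension makes both arrays null regardless of their
   particular bounds or contents.  */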
static tree
compare_arrays (tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
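  /* The length of each dimension is computed from the stored bounds as
     UB - LB + 1; e.g. bounds (2 .. 5) give length 4 (illustrative note,
     not in the original sources).  */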
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }
      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }
      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          if (EXPR_P (this_a1_is_null))
            SET_EXPR_LOCATION (this_a1_is_null, input_location);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }
      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = build_binary_op (EQ_EXPR, result_type, length1, length2);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = build_binary_op (EQ_EXPR, result_type, length1,
                                             size_zero_node);
          if (EXPR_P (this_a1_is_null))
            SET_EXPR_LOCATION (this_a1_is_null, input_location);

          this_a2_is_null = build_binary_op (EQ_EXPR, result_type, length2,
                                             size_zero_node);
          if (EXPR_P (this_a2_is_null))
            SET_EXPR_LOCATION (this_a2_is_null, input_location);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }
  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2 (EQ_EXPR, result_type, a1, a2);
      if (EXPR_P (comparison))
        SET_EXPR_LOCATION (comparison, input_location);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;
  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
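  /* Illustrative example (not in the original sources): with modulus 7,
     X + 5 is rewritten as X - 2, since X + 5 and X - (7 - 5) are congruent
     modulo 7 and the subtraction path below needs only one conditional
     fix-up.  */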
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     that of the operation type.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;
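  /* Worked example (illustrative, not in the original sources): for
     modulus 10, tree_floor_log2 (10) + 1 = 4 bits; an addition then needs
     5 bits and a multiplication 8 bits, so 9 * 9 = 81 is representable
     before the final reduction.  */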
  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }
  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }
  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */
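/* Hypothetical usage sketch (not from the original sources): gigi code
   typically emits something like

     result = build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs);

   where both operands have already been converted by the front-end, so
   this function only normalizes base types and modular semantics.  */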
tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);
  switch (op_code)
    {
    case MODIFY_EXPR:
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }
      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;
      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));
      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;
    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

      modulus = NULL_TREE;
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);
      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }
      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue (result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }
      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }
      /* If we are comparing a fat pointer against zero, we just need to
         compare the data pointer.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type)
          && TREE_CODE (right_operand) == CONSTRUCTOR
          && integer_zerop (VEC_index (constructor_elt,
                                       CONSTRUCTOR_ELTS (right_operand),
                                       0)->value))
        {
          left_operand
            = build_component_ref (left_operand, NULL_TREE,
                                   TYPE_FIELDS (left_base_type), false);
          right_operand
            = convert (TREE_TYPE (left_operand), integer_zero_node);
        }

      modulus = NULL_TREE;
      break;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;
    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;
    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));
      /* ... fall through ... */

    case MULT_EXPR:
    common:
      /* The result type should be the same as the base types of
         both operands (and they should be the same).  Convert
         everything to the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);
  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
/* Similar, but for unary operations.  */
tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;
  bool side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;
    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue (operand);
      break;
    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);
          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;
        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              return result;
            }
          break;
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and
             add the offset to the field.  Otherwise, do this the normal
             way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);
              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              if (bitpos % BITS_PER_UNIT != 0)
                post_error
                  ("taking address of object not aligned on storage unit?",
                   error_gnat_node);

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));
          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          break;
        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */
        default:
          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;
    case INDIRECT_REF:
      /* If we want to refer to an unconstrained array, use the appropriate
         expression to do so.  This will never survive down to the back-end.
         But if TYPE is a thin pointer, first convert to a fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_IS_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }
      /* If we are dereferencing an ADDR_EXPR, return its operand.  */
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      /* Otherwise, build and fold the indirect reference.  */
      else
        {
          result = build_fold_indirect_ref (operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      side_effects
        = (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);
        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */
        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);
            /* For nonbinary negate case, return zero for zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
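            /* Illustrative example (not in the original sources): for
               modulus 7 = 2**3 - 1, 7 - 3 = 4 and 7 ^ 3 = 4 as well, since
               subtracting from an all-ones bit pattern cannot borrow.  */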
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result,
                                      operand);
              }
            else
              /* For the NOT cases, we need a constant equal to
                 the modulus minus one.  For a binary modulus, we
                 XOR against that constant; for a nonbinary modulus,
                 we subtract the operand from that constant.  */
              {
                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));
                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }
            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }
  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
        TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
/* Similar, but for COND_EXPR.  */
tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that result, true and false operands have
     same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
/* Similar, but for RETURN_EXPR.  If RET_VAL is non-null, build a RETURN_EXPR
   around the assignment of RET_VAL to RET_OBJ.  Otherwise just build a bare
   RETURN_EXPR around RET_OBJ, which may be null in this case.  */
tree
build_return_expr (tree ret_obj, tree ret_val)
{
  tree result_expr;

  if (ret_val)
    {
      /* The gimplifier explicitly enforces the following invariant:

              RETURN_EXPR
                  |
              MODIFY_EXPR
              /        \
             /          \
         RET_OBJ        ...

         As a consequence, type consistency dictates that we use the type
         of the RET_OBJ as the operation type.  */
      tree operation_type = TREE_TYPE (ret_obj);

      /* Convert the right operand to the operation type.  Note that it's the
         same transformation as in the MODIFY_EXPR case of build_binary_op,
         with the assumption that the type cannot involve a placeholder.  */
      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr = build2 (MODIFY_EXPR, operation_type, ret_obj, ret_val);
    }
  else
    result_expr = ret_obj;

  return build1 (RETURN_EXPR, void_type_node, result_expr);
}
/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

tree
build_call_1_expr (tree fundecl, tree arg)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               1, arg);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}
/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

tree
build_call_2_expr (tree fundecl, tree arg1, tree arg2)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               2, arg1, arg2);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}
/* Likewise to call FUNDECL with no arguments.  */

tree
build_call_0_expr (tree fundecl)
{
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               0);
  return call;
}
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */
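/* Illustrative note (not from the original sources): for a check failing
   at line 10 of foo.adb, this builds the equivalent of a call
   raise_routine ("foo.adb", 10), where raise_routine stands for the entry
   selected from gnat_raise_decls[msg]; when an exception label is
   available, a local goto is generated instead, as coded below.  */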
tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }
  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename)
    = build_array_type (char_type_node, build_index_type (size_int (len)));

  return
    build_call_2_expr (fndecl,
                       build1 (ADDR_EXPR, build_pointer_type (char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */
static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const_tree const elmt1 = * (const_tree const *) rt1;
  const_tree const elmt2 = * (const_tree const *) rt2;
  const_tree const field1 = TREE_PURPOSE (elmt1);
  const_tree const field2 = TREE_PURPOSE (elmt2);
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));
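  /* Note (added for clarity, not in the original sources): when two fields
     have the same bit position, fall back on DECL_UID so that qsort, which
     is not stable, still produces a deterministic order.  */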
  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}
/* Return a CONSTRUCTOR of TYPE whose list is LIST.  */
tree
gnat_build_constructor (tree type, tree list)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree elmt, result;
  int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts++)
    {
      tree obj = TREE_PURPOSE (elmt);
      tree val = TREE_VALUE (elmt);

      /* The predicate must be in keeping with output_constructor.  */
      if (!TREE_CONSTANT (val)
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;
    }

  /* For record types with constant components only, sort field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    {
      /* Fill an array with an element tree per index, and ask qsort to order
         them according to what a bitpos comparison function says.  */
      tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
      int i;

      for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
        gnu_arr[i] = elmt;

      qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);

      /* Then reconstruct the list from the sorted array contents.  */
      list = NULL_TREE;
      for (i = n_elmts - 1; i >= 0; i--)
        {
          TREE_CHAIN (gnu_arr[i]) = list;
          list = gnu_arr[i];
        }
    }

  result = build_constructor_from_list (type, list);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
  return result;
}
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */
static tree
build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
               || TREE_CODE (record_type) == UNION_TYPE
               || TREE_CODE (record_type) == QUAL_UNION_TYPE)
              && TYPE_SIZE (record_type)
              && (component != 0) != (field != 0));

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type); field;
         field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;
  /* If this field is not in the specified record, see if we can find
     something in the record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    /* Check if there is a field with name COMPONENT in the record.  */
    {
      tree new_field;

      /* First loop thru normal components.  */
      for (new_field = TYPE_FIELDS (record_type); new_field;
           new_field = TREE_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))
          break;

      /* Next, loop thru DECL_INTERNAL_P components if we haven't found
         the component in the first search.  Doing this search in 2 steps
         is required to avoid hidden homonymous fields in the record.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type); new_field;
             new_field = TREE_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);
              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;
  /* If the field's offset has overflowed, do not attempt to access it
     as doing so may trigger sanity checks deeper in the back-end.
     Note that we don't need to warn since this will be done on trying
     to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;
  /* Look through conversion between type variants.  Note that this
     is transparent as far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
         == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
                NULL_TREE);

  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;
  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  else if (TREE_CODE (record_variable) == CONSTRUCTOR
           && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;
      return ref;
    }

  else
    return fold (ref);
}
/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */
tree
build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);

  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */
static tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
  tree gnu_call;

  /* The storage pools are obviously always tagged types, but the
     secondary stack uses the same mechanism and is not tagged.  */
  if (Is_Tagged_Type (Etype (gnat_pool)))
    {
      /* The size is the third parameter; the alignment is the
         fourth one.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 4, gnu_pool_addr,
                                    gnu_obj, gnu_size, gnu_align);
      else
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 3, gnu_pool_addr,
                                    gnu_size, gnu_align);
    }
  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 1, gnu_size);
    }

  TREE_SIDE_EFFECTS (gnu_call) = 1;
  return gnu_call;
}
/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   default allocator provides.  */
static tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */
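  /* Sketch of the resulting layout (added for illustration, not part of
     the original sources):

         malloc'ed block
         v
         +---------------+-------------+----------------------+
         |   (padding)   | stored ptr  |   DATA_TYPE object   |
         +---------------+-------------+----------------------+
                                       ^ aligned address returned to the
                                         caller; the raw malloc return
                                         value sits one pointer before it
                                         so that the free wrapper can
                                         retrieve it later.  */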
  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > default_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             default_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr;

  /* On VMS, if 64-bit memory is disabled or pointers are 64-bit and the
     allocator size is 32-bit or Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && (!TARGET_MALLOC64
          || (POINTER_SIZE == 64
              && (UI_To_Int (Esize (Etype (gnat_node))) == 32
                  || Convention (Etype (gnat_node)) == Convention_C))))
    malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
  else
    malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);
  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
      /* Then arrange to store the allocator's return value ahead
         of the aligned field and return the latter's address.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (MODIFY_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */
1877 maybe_wrap_free (tree data_ptr, tree data_type)
1879 /* In the regular alignment case, we pass the data pointer straight to free.
1880 In the superaligned case, we need to retrieve the initial allocator
1881 return value, stored in front of the data block at allocation time. */
1883 unsigned int data_align = TYPE_ALIGN (data_type);
1884 unsigned int default_allocator_alignment
1885 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
1889 if (data_align > default_allocator_alignment)
1891 /* DATA_FRONT_PTR (void *)
1892 = (void *)DATA_PTR - (void *)sizeof (void *)) */
1895 (POINTER_PLUS_EXPR, ptr_void_type_node,
1896 convert (ptr_void_type_node, data_ptr),
1897 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
1899 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
1902 (INDIRECT_REF, NULL_TREE,
1903 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
1906 free_ptr = data_ptr;
1908 return build_call_1_expr (free_decl, free_ptr);
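/* Illustrative sketch only: the runtime counterpart of the tree built above
   for the super-aligned case.  DATA_PTR points at the aligned data; the
   original malloc return value sits one pointer-slot in front of it.  The
   function name is invented.  */
#if 0
#include <stdlib.h>

static void
superaligned_free_sketch (void *data_ptr)
{
  /* *(void **)((char *)DATA_PTR - sizeof (void *))  */
  void *data_front_ptr = ((void **) data_ptr)[-1];
  free (data_front_ptr);
}
#endif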
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit proc to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we no longer can be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"));
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;
      tree template_cons = NULL_TREE;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so the allocator will raise
         Storage_Error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));
      if (TYPE_IS_PADDING_P (type))
        {
          type = TREE_TYPE (TYPE_FIELDS (type));
          if (init)
            init = convert (type, init);
        }

      /* If there is an initializing expression, make a constructor for
         the entire object including the bounds and copy it into the
         object.  If there is no initializing expression, just set the
         bounds.  */
      if (init)
        {
          template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
                                     init, NULL_TREE);
          template_cons = tree_cons (TYPE_FIELDS (storage_type),
                                     build_template (template_type, type,
                                                     init),
                                     template_cons);

          return convert
            (result_type,
             build2 (COMPOUND_EXPR, storage_ptr_type,
                     build_binary_op
                     (MODIFY_EXPR, storage_type,
                      build_unary_op (INDIRECT_REF, NULL_TREE,
                                      convert (storage_ptr_type, storage)),
                      gnat_build_constructor (storage_type, template_cons)),
                     convert (storage_ptr_type, storage)));
        }
      else
        return build2
          (COMPOUND_EXPR, result_type,
           build_binary_op
           (MODIFY_EXPR, template_type,
            build_component_ref
            (build_unary_op (INDIRECT_REF, NULL_TREE,
                             convert (storage_ptr_type, storage)),
             NULL_TREE, TYPE_FIELDS (storage_type), false),
            build_template (template_type, type, NULL_TREE)),
           convert (result_type, convert (storage_ptr_type, storage)));
    }
  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     Storage_Error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  result = convert (result_type,
                    build_call_alloc_dealloc (NULL_TREE, size, type,
                                              gnat_proc, gnat_pool,
                                              gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      result = gnat_protect_expr (result);
      result
        = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                  build_binary_op
                  (MODIFY_EXPR, NULL_TREE,
                   build_unary_op (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (result)), result),
                   init),
                  result);
    }

  return convert (result_type, result);
}
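/* Illustrative sketch only: for a fat-pointer result, the tree built above
   behaves like allocating the bounds template and the data as one block and
   then filling in the bounds.  The layout below is a hypothetical rendition
   for a one-dimensional array; all type, field, and function names are
   invented.  */
#if 0
#include <stdlib.h>

struct template_1d { int lb, ub; };             /* the bounds template */
struct storage_1d
{
  struct template_1d bounds;                    /* template first ... */
  char data[];                                  /* ... then the array data */
};

static struct storage_1d *
alloc_unconstrained_sketch (int lb, int ub)
{
  size_t len = (ub >= lb) ? (size_t) (ub - lb + 1) : 0;

  /* SIZE is the sum of the sizes of the object and its template.  */
  struct storage_1d *storage
    = malloc (sizeof (struct template_1d) + len);

  if (storage)
    {
      /* Set the bounds whether or not there is an initializing
         expression; the data is filled in separately.  */
      storage->bounds.lb = lb;
      storage->bounds.ub = ub;
    }
  return storage;
}
#endif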
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  GNAT_ACTUAL is
   how we derive the source location to raise C_E on an out-of-range
   pointer.  */

tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
{
  tree parm_decl = get_gnu_tree (gnat_formal);
  tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
  tree const_list = NULL_TREE, field;
  const bool do_range_check
    = strcmp ("MBO",
              IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
    {
      tree conexpr = convert (TREE_TYPE (field),
                              SUBSTITUTE_PLACEHOLDER_IN_EXPR
                              (DECL_INITIAL (field), expr));

      /* Check to ensure that only 32-bit pointers are passed in
         32-bit descriptors.  */
      if (do_range_check
          && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
        {
          tree pointer64type
            = build_pointer_type_for_mode (void_type_node, DImode, false);
          tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
          tree malloc64low
            = build_int_cstu (long_integer_type_node, 0x80000000);

          add_stmt (build3 (COND_EXPR, void_type_node,
                            build_binary_op (GE_EXPR, boolean_type_node,
                                             convert (long_integer_type_node,
                                                      addr64expr),
                                             malloc64low),
                            build_call_raise (CE_Range_Check_Failed,
                                              gnat_actual,
                                              N_Raise_Constraint_Error),
                            NULL_TREE));
        }
      const_list = tree_cons (field, conexpr, const_list);
    }

  return gnat_build_constructor (record_type, nreverse (const_list));
}
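/* Illustrative sketch only: the range check emitted above, in plain C.  A
   64-bit address is rejected if it does not fit in the 32-bit POINTER field
   of the descriptor, i.e. if it is >= 0x80000000.  The helper name is
   invented and stands in for the Constraint_Error raise.  */
#if 0
#include <stdint.h>

extern void raise_constraint_error (void);  /* invented stand-in for C_E */

static void
check_32bit_pointer_sketch (const void *addr)
{
  if ((int64_t) (intptr_t) addr >= (int64_t) 0x80000000)
    raise_constraint_error ();
}
#endif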
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
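/* Illustrative sketch only: why the walk above marks the underlying decl.
   Once a variable's address is taken, it must live in memory rather than a
   register, since stores through the pointer must be visible.  */
#if 0
static int
addressable_sketch (void)
{
  int v = 0;      /* taking &v below forces v out of a register */
  int *p = &v;

  *p = 42;        /* this store must be visible through v ...   */
  return v;       /* ... so v needs a memory home.               */
}
#endif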
/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
   but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}
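/* Illustrative sketch only: what a SAVE_EXPR buys us, in plain C terms.
   Without saving, an expression with side effects would be re-evaluated at
   each reuse; with it, the value is computed once into a temporary.  f() is
   an invented stand-in for an expression with side effects.  */
#if 0
extern int f (void);

static int
save_expr_sketch (void)
{
  int saved = f ();      /* SAVE_EXPR: evaluate once ...     */
  return saved + saved;  /* ... and reuse the cached value.  */
}
#endif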
/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, protect the address and dereference it.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
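/* Illustrative sketch only: the fallback above in plain C.  For a BLKmode
   object we cannot cheaply copy into a temporary, we save its *address*
   once and re-derive the object by dereferencing, so repeated uses do not
   re-evaluate the underlying lvalue expression.  big_t and get_slot are
   invented stand-ins.  */
#if 0
typedef struct { char bytes[256]; } big_t;

extern big_t *get_slot (void);  /* may have side effects */

static int
protect_expr_sketch (void)
{
  big_t *addr = get_slot ();                  /* save_expr (ADDR_EXPR ...) */
  return addr->bytes[0] + addr->bytes[255];   /* INDIRECT_REF reuses it    */
}
#endif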
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
                                                   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
          tree value
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ... fall through to failure ... */

      /* If arg isn't a kind of lvalue we recognize, make no change.
         Caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the way via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
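/* Illustrative sketch only: stabilizing a reference such as a[i++].f means
   evaluating the variable parts (here the index) exactly once, so that
   multiple uses of the reference denote the same object.  In plain C:  */
#if 0
struct rec { int f; };

extern struct rec a[];
extern int i;

static void
stabilize_sketch (void)
{
  int idx = i++;             /* gnat_stabilize_reference_1 on the index */
  a[idx].f = a[idx].f + 1;   /* both uses now name the same element     */
}
#endif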