1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2005, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): build_simple_component_ref is declared here but its
   definition is not visible in this (partial) listing.  */
49 static tree find_common_type (tree, tree);
50 static bool contains_save_expr_p (tree);
51 static tree contains_null_expr (tree);
52 static tree compare_arrays (tree, tree, tree);
53 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
54 static tree build_simple_component_ref (tree, tree, tree, bool);
56 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
59 This preparation consists of taking the ordinary representation of
60 an expression expr and producing a valid tree boolean expression
61 describing whether expr is nonzero. We could simply always do
63 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
65 but we optimize comparisons, &&, ||, and !.
67 The resulting type should always be the same as the input type.
68 This function is simpler than the corresponding C version since
69 the only possible operands will be things of Boolean type. */
/* NOTE(review): the embedded original line numbers are non-contiguous
   (e.g. 80 -> 89), so the return type, braces and several case labels of
   this function are missing from this listing — verify against the
   complete file before relying on the structure shown here.  */
72 gnat_truthvalue_conversion (tree expr)
74 tree type = TREE_TYPE (expr);
76 switch (TREE_CODE (expr))
78 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
79 case LT_EXPR: case GT_EXPR:
80 case TRUTH_ANDIF_EXPR:
/* Integer constant: fold 0 to boolean false, anything else to true,
   in the expression's own type.  (The INTEGER_CST case label itself
   is among the missing lines.)  */
89 return (integer_zerop (expr) ? convert (type, integer_zero_node)
90 : convert (type, integer_one_node));
/* Real constant: same folding for floating-point zero.  */
93 return (real_zerop (expr) ? convert (type, integer_zero_node)
94 : convert (type, integer_one_node));
97 /* Distribute the conversion into the arms of a COND_EXPR. */
99 (build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
100 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
101 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
/* Default: build an explicit "expr != 0" comparison.  */
104 return build_binary_op (NE_EXPR, type, expr,
105 convert (type, integer_zero_node));
109 /* Return the base type of TYPE. */
/* NOTE(review): partial listing — the return type, braces and final
   return statement are among the lines missing from this view.
   For a justified-modular record wrapper, start from the wrapped
   field's type; then walk TREE_TYPE links down to the ultimate base
   of integer/real subtype chains.  */
112 get_base_type (tree type)
114 if (TREE_CODE (type) == RECORD_TYPE
115 && TYPE_JUSTIFIED_MODULAR_P (type))
116 type = TREE_TYPE (TYPE_FIELDS (type));
118 while (TREE_TYPE (type)
119 && (TREE_CODE (type) == INTEGER_TYPE
120 || TREE_CODE (type) == REAL_TYPE))
121 type = TREE_TYPE (type);
126 /* Likewise, but only return types known to the Ada source. */
/* NOTE(review): partial listing — return type, braces and the final
   return statement are missing here.  Unlike get_base_type, this stops
   at compiler-generated "extra" subtypes so the result is a type the
   Ada source actually declared.  */
128 get_ada_base_type (tree type)
130 while (TREE_TYPE (type)
131 && (TREE_CODE (type) == INTEGER_TYPE
132 || TREE_CODE (type) == REAL_TYPE)
133 && !TYPE_EXTRA_SUBTYPE_P (type))
134 type = TREE_TYPE (type);
139 /* EXP is a GCC tree representing an address. See if we can find how
140 strictly the object at that address is aligned. Return that alignment
141 in bits. If we don't know anything about the alignment, return 0. */
/* NOTE(review): partial listing — the return type, braces, several case
   labels (conversion codes, PLUS/MINUS, INTEGER_CST, MULT_EXPR,
   ADDR_EXPR, default) and some statements are missing from this view.  */
144 known_alignment (tree exp)
146 unsigned int this_alignment;
147 unsigned int lhs, rhs;
148 unsigned int type_alignment;
150 /* For pointer expressions, we know that the designated object is always at
151 least as strictly aligned as the designated subtype, so we account for
152 both type and expression information in this case.
154 Beware that we can still get a dummy designated subtype here (e.g. Taft
155 Amendment types), in which the alignment information is meaningless and
158 We always compute a type_alignment value and return the MAX of it
159 compared with what we get from the expression tree. Just set the
160 type_alignment value to 0 when the type information is to be ignored. */
162 = ((POINTER_TYPE_P (TREE_TYPE (exp))
163 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
164 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
166 switch (TREE_CODE (exp))
170 case NON_LVALUE_EXPR:
171 /* Conversions between pointers and integers don't change the alignment
172 of the underlying object. */
173 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
178 /* If two address are added, the alignment of the result is the
179 minimum of the two alignments. */
180 lhs = known_alignment (TREE_OPERAND (exp, 0));
181 rhs = known_alignment (TREE_OPERAND (exp, 1));
182 this_alignment = MIN (lhs, rhs);
186 /* The first part of this represents the lowest bit in the constant,
187 but it is in bytes, not bits. */
190 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
195 /* If we know the alignment of just one side, use it. Otherwise,
196 use the product of the alignments. */
197 lhs = known_alignment (TREE_OPERAND (exp, 0));
198 rhs = known_alignment (TREE_OPERAND (exp, 1));
200 if (lhs == 0 || rhs == 0)
201 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
203 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
207 this_alignment = expr_align (TREE_OPERAND (exp, 0));
/* The stricter of the type-derived and expression-derived alignments.  */
215 return MAX (type_alignment, this_alignment);
218 /* We have a comparison or assignment operation on two types, T1 and T2,
219 which are both either array types or both record types.
220 Return the type that both operands should be converted to, if any.
221 Otherwise return zero. */
/* NOTE(review): partial listing — return type, braces and the return
   statements that go with each branch are missing from this view.  */
224 find_common_type (tree t1, tree t2)
226 /* If either type is non-BLKmode, use it. Note that we know that we will
227 not have any alignment problems since if we did the non-BLKmode
228 type could not have been used. */
229 if (TYPE_MODE (t1) != BLKmode)
231 else if (TYPE_MODE (t2) != BLKmode)
234 /* Otherwise, return the type that has a constant size. */
235 if (TREE_CONSTANT (TYPE_SIZE (t1)))
237 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
240 /* In this case, both types have variable size. It's probably
241 best to leave the "type mismatch" because changing it could
242 cause a bad self-referential reference. */
246 /* See if EXP contains a SAVE_EXPR in a position where we would
249 ??? This is a real kludge, but is probably the best approach short
250 of some very general solution. */
/* NOTE(review): partial listing — the SAVE_EXPR base case and the
   function's braces are among the missing lines.  Recursively walks
   addresses, conversions, constructors and tree lists.  */
253 contains_save_expr_p (tree exp)
255 switch (TREE_CODE (exp))
260 case ADDR_EXPR: case INDIRECT_REF:
262 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
263 return contains_save_expr_p (TREE_OPERAND (exp, 0));
266 return (CONSTRUCTOR_ELTS (exp)
267 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
/* TREE_LIST case: check both the value and the rest of the chain.  */
270 return (contains_save_expr_p (TREE_VALUE (exp))
272 && contains_save_expr_p (TREE_CHAIN (exp))));
279 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
280 it if so. This is used to detect types whose sizes involve computations
281 that are known to raise Constraint_Error. */
/* NOTE(review): partial listing — the declaration of TEM, the early
   return for the NULL_EXPR base case, the class/case labels and the
   "if (tem) return tem;" chains are among the missing lines.  */
284 contains_null_expr (tree exp)
288 if (TREE_CODE (exp) == NULL_EXPR)
291 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
/* Unary expression class: recurse into the single operand.  */
294 return contains_null_expr (TREE_OPERAND (exp, 0));
/* Binary expression class: check operand 0, then operand 1.  */
298 tem = contains_null_expr (TREE_OPERAND (exp, 0));
302 return contains_null_expr (TREE_OPERAND (exp, 1));
305 switch (TREE_CODE (exp))
308 return contains_null_expr (TREE_OPERAND (exp, 0));
/* COND_EXPR (presumably): check all three operands in turn.  */
311 tem = contains_null_expr (TREE_OPERAND (exp, 0));
315 tem = contains_null_expr (TREE_OPERAND (exp, 1));
319 return contains_null_expr (TREE_OPERAND (exp, 2));
330 /* Return an expression tree representing an equality comparison of
331 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
332 be of type RESULT_TYPE
334 Two arrays are equal in one of two ways: (1) if both have zero length
335 in some dimension (not necessarily the same dimension) or (2) if the
336 lengths in each dimension are equal and the data is equal. We perform the
337 length tests in as efficient a manner as possible. */
/* NOTE(review): partial listing — the declarations of TEM and NBT,
   several braces, and a few statements (e.g. the assignment targets
   before the "= build_binary_op" continuation lines) are missing from
   this view.  */
340 compare_arrays (tree result_type, tree a1, tree a2)
342 tree t1 = TREE_TYPE (a1);
343 tree t2 = TREE_TYPE (a2);
344 tree result = convert (result_type, integer_one_node);
345 tree a1_is_null = convert (result_type, integer_zero_node);
346 tree a2_is_null = convert (result_type, integer_zero_node);
347 bool length_zero_p = false;
349 /* Process each dimension separately and compare the lengths. If any
350 dimension has a size known to be zero, set LENGTH_ZERO_P to 1 to
351 suppress the comparison of the data. */
352 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
354 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
355 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
356 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
357 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
358 tree bt = get_base_type (TREE_TYPE (lb1));
359 tree length1 = fold (build2 (MINUS_EXPR, bt, ub1, lb1));
360 tree length2 = fold (build2 (MINUS_EXPR, bt, ub2, lb2));
363 tree comparison, this_a1_is_null, this_a2_is_null;
365 /* If the length of the first array is a constant, swap our operands
366 unless the length of the second array is the constant zero.
367 Note that we have set the `length' values to the length - 1. */
368 if (TREE_CODE (length1) == INTEGER_CST
369 && !integer_zerop (fold (build2 (PLUS_EXPR, bt, length2,
370 convert (bt, integer_one_node)))))
372 tem = a1, a1 = a2, a2 = tem;
373 tem = t1, t1 = t2, t2 = tem;
374 tem = lb1, lb1 = lb2, lb2 = tem;
375 tem = ub1, ub1 = ub2, ub2 = tem;
376 tem = length1, length1 = length2, length2 = tem;
377 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
380 /* If the length of this dimension in the second array is the constant
381 zero, we can just go inside the original bounds for the first
382 array and see if last < first. */
383 if (integer_zerop (fold (build2 (PLUS_EXPR, bt, length2,
384 convert (bt, integer_one_node)))))
386 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
387 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
389 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
390 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
391 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
393 length_zero_p = true;
394 this_a1_is_null = comparison;
395 this_a2_is_null = convert (result_type, integer_one_node);
398 /* If the length is some other constant value, we know that
399 this dimension in the first array cannot be superflat, so we
400 can just use its length from the actual stored bounds. */
401 else if (TREE_CODE (length2) == INTEGER_CST)
403 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
404 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
405 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
406 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
407 nbt = get_base_type (TREE_TYPE (ub1));
/* comparison: the two per-dimension lengths (ub - lb) are equal.  */
410 = build_binary_op (EQ_EXPR, result_type,
411 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
412 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
414 /* Note that we know that UB2 and LB2 are constant and hence
415 cannot contain a PLACEHOLDER_EXPR. */
417 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
418 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
420 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
421 this_a2_is_null = convert (result_type, integer_zero_node);
424 /* Otherwise compare the computed lengths. */
427 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
428 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
/* comparison: computed lengths equal; each array is "null" when its
   length (= extent - 1) is negative.  */
431 = build_binary_op (EQ_EXPR, result_type, length1, length2);
434 = build_binary_op (LT_EXPR, result_type, length1,
435 convert (bt, integer_zero_node));
437 = build_binary_op (LT_EXPR, result_type, length2,
438 convert (bt, integer_zero_node));
/* Accumulate this dimension's comparison into the running result and
   fold the per-dimension nullity into the overall nullity flags.  */
441 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
444 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
445 this_a1_is_null, a1_is_null);
446 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
447 this_a2_is_null, a2_is_null);
453 /* Unless the size of some bound is known to be zero, compare the
454 data in the array. */
457 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
460 a1 = convert (type, a1), a2 = convert (type, a2);
462 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
463 fold (build2 (EQ_EXPR, result_type, a1, a2)));
467 /* The result is also true if both sizes are zero. */
468 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
469 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
470 a1_is_null, a2_is_null),
473 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
474 starting the comparison above since the place it would be otherwise
475 evaluated would be wrong. */
477 if (contains_save_expr_p (a1))
478 result = build2 (COMPOUND_EXPR, result_type, a1, result);
480 if (contains_save_expr_p (a2))
481 result = build2 (COMPOUND_EXPR, result_type, a2, result);
486 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
487 type TYPE. We know that TYPE is a modular type with a nonbinary
/* NOTE(review): partial listing — the RHS parameter, the declarations
   of OP_TYPE and RESULT, the "unsignedp = false" branch for
   subtraction, and parts of the COND_EXPR arms are among the missing
   lines in this view.  */
491 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
494 tree modulus = TYPE_MODULUS (type);
495 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
496 unsigned int precision;
497 bool unsignedp = true;
501 /* If this is an addition of a constant, convert it to a subtraction
502 of a constant since we can do that faster. */
503 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
504 rhs = fold (build2 (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
506 /* For the logical operations, we only need PRECISION bits. For
507 addition and subtraction, we need one more and for multiplication we
508 need twice as many. But we never want to make a size smaller than
510 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
511 needed_precision += 1;
512 else if (op_code == MULT_EXPR)
513 needed_precision *= 2;
515 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
517 /* Unsigned will do for everything but subtraction. */
518 if (op_code == MINUS_EXPR)
521 /* If our type is the wrong signedness or isn't wide enough, make a new
522 type and convert both our operands to it. */
523 if (TYPE_PRECISION (op_type) < precision
524 || TYPE_UNSIGNED (op_type) != unsignedp)
526 /* Copy the node so we ensure it can be modified to make it modular. */
527 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
528 modulus = convert (op_type, modulus);
529 SET_TYPE_MODULUS (op_type, modulus);
530 TYPE_MODULAR_P (op_type) = 1;
531 lhs = convert (op_type, lhs);
532 rhs = convert (op_type, rhs);
535 /* Do the operation, then we'll fix it up. */
536 result = fold (build2 (op_code, op_type, lhs, rhs));
538 /* For multiplication, we have no choice but to do a full modulus
539 operation. However, we want to do this in the narrowest
541 if (op_code == MULT_EXPR)
543 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
544 modulus = convert (div_type, modulus);
545 SET_TYPE_MODULUS (div_type, modulus);
546 TYPE_MODULAR_P (div_type) = 1;
547 result = convert (op_type,
548 fold (build2 (TRUNC_MOD_EXPR, div_type,
549 convert (div_type, result), modulus)));
552 /* For subtraction, add the modulus back if we are negative. */
553 else if (op_code == MINUS_EXPR)
555 result = save_expr (result);
556 result = fold (build3 (COND_EXPR, op_type,
557 build2 (LT_EXPR, integer_type_node, result,
558 convert (op_type, integer_zero_node)),
559 fold (build2 (PLUS_EXPR, op_type,
564 /* For the other operations, subtract the modulus if we are >= it. */
567 result = save_expr (result);
568 result = fold (build3 (COND_EXPR, op_type,
569 build2 (GE_EXPR, integer_type_node,
571 fold (build2 (MINUS_EXPR, op_type,
/* Convert the (possibly widened) intermediate back to the caller's type.  */
576 return convert (type, result);
579 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
580 desired for the result. Usually the operation is to be performed
581 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
582 in which case the type to be used will be derived from the operands.
584 This function is very much unlike the ones for C and C++ since we
585 have already done any type conversion and matching required. All we
586 have to do here is validate the work done by SEM and handle subtypes. */
/* NOTE(review): partial listing — the function's return type, braces,
   the declarations of MODULUS and RESULT, the switch (op_code) header,
   most case labels (MODIFY_EXPR, ARRAY_REF, UNCONSTRAINED_ARRAY_REF,
   comparison, shift, bitwise and arithmetic cases) and a number of
   statements are missing from this view.  Verify every structural
   inference below against the complete file.  */
589 build_binary_op (enum tree_code op_code, tree result_type,
590 tree left_operand, tree right_operand)
592 tree left_type = TREE_TYPE (left_operand);
593 tree right_type = TREE_TYPE (right_operand);
594 tree left_base_type = get_base_type (left_type);
595 tree right_base_type = get_base_type (right_type);
596 tree operation_type = result_type;
597 tree best_type = NULL_TREE;
600 bool has_side_effects = false;
/* Unwrap a justified-modular record result type down to its field type.  */
603 && TREE_CODE (operation_type) == RECORD_TYPE
604 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
605 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
/* Replace a scalar "extra" subtype with its base type.  */
608 && !AGGREGATE_TYPE_P (operation_type)
609 && TYPE_EXTRA_SUBTYPE_P (operation_type))
610 operation_type = get_base_type (operation_type);
612 modulus = (operation_type && TREE_CODE (operation_type) == INTEGER_TYPE
613 && TYPE_MODULAR_P (operation_type)
614 ? TYPE_MODULUS (operation_type) : 0);
619 /* If there were any integral or pointer conversions on LHS, remove
620 them; we'll be putting them back below if needed. Likewise for
621 conversions between array and record types. But don't do this if
622 the right operand is not BLKmode (for packed arrays)
623 unless we are not changing the mode. */
624 while ((TREE_CODE (left_operand) == CONVERT_EXPR
625 || TREE_CODE (left_operand) == NOP_EXPR
626 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
627 && (((INTEGRAL_TYPE_P (left_type)
628 || POINTER_TYPE_P (left_type))
629 && (INTEGRAL_TYPE_P (TREE_TYPE
630 (TREE_OPERAND (left_operand, 0)))
631 || POINTER_TYPE_P (TREE_TYPE
632 (TREE_OPERAND (left_operand, 0)))))
633 || (((TREE_CODE (left_type) == RECORD_TYPE
634 /* Don't remove conversions to justified modular
636 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
637 || TREE_CODE (left_type) == ARRAY_TYPE)
638 && ((TREE_CODE (TREE_TYPE
639 (TREE_OPERAND (left_operand, 0)))
641 || (TREE_CODE (TREE_TYPE
642 (TREE_OPERAND (left_operand, 0)))
644 && (TYPE_MODE (right_type) == BLKmode
645 || (TYPE_MODE (left_type)
646 == TYPE_MODE (TREE_TYPE
648 (left_operand, 0))))))))
650 left_operand = TREE_OPERAND (left_operand, 0);
651 left_type = TREE_TYPE (left_operand);
/* With no explicit result type, operate in the (stripped) LHS type.  */
655 operation_type = left_type;
657 /* If the RHS has a conversion between record and array types and
658 an inner type is no worse, use it. Note we cannot do this for
659 modular types or types with TYPE_ALIGN_OK, since the latter
660 might indicate a conversion between a root type and a class-wide
661 type, which we must not remove. */
662 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
663 && (((TREE_CODE (right_type) == RECORD_TYPE
664 || TREE_CODE (right_type) == UNION_TYPE)
665 && !TYPE_JUSTIFIED_MODULAR_P (right_type)
666 && !TYPE_ALIGN_OK (right_type)
667 && !TYPE_IS_FAT_POINTER_P (right_type))
668 || TREE_CODE (right_type) == ARRAY_TYPE)
669 && ((((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
671 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
673 && !(TYPE_JUSTIFIED_MODULAR_P
674 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
676 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
677 && !(TYPE_IS_FAT_POINTER_P
678 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
679 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
682 = find_common_type (right_type,
683 TREE_TYPE (TREE_OPERAND
684 (right_operand, 0))))
685 || right_type != best_type))
687 right_operand = TREE_OPERAND (right_operand, 0);
688 right_type = TREE_TYPE (right_operand);
691 /* If we are copying one array or record to another, find the best type
693 if (((TREE_CODE (left_type) == ARRAY_TYPE
694 && TREE_CODE (right_type) == ARRAY_TYPE)
695 || (TREE_CODE (left_type) == RECORD_TYPE
696 && TREE_CODE (right_type) == RECORD_TYPE))
697 && (best_type = find_common_type (left_type, right_type)))
698 operation_type = best_type;
700 /* If a class-wide type may be involved, force use of the RHS type. */
701 if ((TREE_CODE (right_type) == RECORD_TYPE
702 || TREE_CODE (right_type) == UNION_TYPE)
703 && TYPE_ALIGN_OK (right_type))
704 operation_type = right_type;
706 /* Ensure everything on the LHS is valid. If we have a field reference,
707 strip anything that get_inner_reference can handle. Then remove any
708 conversions with types having the same code and mode. Mark
709 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
710 either an INDIRECT_REF or a decl. */
711 result = left_operand;
714 tree restype = TREE_TYPE (result);
716 if (TREE_CODE (result) == COMPONENT_REF
717 || TREE_CODE (result) == ARRAY_REF
718 || TREE_CODE (result) == ARRAY_RANGE_REF)
719 while (handled_component_p (result))
720 result = TREE_OPERAND (result, 0);
721 else if (TREE_CODE (result) == REALPART_EXPR
722 || TREE_CODE (result) == IMAGPART_EXPR
723 || ((TREE_CODE (result) == NOP_EXPR
724 || TREE_CODE (result) == CONVERT_EXPR)
725 && (((TREE_CODE (restype)
726 == TREE_CODE (TREE_TYPE
727 (TREE_OPERAND (result, 0))))
728 && (TYPE_MODE (TREE_TYPE
729 (TREE_OPERAND (result, 0)))
730 == TYPE_MODE (restype)))
731 || TYPE_ALIGN_OK (restype))))
732 result = TREE_OPERAND (result, 0);
733 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
735 TREE_ADDRESSABLE (result) = 1;
736 result = TREE_OPERAND (result, 0);
742 gcc_assert (TREE_CODE (result) == INDIRECT_REF
743 || TREE_CODE (result) == NULL_EXPR || DECL_P (result));
745 /* Convert the right operand to the operation type unless
746 it is either already of the correct type or if the type
747 involves a placeholder, since the RHS may not have the same
749 if (operation_type != right_type
750 && (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
752 right_operand = convert (operation_type, right_operand);
753 right_type = operation_type;
756 /* If the left operand is not the same type as the operation type,
757 surround it in a VIEW_CONVERT_EXPR. */
758 if (left_type != operation_type)
759 left_operand = unchecked_convert (operation_type, left_operand, false);
/* Assignments always have side effects.  */
761 has_side_effects = true;
/* Indexing (presumably UNCONSTRAINED_ARRAY_REF): operate on the
   component type, then share the ARRAY_REF handling below.  */
767 operation_type = TREE_TYPE (left_type);
769 /* ... fall through ... */
771 case ARRAY_RANGE_REF:
773 /* First convert the right operand to its base type. This will
774 prevent unneeded signedness conversions when sizetype is wider than
776 right_operand = convert (right_base_type, right_operand);
777 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
779 if (!TREE_CONSTANT (right_operand)
780 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
781 gnat_mark_addressable (left_operand);
/* Comparison cases (labels missing in this listing).  */
790 gcc_assert (!POINTER_TYPE_P (left_type));
792 /* ... fall through ... */
796 /* If either operand is a NULL_EXPR, just return a new one. */
797 if (TREE_CODE (left_operand) == NULL_EXPR)
798 return build2 (op_code, result_type,
799 build1 (NULL_EXPR, integer_type_node,
800 TREE_OPERAND (left_operand, 0)),
803 else if (TREE_CODE (right_operand) == NULL_EXPR)
804 return build2 (op_code, result_type,
805 build1 (NULL_EXPR, integer_type_node,
806 TREE_OPERAND (right_operand, 0)),
809 /* If either object is a justified modular type, get the
810 fields from within. */
811 if (TREE_CODE (left_type) == RECORD_TYPE
812 && TYPE_JUSTIFIED_MODULAR_P (left_type))
814 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
816 left_type = TREE_TYPE (left_operand);
817 left_base_type = get_base_type (left_type);
820 if (TREE_CODE (right_type) == RECORD_TYPE
821 && TYPE_JUSTIFIED_MODULAR_P (right_type))
823 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
825 right_type = TREE_TYPE (right_operand);
826 right_base_type = get_base_type (right_type);
829 /* If both objects are arrays, compare them specially. */
830 if ((TREE_CODE (left_type) == ARRAY_TYPE
831 || (TREE_CODE (left_type) == INTEGER_TYPE
832 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
833 && (TREE_CODE (right_type) == ARRAY_TYPE
834 || (TREE_CODE (right_type) == INTEGER_TYPE
835 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
837 result = compare_arrays (result_type, left_operand, right_operand);
839 if (op_code == NE_EXPR)
840 result = invert_truthvalue (result);
842 gcc_assert (op_code == EQ_EXPR);
847 /* Otherwise, the base types must be the same unless the objects are
848 records. If we have records, use the best type and convert both
849 operands to that type. */
850 if (left_base_type != right_base_type)
852 if (TREE_CODE (left_base_type) == RECORD_TYPE
853 && TREE_CODE (right_base_type) == RECORD_TYPE)
855 /* The only way these are permitted to be the same is if both
856 types have the same name. In that case, one of them must
857 not be self-referential. Use that one as the best type.
858 Even better is if one is of fixed size. */
859 best_type = NULL_TREE;
861 gcc_assert (TYPE_NAME (left_base_type)
862 && (TYPE_NAME (left_base_type)
863 == TYPE_NAME (right_base_type)));
865 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
866 best_type = left_base_type;
867 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
868 best_type = right_base_type;
869 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
870 best_type = left_base_type;
871 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
872 best_type = right_base_type;
876 left_operand = convert (best_type, left_operand);
877 right_operand = convert (best_type, right_operand);
883 /* If we are comparing a fat pointer against zero, we need to
884 just compare the data pointer. */
885 else if (TYPE_FAT_POINTER_P (left_base_type)
886 && TREE_CODE (right_operand) == CONSTRUCTOR
887 && integer_zerop (TREE_VALUE
888 (CONSTRUCTOR_ELTS (right_operand))))
890 right_operand = build_component_ref (left_operand, NULL_TREE,
891 TYPE_FIELDS (left_base_type),
893 left_operand = convert (TREE_TYPE (right_operand),
/* Same base type on both sides: just normalize both operands to it.  */
898 left_operand = convert (left_base_type, left_operand);
899 right_operand = convert (right_base_type, right_operand);
905 case PREINCREMENT_EXPR:
906 case PREDECREMENT_EXPR:
907 case POSTINCREMENT_EXPR:
908 case POSTDECREMENT_EXPR:
909 /* In these, the result type and the left operand type should be the
910 same. Do the operation in the base type of those and convert the
911 right operand (which is an integer) to that type.
913 Note that these operations are only used in loop control where
914 we guarantee that no overflow can occur. So nothing special need
915 be done for modular types. */
917 gcc_assert (left_type == result_type);
918 operation_type = get_base_type (result_type);
919 left_operand = convert (operation_type, left_operand);
920 right_operand = convert (operation_type, right_operand);
921 has_side_effects = true;
929 /* The RHS of a shift can be any type. Also, ignore any modulus
930 (we used to abort, but this is needed for unchecked conversion
931 to modular types). Otherwise, processing is the same as normal. */
932 gcc_assert (operation_type == left_base_type);
934 left_operand = convert (operation_type, left_operand);
937 case TRUTH_ANDIF_EXPR:
938 case TRUTH_ORIF_EXPR:
/* Short-circuit logical operators: normalize both operands to boolean.  */
942 left_operand = gnat_truthvalue_conversion (left_operand);
943 right_operand = gnat_truthvalue_conversion (right_operand);
949 /* For binary modulus, if the inputs are in range, so are the
951 if (modulus && integer_pow2p (modulus))
/* Complex construction (presumably COMPLEX_EXPR): result type's
   component type must match both operand base types.  */
957 gcc_assert (TREE_TYPE (result_type) == left_base_type
958 && TREE_TYPE (result_type) == right_base_type);
959 left_operand = convert (left_base_type, left_operand);
960 right_operand = convert (right_base_type, right_operand);
963 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
964 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
965 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
966 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
967 /* These always produce results lower than either operand. */
973 /* The result type should be the same as the base types of
974 both operands (and they should be the same). Convert
975 everything to the result type. */
977 gcc_assert (operation_type == left_base_type
978 && left_base_type == right_base_type);
979 left_operand = convert (operation_type, left_operand);
980 right_operand = convert (operation_type, right_operand);
/* Nonbinary modular types need the dedicated helper.  */
983 if (modulus && !integer_pow2p (modulus))
985 result = nonbinary_modular_operation (op_code, operation_type,
986 left_operand, right_operand);
989 /* If either operand is a NULL_EXPR, just return a new one. */
990 else if (TREE_CODE (left_operand) == NULL_EXPR)
991 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
992 else if (TREE_CODE (right_operand) == NULL_EXPR)
993 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
994 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
995 result = fold (build4 (op_code, operation_type, left_operand,
996 right_operand, NULL_TREE, NULL_TREE));
/* Ordinary binary node otherwise.  */
999 = fold (build2 (op_code, operation_type, left_operand, right_operand));
1001 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1002 TREE_CONSTANT (result)
1003 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1004 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1006 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1007 && TYPE_VOLATILE (operation_type))
1008 TREE_THIS_VOLATILE (result) = 1;
1010 /* If we are working with modular types, perform the MOD operation
1011 if something above hasn't eliminated the need for it. */
1013 result = fold (build2 (FLOOR_MOD_EXPR, operation_type, result,
1014 convert (operation_type, modulus)));
1016 if (result_type && result_type != operation_type)
1017 result = convert (result_type, result);
1022 /* Similar, but for unary operations. */
/* Build a unary tree node for OP_CODE applied to OPERAND, giving the result
   type RESULT_TYPE (or a type derived from OPERAND if RESULT_TYPE is null).
   Handles Ada-specific cases: justified-modular records, modular-type
   negation/NOT, taking addresses ('Access / 'Address), and dereferencing
   fat/thin pointers to unconstrained arrays.
   NOTE(review): several original lines are elided in this excerpt (case
   labels, braces, returns); comments below describe only what is visible.  */
1025 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1027 tree type = TREE_TYPE (operand);
1028 tree base_type = get_base_type (type);
1029 tree operation_type = result_type;
1031 bool side_effects = false;
     /* For a justified modular record, operate on the wrapped field's type.  */
1034 && TREE_CODE (operation_type) == RECORD_TYPE
1035 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1036 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
     /* For a non-aggregate extra subtype, operate on its base type.  */
1039 && !AGGREGATE_TYPE_P (operation_type)
1040 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1041 operation_type = get_base_type (operation_type);
1047 if (!operation_type)
1048 result_type = operation_type = TREE_TYPE (type);
1050 gcc_assert (result_type == TREE_TYPE (type));
1052 result = fold (build1 (op_code, operation_type, operand));
1055 case TRUTH_NOT_EXPR:
1056 gcc_assert (result_type == base_type);
1057 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1060 case ATTR_ADDR_EXPR:
     /* Address-taking: dispatch on the shape of the operand tree.  */
1062 switch (TREE_CODE (operand))
1065 case UNCONSTRAINED_ARRAY_REF:
     /* The address of an unconstrained array ref is its fat/thin pointer.  */
1066 result = TREE_OPERAND (operand, 0);
1068 /* Make sure the type here is a pointer, not a reference.
1069 GCC wants pointer types for function addresses. */
1071 result_type = build_pointer_type (type);
1076 TREE_TYPE (result) = type = build_pointer_type (type);
1080 case ARRAY_RANGE_REF:
1083 /* If this is for 'Address, find the address of the prefix and
1084 add the offset to the field. Otherwise, do this the normal
1086 if (op_code == ATTR_ADDR_EXPR)
1088 HOST_WIDE_INT bitsize;
1089 HOST_WIDE_INT bitpos;
1091 enum machine_mode mode;
1092 int unsignedp, volatilep;
1094 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1095 &mode, &unsignedp, &volatilep,
1098 /* If INNER is a padding type whose field has a self-referential
1099 size, convert to that inner type. We know the offset is zero
1100 and we need to have that type visible. */
1101 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1102 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1103 && (CONTAINS_PLACEHOLDER_P
1104 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1105 (TREE_TYPE (inner)))))))
1106 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1109 /* Compute the offset as a byte offset from INNER. */
1111 offset = size_zero_node;
     /* A bit position not on a byte boundary cannot yield a clean address;
        the string below is presumably a post_error-style diagnostic.  */
1113 if (bitpos % BITS_PER_UNIT != 0)
1115 ("taking address of object not aligned on storage unit?",
1118 offset = size_binop (PLUS_EXPR, offset,
1119 size_int (bitpos / BITS_PER_UNIT));
1121 /* Take the address of INNER, convert the offset to void *, and
1122 add them. It will later be converted to the desired result
1124 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1125 inner = convert (ptr_void_type_node, inner);
1126 offset = convert (ptr_void_type_node, offset);
1127 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1129 result = convert (build_pointer_type (TREE_TYPE (operand)),
1136 /* If this is just a constructor for a padded record, we can
1137 just take the address of the single field and convert it to
1138 a pointer to our type. */
1139 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1142 = build_unary_op (ADDR_EXPR, NULL_TREE,
1143 TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
1144 result = convert (build_pointer_type (TREE_TYPE (operand)),
     /* Conversion between aggregates: take the inner object's address
        directly, with the requested result type.  */
1152 if (AGGREGATE_TYPE_P (type)
1153 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1154 return build_unary_op (ADDR_EXPR, result_type,
1155 TREE_OPERAND (operand, 0));
1157 /* If this NOP_EXPR doesn't change the mode, get the result type
1158 from this type and go down. We need to do this in case
1159 this is a conversion of a CONST_DECL. */
1160 if (TYPE_MODE (type) != BLKmode
1161 && (TYPE_MODE (type)
1162 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1163 return build_unary_op (ADDR_EXPR,
1164 (result_type ? result_type
1165 : build_pointer_type (type)),
1166 TREE_OPERAND (operand, 0));
     /* Presumably the CONST_DECL case: substitute its addressable variable.  */
1170 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1172 /* ... fall through ... */
1177 /* If we are taking the address of a padded record whose field
1178 contains a template, take the address of the template. */
1179 if (TREE_CODE (type) == RECORD_TYPE
1180 && TYPE_IS_PADDING_P (type)
1181 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1182 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1184 type = TREE_TYPE (TYPE_FIELDS (type));
1185 operand = convert (type, operand);
1188 if (type != error_mark_node)
1189 operation_type = build_pointer_type (type);
     /* Mark the operand so it is not assigned to a register.  */
1191 gnat_mark_addressable (operand);
1192 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1195 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1199 /* If we want to refer to an entire unconstrained array,
1200 make up an expression to do so. This will never survive to
1201 the backend. If TYPE is a thin pointer, first convert the
1202 operand to a fat pointer. */
1203 if (TYPE_THIN_POINTER_P (type)
1204 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1207 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1209 type = TREE_TYPE (operand);
1212 if (TYPE_FAT_POINTER_P (type))
1214 result = build1 (UNCONSTRAINED_ARRAY_REF,
1215 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1216 TREE_READONLY (result) = TREE_STATIC (result)
1217 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
     /* Dereferencing a fresh ADDR_EXPR just cancels it out.  */
1219 else if (TREE_CODE (operand) == ADDR_EXPR)
1220 result = TREE_OPERAND (operand, 0);
1224 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1225 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1229 = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
     /* Arithmetic cases (negate / NOT): special handling for modular types.  */
1235 tree modulus = ((operation_type
1236 && TREE_CODE (operation_type) == INTEGER_TYPE
1237 && TYPE_MODULAR_P (operation_type))
1238 ? TYPE_MODULUS (operation_type) : 0);
1239 int mod_pow2 = modulus && integer_pow2p (modulus);
1241 /* If this is a modular type, there are various possibilities
1242 depending on the operation and whether the modulus is a
1243 power of two or not. */
1247 gcc_assert (operation_type == base_type);
1248 operand = convert (operation_type, operand);
1250 /* The fastest in the negate case for binary modulus is
1251 the straightforward code; the TRUNC_MOD_EXPR below
1252 is an AND operation. */
1253 if (op_code == NEGATE_EXPR && mod_pow2)
1254 result = fold (build2 (TRUNC_MOD_EXPR, operation_type,
1255 fold (build1 (NEGATE_EXPR, operation_type,
1259 /* For nonbinary negate case, return zero for zero operand,
1260 else return the modulus minus the operand. If the modulus
1261 is a power of two minus one, we can do the subtraction
1262 as an XOR since it is equivalent and faster on most machines. */
1263 else if (op_code == NEGATE_EXPR && !mod_pow2)
1265 if (integer_pow2p (fold (build2 (PLUS_EXPR, operation_type,
1267 convert (operation_type,
1268 integer_one_node)))))
1269 result = fold (build2 (BIT_XOR_EXPR, operation_type,
1272 result = fold (build2 (MINUS_EXPR, operation_type,
     /* Guard against the zero operand with a COND_EXPR.  */
1275 result = fold (build3 (COND_EXPR, operation_type,
1276 fold (build2 (NE_EXPR,
1281 integer_zero_node))),
1286 /* For the NOT cases, we need a constant equal to
1287 the modulus minus one. For a binary modulus, we
1288 XOR against the constant and subtract the operand from
1289 that constant for nonbinary modulus. */
1291 tree cnst = fold (build2 (MINUS_EXPR, operation_type, modulus,
1292 convert (operation_type,
1293 integer_one_node)));
1296 result = fold (build2 (BIT_XOR_EXPR, operation_type,
1299 result = fold (build2 (MINUS_EXPR, operation_type,
1307 /* ... fall through ... */
     /* Default arithmetic case: fold the operation on the base type.  */
1310 gcc_assert (operation_type == base_type);
1311 result = fold (build1 (op_code, operation_type, convert (operation_type,
1317 TREE_SIDE_EFFECTS (result) = 1;
1318 if (TREE_CODE (result) == INDIRECT_REF)
1319 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
     /* Finally coerce to the caller-requested type if it differs.  */
1322 if (result_type && TREE_TYPE (result) != result_type)
1323 result = convert (result_type, result);
1328 /* Similar, but for COND_EXPR. */
/* Build a COND_EXPR selecting TRUE_OPERAND or FALSE_OPERAND based on
   CONDITION_OPERAND, with result type RESULT_TYPE.  If RESULT_TYPE is of
   unconstrained or self-referential size, the selection is done on the
   operands' addresses and the result dereferenced.  */
1331 build_cond_expr (tree result_type, tree condition_operand,
1332 tree true_operand, tree false_operand)
1335 bool addr_p = false;
1337 /* The front-end verifies that result, true and false operands have same base
1338 type. Convert everything to the result type. */
1340 true_operand = convert (result_type, true_operand);
1341 false_operand = convert (result_type, false_operand);
1343 /* If the result type is unconstrained, take the address of
1344 the operands and then dereference our result. */
1345 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1346 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1349 result_type = build_pointer_type (result_type);
1350 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1351 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1354 result = fold (build3 (COND_EXPR, result_type, condition_operand,
1355 true_operand, false_operand));
1357 /* If either operand is a SAVE_EXPR (possibly surrounded by
1358 arithmetic), make sure it gets done. */
1359 true_operand = skip_simple_arithmetic (true_operand);
1360 false_operand = skip_simple_arithmetic (false_operand);
     /* Force evaluation of the SAVE_EXPR before the COND_EXPR by wrapping
        in a COMPOUND_EXPR.  */
1362 if (TREE_CODE (true_operand) == SAVE_EXPR)
1363 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1365 if (TREE_CODE (false_operand) == SAVE_EXPR)
1366 result = build2 (COMPOUND_EXPR, result_type, false_operand, result);
1368 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1369 SAVE_EXPRs with side effects and not shared by both arms. */
     /* Undo the address-taking done above (addr_p path).  */
1372 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1378 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
/* Build a CALL_EXPR invoking FUNDECL with the single argument ARG.
   The call is marked as having side effects.  */
1382 build_call_1_expr (tree fundecl, tree arg)
1384 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1385 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1386 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1389 TREE_SIDE_EFFECTS (call) = 1;
1394 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
/* Build a CALL_EXPR invoking FUNDECL with two arguments, ARG1 and ARG2.
   The call is marked as having side effects.  */
1398 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1400 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1401 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1402 chainon (chainon (NULL_TREE,
1403 build_tree_list (NULL_TREE, arg1)),
1404 build_tree_list (NULL_TREE, arg2)),
1407 TREE_SIDE_EFFECTS (call) = 1;
1412 /* Likewise to call FUNDECL with no arguments. */
/* Build a CALL_EXPR invoking FUNDECL with no arguments.
   The call is marked as having side effects.  */
1415 build_call_0_expr (tree fundecl)
1417 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1418 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1419 NULL_TREE, NULL_TREE);
1421 TREE_SIDE_EFFECTS (call) = 1;
1426 /* Call a function that raises an exception and pass the line number and file
1427 name, if requested. MSG says which exception function to call. */
/* Build a call to the exception-raising routine selected by MSG (an index
   into gnat_raise_decls), passing the source file name and line number.
   An empty file name is passed when exception locations are suppressed.  */
1430 build_call_raise (int msg)
1432 tree fndecl = gnat_raise_decls[msg];
1434 = (Debug_Flag_NN || Exception_Locations_Suppressed) ? "" : ref_filename;
1435 int len = strlen (str) + 1;
1436 tree filename = build_string (len, str);
     /* Give the string an array-of-char type of the right length so it can
        be taken an address of below.  */
1438 TREE_TYPE (filename)
1439 = build_array_type (char_type_node,
1440 build_index_type (build_int_cst (NULL_TREE, len)));
1443 build_call_2_expr (fndecl,
1444 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1446 build_int_cst (NULL_TREE, input_line));
1449 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
/* Return a CONSTRUCTOR of TYPE whose element list is LIST, computing the
   TREE_CONSTANT / TREE_STATIC / TREE_SIDE_EFFECTS / TREE_READONLY flags
   from the elements and from TYPE.  */
1452 gnat_build_constructor (tree type, tree list)
1455 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1456 bool side_effects = false;
     /* Scan the elements: any non-constant value, non-constant bit-field
        initializer, or value invalid as a static initializer makes the
        whole constructor non-constant; any side effect taints it.  */
1459 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1461 if (!TREE_CONSTANT (TREE_VALUE (elmt))
1462 || (TREE_CODE (type) == RECORD_TYPE
1463 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1464 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1465 || !initializer_constant_valid_p (TREE_VALUE (elmt),
1466 TREE_TYPE (TREE_VALUE (elmt))))
1467 allconstant = false;
1469 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1470 side_effects = true;
1472 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1473 be executing the code we generate here in that case, but handle it
1474 specially to avoid the compiler blowing up. */
1475 if (TREE_CODE (type) == RECORD_TYPE
1477 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1478 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1481 /* If TYPE is a RECORD_TYPE and the fields are not in the
1482 same order as their bit position, don't treat this as constant
1483 since varasm.c can't handle it. */
1484 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1486 tree last_pos = bitsize_zero_node;
1489 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1491 tree this_pos = bit_position (field);
1493 if (TREE_CODE (this_pos) != INTEGER_CST
1494 || tree_int_cst_lt (this_pos, last_pos))
1496 allconstant = false;
1500 last_pos = this_pos;
1504 result = build_constructor (type, list);
1505 TREE_CONSTANT (result) = TREE_INVARIANT (result)
1506 = TREE_STATIC (result) = allconstant;
1507 TREE_SIDE_EFFECTS (result) = side_effects;
1508 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1512 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1513 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1514 for the field. Don't fold the result if NO_FOLD_P is true.
1516 We also handle the fact that we might have been passed a pointer to the
1517 actual record and know how to look for fields in variant parts. */
/* Return a COMPONENT_REF for the field of RECORD_VARIABLE named COMPONENT
   (an IDENTIFIER_NODE) or given directly by FIELD (a FIELD_DECL); exactly
   one of the two must be supplied.  Fields are also looked up through
   variant parts (via DECL_INTERNAL_P fields) and by original field.
   The result is not folded when NO_FOLD_P is true.  */
1520 build_simple_component_ref (tree record_variable, tree component,
1521 tree field, bool no_fold_p)
1523 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1526 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1527 || TREE_CODE (record_type) == UNION_TYPE
1528 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1529 && TYPE_SIZE (record_type)
1530 && (component != 0) != (field != 0));
1532 /* If no field was specified, look for a field with the specified name
1533 in the current record only. */
1535 for (field = TYPE_FIELDS (record_type); field;
1536 field = TREE_CHAIN (field))
1537 if (DECL_NAME (field) == component)
1543 /* If this field is not in the specified record, see if we can find
1544 something in the record whose original field is the same as this one. */
1545 if (DECL_CONTEXT (field) != record_type)
1546 /* Check if there is a field with name COMPONENT in the record. */
1550 /* First loop thru normal components. */
1552 for (new_field = TYPE_FIELDS (record_type); new_field;
1553 new_field = TREE_CHAIN (new_field))
1554 if (DECL_ORIGINAL_FIELD (new_field) == field
1555 || new_field == DECL_ORIGINAL_FIELD (field)
1556 || (DECL_ORIGINAL_FIELD (field)
1557 && (DECL_ORIGINAL_FIELD (field)
1558 == DECL_ORIGINAL_FIELD (new_field))))
1561 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1562 the component in the first search. Doing this search in 2 steps
1563 is required to avoid hidden homonymous fields in the
     /* Recurse into each internal (variant-part) field, then look for
        FIELD inside it.  */
1567 for (new_field = TYPE_FIELDS (record_type); new_field;
1568 new_field = TREE_CHAIN (new_field))
1569 if (DECL_INTERNAL_P (new_field))
1572 = build_simple_component_ref (record_variable,
1573 NULL_TREE, new_field, no_fold_p)
1574 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1587 /* It would be nice to call "fold" here, but that can lose a type
1588 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1589 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record_variable, field,
     /* Propagate read-only and volatility from the object, the field,
        and the record type onto the reference.  */
1592 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1593 TREE_READONLY (ref) = 1;
1594 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1595 || TYPE_VOLATILE (record_type))
1596 TREE_THIS_VOLATILE (ref) = 1;
1598 return no_fold_p ? ref : fold (ref);
1601 /* Like build_simple_component_ref, except that we give an error if the
1602 reference could not be found. */
/* Like build_simple_component_ref, except that when the reference cannot
   be found the failure is turned into a Constraint_Error raise (wrapped
   in a NULL_EXPR of the field's type).  */
1605 build_component_ref (tree record_variable, tree component,
1606 tree field, bool no_fold_p)
1608 tree ref = build_simple_component_ref (record_variable, component, field,
1614 /* If FIELD was specified, assume this is an invalid user field so
1615 raise constraint error. Otherwise, we can't find the type to return, so
1618 return build1 (NULL_EXPR, TREE_TYPE (field),
1619 build_call_raise (CE_Discriminant_Check_Failed));
1622 /* Build a GCC tree to call an allocation or deallocation function.
1623 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1624 generate an allocator.
1626 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1627 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1628 storage pool to use. If not present, malloc and free will be used, except
1629 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1630 object dynamically on the stack frame. */
/* Build a tree to call an allocation or deallocation routine.  GNU_OBJ, if
   nonzero, is the object to deallocate; otherwise an allocator is built.
   GNU_SIZE is the size in bytes and ALIGN the alignment in bits.
   GNAT_PROC, if present, is the user storage-pool procedure to call
   (GNAT_POOL is the pool); otherwise malloc/free are used, except when
   GNAT_PROC is -1, meaning allocation on the stack frame.  */
1633 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
1634 Entity_Id gnat_proc, Entity_Id gnat_pool,
     /* Convert the bit alignment to bytes for the pool interface.  */
1637 tree gnu_align = size_int (align / BITS_PER_UNIT);
1639 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
1641 if (Present (gnat_proc))
1643 /* The storage pools are obviously always tagged types, but the
1644 secondary stack uses the same mechanism and is not tagged */
1645 if (Is_Tagged_Type (Etype (gnat_pool)))
1647 /* The size is the third parameter; the alignment is the
1649 Entity_Id gnat_size_type
1650 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1651 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1652 tree gnu_proc = gnat_to_gnu (gnat_proc);
1653 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1654 tree gnu_pool = gnat_to_gnu (gnat_pool);
1655 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1656 tree gnu_args = NULL_TREE;
1659 /* The first arg is always the address of the storage pool; next
1660 comes the address of the object, for a deallocator, then the
1661 size and alignment. */
1663 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1667 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1670 = chainon (gnu_args,
1671 build_tree_list (NULL_TREE,
1672 convert (gnu_size_type, gnu_size)));
1674 = chainon (gnu_args,
1675 build_tree_list (NULL_TREE,
1676 convert (gnu_size_type, gnu_align)));
1678 gnu_call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1679 gnu_proc_addr, gnu_args, NULL_TREE);
1680 TREE_SIDE_EFFECTS (gnu_call) = 1;
1684 /* Secondary stack case. */
1687 /* The size is the second parameter */
1688 Entity_Id gnat_size_type
1689 = Etype (Next_Formal (First_Formal (gnat_proc)));
1690 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1691 tree gnu_proc = gnat_to_gnu (gnat_proc);
1692 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1693 tree gnu_args = NULL_TREE;
1696 /* The first arg is the address of the object, for a
1697 deallocator, then the size */
1700 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1703 = chainon (gnu_args,
1704 build_tree_list (NULL_TREE,
1705 convert (gnu_size_type, gnu_size)));
1707 gnu_call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1708 gnu_proc_addr, gnu_args, NULL_TREE);
1709 TREE_SIDE_EFFECTS (gnu_call) = 1;
     /* No pool procedure: default deallocation goes through free.  */
1715 return build_call_1_expr (free_decl, gnu_obj);
1717 /* ??? For now, disable variable-sized allocators in the stack since
1718 we can't yet gimplify an ALLOCATE_EXPR. */
1719 else if (gnat_pool == -1
1720 && TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1722 /* If the size is a constant, we can put it in the fixed portion of
1723 the stack frame to avoid the need to adjust the stack pointer. */
1724 if (TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1727 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1728 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1730 = create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1731 gnu_array_type, NULL_TREE, false, false, false,
1732 false, NULL, gnat_node);
1734 return convert (ptr_void_type_node,
1735 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
     /* Variable size on the stack: emit an ALLOCATE_EXPR (see ??? above).  */
1740 return build2 (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
     /* Default heap case: check the No_Implicit_Heap_Allocation restriction
        for compiler-generated allocations, then call malloc.  */
1745 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1746 Check_No_Implicit_Heap_Alloc (gnat_node);
1747 return build_call_1_expr (malloc_decl, gnu_size);
1751 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1752 initial value is INIT, if INIT is nonzero. Convert the expression to
1753 RESULT_TYPE, which must be some type of pointer. Return the tree.
1754 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1755 the storage pool to use. GNAT_NODE is used to provide an error
1756 location for restriction violations messages. If IGNORE_INIT_TYPE is
1757 true, ignore the type of INIT for the purpose of determining the size;
1758 this will cause the maximum size to be allocated if TYPE is of
1759 self-referential size. */
/* Build a tree allocating an object of TYPE with optional initial value
   INIT, converted to RESULT_TYPE (a pointer type).  GNAT_PROC/GNAT_POOL
   select the storage pool; GNAT_NODE locates error messages.  When
   IGNORE_INIT_TYPE is true, INIT's type is ignored for sizing, so a
   self-referentially-sized TYPE gets its maximum size.  */
1762 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
1763 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
1765 tree size = TYPE_SIZE_UNIT (type);
1768 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1769 if (init && TREE_CODE (init) == NULL_EXPR)
1770 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1772 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1773 sizes of the object and its template. Allocate the whole thing and
1774 fill in the parts that are known. */
1775 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1778 = (TYPE_FAT_POINTER_P (result_type)
1779 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1780 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1782 = build_unc_object_type (template_type, type,
1783 get_identifier ("ALLOC"));
1784 tree storage_ptr_type = build_pointer_type (storage_type);
1786 tree template_cons = NULL_TREE;
1788 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
1791 /* If the size overflows, pass -1 so the allocator will raise
1793 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1794 size = ssize_int (-1);
1796 storage = build_call_alloc_dealloc (NULL_TREE, size,
1797 TYPE_ALIGN (storage_type),
1798 gnat_proc, gnat_pool, gnat_node);
1799 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
     /* Strip a padding record around TYPE and convert INIT accordingly.  */
1801 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1803 type = TREE_TYPE (TYPE_FIELDS (type));
1806 init = convert (type, init);
1809 /* If there is an initializing expression, make a constructor for
1810 the entire object including the bounds and copy it into the
1811 object. If there is no initializing expression, just set the
1815 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1817 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1818 build_template (template_type, type,
     /* With INIT: store the whole constructor, then yield the pointer.  */
1824 build2 (COMPOUND_EXPR, storage_ptr_type,
1826 (MODIFY_EXPR, storage_type,
1827 build_unary_op (INDIRECT_REF, NULL_TREE,
1828 convert (storage_ptr_type, storage)),
1829 gnat_build_constructor (storage_type, template_cons)),
1830 convert (storage_ptr_type, storage)));
     /* Without INIT: only fill in the template (bounds) part.  */
1834 (COMPOUND_EXPR, result_type,
1836 (MODIFY_EXPR, template_type,
1838 (build_unary_op (INDIRECT_REF, NULL_TREE,
1839 convert (storage_ptr_type, storage)),
1840 NULL_TREE, TYPE_FIELDS (storage_type), 0),
1841 build_template (template_type, type, NULL_TREE)),
1842 convert (result_type, convert (storage_ptr_type, storage)));
1845 /* If we have an initializing expression, see if its size is simpler
1846 than the size from the type. */
1847 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
1848 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1849 || CONTAINS_PLACEHOLDER_P (size)))
1850 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1852 /* If the size is still self-referential, reference the initializing
1853 expression, if it is present. If not, this must have been a
1854 call to allocate a library-level object, in which case we use
1855 the maximum size. */
1856 if (CONTAINS_PLACEHOLDER_P (size))
1858 if (!ignore_init_type && init)
1859 size = substitute_placeholder_in_expr (size, init);
1861 size = max_size (size, true);
1864 /* If the size overflows, pass -1 so the allocator will raise
1866 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1867 size = ssize_int (-1);
1869 /* If this is a type whose alignment is larger than the
1870 biggest we support in normal alignment and this is in
1871 the default storage pool, make an "aligning type", allocate
1872 it, point to the field we need, and return that. */
1873 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1876 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1878 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1879 BIGGEST_ALIGNMENT, Empty,
1881 result = save_expr (result);
1882 result = convert (build_pointer_type (new_type), result);
1883 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1884 result = build_component_ref (result, NULL_TREE,
1885 TYPE_FIELDS (new_type), 0);
1886 result = convert (result_type,
1887 build_unary_op (ADDR_EXPR, NULL_TREE, result));
     /* Normal-alignment case: allocate SIZE directly.  */
1890 result = convert (result_type,
1891 build_call_alloc_dealloc (NULL_TREE, size,
1897 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1898 the value, and return the address. Do this with a COMPOUND_EXPR. */
1902 result = save_expr (result);
1904 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1906 (MODIFY_EXPR, NULL_TREE,
1907 build_unary_op (INDIRECT_REF,
1908 TREE_TYPE (TREE_TYPE (result)), result),
1913 return convert (result_type, result);
1916 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1917 GNAT_FORMAL is how we find the descriptor record. */
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL supplies the descriptor record type; each descriptor field
   is initialized from its DECL_INITIAL with EXPR substituted for the
   placeholder.  EXPR is made addressable as a side effect.  */
1920 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
1922 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1924 tree const_list = NULL_TREE;
1926 expr = maybe_unconstrained_array (expr);
1927 gnat_mark_addressable (expr);
     /* Build the element list in reverse, then nreverse it below.  */
1929 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
1932 convert (TREE_TYPE (field),
1933 SUBSTITUTE_PLACEHOLDER_IN_EXPR
1934 (DECL_INITIAL (field), expr)),
1937 return gnat_build_constructor (record_type, nreverse (const_list));
1940 /* Indicate that we need to make the address of EXPR_NODE and it therefore
1941 should not be allocated in a register. Returns true if successful. */
1944 gnat_mark_addressable (tree expr_node)
1947 switch (TREE_CODE (expr_node))
1952 case ARRAY_RANGE_REF:
1955 case VIEW_CONVERT_EXPR:
1957 case NON_LVALUE_EXPR:
1959 expr_node = TREE_OPERAND (expr_node, 0);
1963 TREE_ADDRESSABLE (expr_node) = 1;
1969 TREE_ADDRESSABLE (expr_node) = 1;
1973 TREE_ADDRESSABLE (expr_node) = 1;
1977 return (DECL_CONST_CORRESPONDING_VAR (expr_node)
1978 && (gnat_mark_addressable
1979 (DECL_CONST_CORRESPONDING_VAR (expr_node))));