1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
/* Forward declarations of the file-scope helpers defined below.
   NOTE(review): the embedded original line numbers are non-contiguous,
   so this view of the file is elided; further declarations may exist
   in the full source.  */
48 static tree find_common_type (tree, tree);
49 static int contains_save_expr_p (tree);
50 static tree contains_null_expr (tree);
51 static tree compare_arrays (tree, tree, tree);
52 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
53 static tree build_simple_component_ref (tree, tree, tree, int);
55 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
58 This preparation consists of taking the ordinary representation of
59 an expression expr and producing a valid tree boolean expression
60 describing whether expr is nonzero. We could simply always do
62 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
64 but we optimize comparisons, &&, ||, and !.
66 The resulting type should always be the same as the input type.
67 This function is simpler than the corresponding C version since
68 the only possible operands will be things of Boolean type. */
/* Convert EXPR, an expression of Boolean type, into a valid truth-value
   expression of the same type (see the descriptive comment above).
   NOTE(review): lines are elided from this view (the embedded numbering
   jumps); the function braces and several case labels — presumably
   INTEGER_CST, REAL_CST and COND_EXPR for the constant-folding and
   distribution arms below — are missing and must be confirmed against
   the full source.  */
71 gnat_truthvalue_conversion (tree expr)
73 tree type = TREE_TYPE (expr);
75 switch (TREE_CODE (expr))
   /* Comparisons and short-circuit connectives already are truth values.  */
77 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
78 case LT_EXPR: case GT_EXPR:
79 case TRUTH_ANDIF_EXPR:
   /* Fold an integer constant into a canonical 0/1 of TYPE.  */
88 return (integer_zerop (expr) ? convert (type, integer_zero_node)
89 : convert (type, integer_one_node));
   /* Likewise for a real constant.  */
92 return (real_zerop (expr) ? convert (type, integer_zero_node)
93 : convert (type, integer_one_node));
96 /* Distribute the conversion into the arms of a COND_EXPR.  */
98 (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
99 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
100 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
   /* Default case: explicitly compare EXPR against zero.  */
103 return build_binary_op (NE_EXPR, type, expr,
104 convert (type, integer_zero_node));
108 /* Return the base type of TYPE. */
/* Return the base type of TYPE: first peel a left-justified modular
   wrapper record down to its single field's type, then follow the
   TREE_TYPE chain of integer/real subtypes up to the root type.
   NOTE(review): the function braces and the final return statement are
   elided from this view.  */
111 get_base_type (tree type)
113 if (TREE_CODE (type) == RECORD_TYPE
114 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type))
115 type = TREE_TYPE (TYPE_FIELDS (type));
   /* Integer and real subtypes chain to their parent via TREE_TYPE.  */
117 while (TREE_TYPE (type) != 0
118 && (TREE_CODE (type) == INTEGER_TYPE
119 || TREE_CODE (type) == REAL_TYPE))
120 type = TREE_TYPE (type);
125 /* Likewise, but only return types known to the Ada source. */
/* Like get_base_type, but stop at any subtype flagged as an "extra"
   (compiler-generated) subtype, so that only types visible in the Ada
   source are returned.  NOTE(review): braces and the return statement
   are elided from this view.  */
127 get_ada_base_type (tree type)
129 while (TREE_TYPE (type) != 0
130 && (TREE_CODE (type) == INTEGER_TYPE
131 || TREE_CODE (type) == REAL_TYPE)
132 && ! TYPE_EXTRA_SUBTYPE_P (type))
133 type = TREE_TYPE (type);
138 /* EXP is a GCC tree representing an address. See if we can find how
139 strictly the object at that address is aligned. Return that alignment
140 in bits. If we don't know anything about the alignment, return 0. */
/* Return the alignment, in bits, that the object at address EXP is known
   to have, or 0 if nothing is known (see the descriptive comment above).
   NOTE(review): several lines are elided from this view — braces, some
   case labels (the PLUS_EXPR/constant/MULT_EXPR arms below lack their
   labels), break statements and the default case are missing.  */
143 known_alignment (tree exp)
145 unsigned int this_alignment;
146 unsigned int lhs, rhs;
147 unsigned int type_alignment;
149 /* For pointer expressions, we know that the designated object is always at
150 least as strictly aligned as the designated subtype, so we account for
151 both type and expression information in this case.
153 Beware that we can still get a dummy designated subtype here (e.g. Taft
154 Amendment types), in which the alignment information is meaningless and
157 We always compute a type_alignment value and return the MAX of it
158 compared with what we get from the expression tree. Just set the
159 type_alignment value to 0 when the type information is to be ignored. */
161 = ((POINTER_TYPE_P (TREE_TYPE (exp))
162 && ! TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
163 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
165 switch (TREE_CODE (exp))
169 case NON_LVALUE_EXPR:
170 /* Conversions between pointers and integers don't change the alignment
171 of the underlying object. */
172 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
177 /* If two addresses are added, the alignment of the result is the
178 minimum of the two alignments. */
179 lhs = known_alignment (TREE_OPERAND (exp, 0));
180 rhs = known_alignment (TREE_OPERAND (exp, 1));
181 this_alignment = MIN (lhs, rhs);
185 /* The first part of this represents the lowest bit in the constant,
186 but it is in bytes, not bits. */
189 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
194 /* If we know the alignment of just one side, use it. Otherwise,
195 use the product of the alignments. */
196 lhs = known_alignment (TREE_OPERAND (exp, 0));
197 rhs = known_alignment (TREE_OPERAND (exp, 1));
199 if (lhs == 0 || rhs == 0)
200 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
202 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
206 this_alignment = expr_align (TREE_OPERAND (exp, 0));
   /* The final answer is the stricter of the type-derived and the
      expression-derived alignments.  */
214 return MAX (type_alignment, this_alignment);
217 /* We have a comparison or assignment operation on two types, T1 and T2,
218 which are both either array types or both record types.
219 Return the type that both operands should be converted to, if any.
220 Otherwise return zero. */
/* Given two array types or two record types T1 and T2 being compared or
   assigned, return the type both operands should be converted to, or zero
   if the "type mismatch" should be left in place (see comment above).
   NOTE(review): the braces and the return statements of each arm are
   elided from this view.  */
223 find_common_type (tree t1, tree t2)
225 /* If either type is non-BLKmode, use it. Note that we know that we will
226 not have any alignment problems since if we did the non-BLKmode
227 type could not have been used. */
228 if (TYPE_MODE (t1) != BLKmode)
230 else if (TYPE_MODE (t2) != BLKmode)
233 /* Otherwise, return the type that has a constant size. */
234 if (TREE_CONSTANT (TYPE_SIZE (t1)))
236 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
239 /* In this case, both types have variable size. It's probably
240 best to leave the "type mismatch" because changing it could
241 cause a bad self-referential reference. */
245 /* See if EXP contains a SAVE_EXPR in a position where we would
248 ??? This is a real kludge, but is probably the best approach short
249 of some very general solution. */
/* Recursively test whether EXP contains a SAVE_EXPR in a problematic
   position (see the comment above); nonzero means "yes".
   NOTE(review): the SAVE_EXPR base case, the CONSTRUCTOR/TREE_LIST case
   labels, the default case and the braces are elided from this view.  */
252 contains_save_expr_p (tree exp)
254 switch (TREE_CODE (exp))
259 case ADDR_EXPR: case INDIRECT_REF:
   /* Look through unary references and conversions.  */
261 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
262 return contains_save_expr_p (TREE_OPERAND (exp, 0));
   /* A constructor is scanned through its element list.  */
265 return (CONSTRUCTOR_ELTS (exp) != 0
266 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
   /* A list node: check this element's value, then the rest of the list.  */
269 return (contains_save_expr_p (TREE_VALUE (exp))
270 || (TREE_CHAIN (exp) != 0
271 && contains_save_expr_p (TREE_CHAIN (exp))));
278 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
279 it if so. This is used to detect types whose sizes involve computations
280 that are known to raise Constraint_Error. */
/* Search EXP for a NULL_EXPR used in a size computation and return it,
   or a null tree if none (see the comment above).  NOTE(review): the
   braces, the returns of TEM on success, the tree-class case labels and
   the COND_EXPR label for the three-operand arm are elided from this
   view.  */
283 contains_null_expr (tree exp)
287 if (TREE_CODE (exp) == NULL_EXPR)
   /* Dispatch first on the class of tree code: unary, then binary.  */
290 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
293 return contains_null_expr (TREE_OPERAND (exp, 0));
296 tem = contains_null_expr (TREE_OPERAND (exp, 0));
300 return contains_null_expr (TREE_OPERAND (exp, 1));
   /* Then on specific codes, e.g. the three operands of a conditional.  */
303 switch (TREE_CODE (exp))
306 return contains_null_expr (TREE_OPERAND (exp, 0));
309 tem = contains_null_expr (TREE_OPERAND (exp, 0));
313 tem = contains_null_expr (TREE_OPERAND (exp, 1));
317 return contains_null_expr (TREE_OPERAND (exp, 2));
328 /* Return an expression tree representing an equality comparison of
329 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
330 be of type RESULT_TYPE.
332 Two arrays are equal in one of two ways: (1) if both have zero length
333 in some dimension (not necessarily the same dimension) or (2) if the
334 lengths in each dimension are equal and the data is equal. We perform the
335 length tests in as efficient a manner as possible. */
/* Build an equality comparison of arrays A1 and A2, of type RESULT_TYPE
   (see the descriptive comment above): compare the length of every
   dimension, then — unless some dimension is statically zero-length —
   the data, and OR in the case where both arrays are null.
   NOTE(review): many lines are elided from this view (braces, several
   assignments such as those setting COMPARISON from the built EQ_EXPR,
   the declarations of TEM and NBT, and the loop's advance to the next
   dimension), so the body below is incomplete.  */
338 compare_arrays (tree result_type, tree a1, tree a2)
340 tree t1 = TREE_TYPE (a1);
341 tree t2 = TREE_TYPE (a2);
342 tree result = convert (result_type, integer_one_node);
343 tree a1_is_null = convert (result_type, integer_zero_node);
344 tree a2_is_null = convert (result_type, integer_zero_node);
345 int length_zero_p = 0;
347 /* Process each dimension separately and compare the lengths. If any
348 dimension has a size known to be zero, set LENGTH_ZERO_P to 1 to
349 suppress the comparison of the data. */
350 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
352 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
353 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
354 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
355 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
356 tree bt = get_base_type (TREE_TYPE (lb1));
357 tree length1 = fold (build (MINUS_EXPR, bt, ub1, lb1));
358 tree length2 = fold (build (MINUS_EXPR, bt, ub2, lb2));
361 tree comparison, this_a1_is_null, this_a2_is_null;
363 /* If the length of the first array is a constant, swap our operands
364 unless the length of the second array is the constant zero.
365 Note that we have set the `length' values to the length - 1. */
366 if (TREE_CODE (length1) == INTEGER_CST
367 && ! integer_zerop (fold (build (PLUS_EXPR, bt, length2,
368 convert (bt, integer_one_node)))))
370 tem = a1, a1 = a2, a2 = tem;
371 tem = t1, t1 = t2, t2 = tem;
372 tem = lb1, lb1 = lb2, lb2 = tem;
373 tem = ub1, ub1 = ub2, ub2 = tem;
374 tem = length1, length1 = length2, length2 = tem;
375 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
378 /* If the length of this dimension in the second array is the constant
379 zero, we can just go inside the original bounds for the first
380 array and see if last < first. */
381 if (integer_zerop (fold (build (PLUS_EXPR, bt, length2,
382 convert (bt, integer_one_node)))))
384 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
385 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
387 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
388 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
389 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
392 this_a1_is_null = comparison;
393 this_a2_is_null = convert (result_type, integer_one_node);
396 /* If the length is some other constant value, we know that
397 this dimension in the first array cannot be superflat, so we
398 can just use its length from the actual stored bounds. */
399 else if (TREE_CODE (length2) == INTEGER_CST)
401 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
402 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
403 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
404 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
405 nbt = get_base_type (TREE_TYPE (ub1));
408 = build_binary_op (EQ_EXPR, result_type,
409 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
410 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
412 /* Note that we know that UB2 and LB2 are constant and hence
413 cannot contain a PLACEHOLDER_EXPR. */
415 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
416 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
418 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
419 this_a2_is_null = convert (result_type, integer_zero_node);
422 /* Otherwise compare the computed lengths. */
425 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
426 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
429 = build_binary_op (EQ_EXPR, result_type, length1, length2);
432 = build_binary_op (LT_EXPR, result_type, length1,
433 convert (bt, integer_zero_node));
435 = build_binary_op (LT_EXPR, result_type, length2,
436 convert (bt, integer_zero_node));
   /* Accumulate this dimension's tests into the running results.  */
439 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
442 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
443 this_a1_is_null, a1_is_null);
444 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
445 this_a2_is_null, a2_is_null);
451 /* Unless the size of some bound is known to be zero, compare the
452 data in the array. */
455 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
458 a1 = convert (type, a1), a2 = convert (type, a2);
460 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
461 fold (build (EQ_EXPR, result_type, a1, a2)));
465 /* The result is also true if both sizes are zero. */
466 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
467 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
468 a1_is_null, a2_is_null),
471 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
472 starting the comparison above since the place it would be otherwise
473 evaluated would be wrong. */
475 if (contains_save_expr_p (a1))
476 result = build (COMPOUND_EXPR, result_type, a1, result);
478 if (contains_save_expr_p (a2))
479 result = build (COMPOUND_EXPR, result_type, a2, result);
484 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
485 type TYPE. We know that TYPE is a modular type with a nonbinary
/* Apply OP_CODE to LHS and RHS of modular type TYPE whose modulus is not
   a power of two: perform the operation in a wide-enough intermediate
   type, then reduce the result back into [0, modulus).
   NOTE(review): lines are elided from this view — the remaining
   parameters of the signature, braces, the declarations of OP_TYPE /
   RESULT / UNSIGNEDP and the tails of the COND_EXPR builds at the end
   are missing.  */
489 nonbinary_modular_operation (enum tree_code op_code,
494 tree modulus = TYPE_MODULUS (type);
495 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
496 unsigned int precision;
501 /* If this is an addition of a constant, convert it to a subtraction
502 of a constant since we can do that faster. */
503 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
504 rhs = fold (build (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
506 /* For the logical operations, we only need PRECISION bits. For
507 addition and subtraction, we need one more and for multiplication we
508 need twice as many. But we never want to make a size smaller than
510 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
511 needed_precision += 1;
512 else if (op_code == MULT_EXPR)
513 needed_precision *= 2;
515 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
517 /* Unsigned will do for everything but subtraction. */
518 if (op_code == MINUS_EXPR)
521 /* If our type is the wrong signedness or isn't wide enough, make a new
522 type and convert both our operands to it. */
523 if (TYPE_PRECISION (op_type) < precision
524 || TYPE_UNSIGNED (op_type) != unsignedp)
526 /* Copy the node so we ensure it can be modified to make it modular. */
527 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
528 modulus = convert (op_type, modulus);
529 SET_TYPE_MODULUS (op_type, modulus);
530 TYPE_MODULAR_P (op_type) = 1;
531 lhs = convert (op_type, lhs);
532 rhs = convert (op_type, rhs);
535 /* Do the operation, then we'll fix it up. */
536 result = fold (build (op_code, op_type, lhs, rhs));
538 /* For multiplication, we have no choice but to do a full modulus
539 operation. However, we want to do this in the narrowest
541 if (op_code == MULT_EXPR)
543 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
544 modulus = convert (div_type, modulus);
545 SET_TYPE_MODULUS (div_type, modulus);
546 TYPE_MODULAR_P (div_type) = 1;
547 result = convert (op_type,
548 fold (build (TRUNC_MOD_EXPR, div_type,
549 convert (div_type, result), modulus)));
552 /* For subtraction, add the modulus back if we are negative. */
553 else if (op_code == MINUS_EXPR)
555 result = save_expr (result);
556 result = fold (build (COND_EXPR, op_type,
557 build (LT_EXPR, integer_type_node, result,
558 convert (op_type, integer_zero_node)),
559 fold (build (PLUS_EXPR, op_type,
564 /* For the other operations, subtract the modulus if we are >= it. */
567 result = save_expr (result);
568 result = fold (build (COND_EXPR, op_type,
569 build (GE_EXPR, integer_type_node,
571 fold (build (MINUS_EXPR, op_type,
   /* Finally convert the reduced result back to the original type.  */
576 return convert (type, result);
579 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
580 desired for the result. Usually the operation is to be performed
581 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
582 in which case the type to be used will be derived from the operands.
584 This function is very much unlike the ones for C and C++ since we
585 have already done any type conversion and matching required. All we
586 have to do here is validate the work done by SEM and handle subtypes. */
/* Build a binary operation OP_CODE on LEFT_OPERAND and RIGHT_OPERAND,
   producing RESULT_TYPE (see the descriptive comment above): validate
   the operands, normalize conversions, dispatch per operator class, then
   fold and post-process the built node.
   NOTE(review): many lines are elided from this view — the declarations
   of RESULT / BEST_TYPE / MODULUS, the outer switch statement and several
   of its case labels (MODIFY_EXPR, ARRAY_REF, the comparison operators,
   the shift operators, the arithmetic default), braces, gigi_abort
   calls and break statements are missing, so the body below is
   incomplete.  */
589 build_binary_op (enum tree_code op_code, tree result_type,
590 tree left_operand, tree right_operand)
592 tree left_type = TREE_TYPE (left_operand);
593 tree right_type = TREE_TYPE (right_operand);
594 tree left_base_type = get_base_type (left_type);
595 tree right_base_type = get_base_type (right_type);
596 tree operation_type = result_type;
600 int has_side_effects = 0;
   /* Peel a left-justified modular wrapper off the operation type.  */
602 if (operation_type != 0
603 && TREE_CODE (operation_type) == RECORD_TYPE
604 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
605 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
   /* Scalar "extra" subtypes are replaced by their base type.  */
607 if (operation_type != 0
608 && ! AGGREGATE_TYPE_P (operation_type)
609 && TYPE_EXTRA_SUBTYPE_P (operation_type))
610 operation_type = get_base_type (operation_type);
   /* Remember the modulus of a nonzero modular operation type.  */
612 modulus = (operation_type != 0 && TREE_CODE (operation_type) == INTEGER_TYPE
613 && TYPE_MODULAR_P (operation_type)
614 ? TYPE_MODULUS (operation_type) : 0);
619 /* If there were any integral or pointer conversions on LHS, remove
620 them; we'll be putting them back below if needed. Likewise for
621 conversions between array and record types. But don't do this if
622 the right operand is not BLKmode (for packed arrays)
623 unless we are not changing the mode. */
624 while ((TREE_CODE (left_operand) == CONVERT_EXPR
625 || TREE_CODE (left_operand) == NOP_EXPR
626 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
627 && (((INTEGRAL_TYPE_P (left_type)
628 || POINTER_TYPE_P (left_type))
629 && (INTEGRAL_TYPE_P (TREE_TYPE
630 (TREE_OPERAND (left_operand, 0)))
631 || POINTER_TYPE_P (TREE_TYPE
632 (TREE_OPERAND (left_operand, 0)))))
633 || (((TREE_CODE (left_type) == RECORD_TYPE
634 /* Don't remove conversions to left-justified modular
636 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
637 || TREE_CODE (left_type) == ARRAY_TYPE)
638 && ((TREE_CODE (TREE_TYPE
639 (TREE_OPERAND (left_operand, 0)))
641 || (TREE_CODE (TREE_TYPE
642 (TREE_OPERAND (left_operand, 0)))
644 && (TYPE_MODE (right_type) == BLKmode
645 || (TYPE_MODE (left_type)
646 == TYPE_MODE (TREE_TYPE
648 (left_operand, 0))))))))
650 left_operand = TREE_OPERAND (left_operand, 0);
651 left_type = TREE_TYPE (left_operand);
654 if (operation_type == 0)
655 operation_type = left_type;
657 /* If the RHS has a conversion between record and array types and
658 an inner type is no worse, use it. Note we cannot do this for
659 modular types or types with TYPE_ALIGN_OK, since the latter
660 might indicate a conversion between a root type and a class-wide
661 type, which we must not remove. */
662 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
663 && ((TREE_CODE (right_type) == RECORD_TYPE
664 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type)
665 && ! TYPE_ALIGN_OK (right_type)
666 && ! TYPE_IS_FAT_POINTER_P (right_type))
667 || TREE_CODE (right_type) == ARRAY_TYPE)
668 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
670 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
671 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
673 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
674 && ! (TYPE_IS_FAT_POINTER_P
675 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
676 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
679 == find_common_type (right_type,
680 TREE_TYPE (TREE_OPERAND
681 (right_operand, 0))))
682 || right_type != best_type))
684 right_operand = TREE_OPERAND (right_operand, 0);
685 right_type = TREE_TYPE (right_operand);
688 /* If we are copying one array or record to another, find the best type
690 if (((TREE_CODE (left_type) == ARRAY_TYPE
691 && TREE_CODE (right_type) == ARRAY_TYPE)
692 || (TREE_CODE (left_type) == RECORD_TYPE
693 && TREE_CODE (right_type) == RECORD_TYPE))
694 && (best_type = find_common_type (left_type, right_type)) != 0)
695 operation_type = best_type;
697 /* If a class-wide type may be involved, force use of the RHS type. */
698 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
699 operation_type = right_type;
701 /* Ensure everything on the LHS is valid. If we have a field reference,
702 strip anything that get_inner_reference can handle. Then remove any
703 conversions between types having the same code and mode. Mark
704 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
705 either an INDIRECT_REF or a decl. */
706 result = left_operand;
709 tree restype = TREE_TYPE (result);
711 if (TREE_CODE (result) == COMPONENT_REF
712 || TREE_CODE (result) == ARRAY_REF
713 || TREE_CODE (result) == ARRAY_RANGE_REF)
714 while (handled_component_p (result))
715 result = TREE_OPERAND (result, 0);
716 else if (TREE_CODE (result) == REALPART_EXPR
717 || TREE_CODE (result) == IMAGPART_EXPR
718 || ((TREE_CODE (result) == NOP_EXPR
719 || TREE_CODE (result) == CONVERT_EXPR)
720 && (((TREE_CODE (restype)
721 == TREE_CODE (TREE_TYPE
722 (TREE_OPERAND (result, 0))))
723 && (TYPE_MODE (TREE_TYPE
724 (TREE_OPERAND (result, 0)))
725 == TYPE_MODE (restype)))
726 || TYPE_ALIGN_OK (restype))))
727 result = TREE_OPERAND (result, 0);
728 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
730 TREE_ADDRESSABLE (result) = 1;
731 result = TREE_OPERAND (result, 0);
737 if (TREE_CODE (result) != INDIRECT_REF && TREE_CODE (result) != NULL_EXPR
738 && ! DECL_P (result))
741 /* Convert the right operand to the operation type unless
742 it is either already of the correct type or if the type
743 involves a placeholder, since the RHS may not have the same
745 if (operation_type != right_type
746 && (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
748 right_operand = convert (operation_type, right_operand);
749 right_type = operation_type;
752 /* If the modes differ, make up a bogus type and convert the RHS to
753 it. This can happen with packed types. */
754 if (TYPE_MODE (left_type) != TYPE_MODE (right_type))
756 tree new_type = copy_node (left_type);
758 TYPE_SIZE (new_type) = TYPE_SIZE (right_type);
759 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (right_type);
760 TYPE_MAIN_VARIANT (new_type) = new_type;
761 right_operand = convert (new_type, right_operand);
764 has_side_effects = 1;
769 if (operation_type == 0)
770 operation_type = TREE_TYPE (left_type);
772 /* ... fall through ... */
774 case ARRAY_RANGE_REF:
776 /* First convert the right operand to its base type. This will
777 prevent unneeded signedness conversions when sizetype is wider than
779 right_operand = convert (right_base_type, right_operand);
780 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
782 if (! TREE_CONSTANT (right_operand)
783 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
784 gnat_mark_addressable (left_operand);
793 if (POINTER_TYPE_P (left_type))
796 /* ... fall through ... */
800 /* If either operand is a NULL_EXPR, just return a new one. */
801 if (TREE_CODE (left_operand) == NULL_EXPR)
802 return build (op_code, result_type,
803 build1 (NULL_EXPR, integer_type_node,
804 TREE_OPERAND (left_operand, 0)),
807 else if (TREE_CODE (right_operand) == NULL_EXPR)
808 return build (op_code, result_type,
809 build1 (NULL_EXPR, integer_type_node,
810 TREE_OPERAND (right_operand, 0)),
813 /* If either object is a left-justified modular type, get the
814 fields from within. */
815 if (TREE_CODE (left_type) == RECORD_TYPE
816 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
818 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
820 left_type = TREE_TYPE (left_operand);
821 left_base_type = get_base_type (left_type);
824 if (TREE_CODE (right_type) == RECORD_TYPE
825 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type))
827 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
829 right_type = TREE_TYPE (right_operand);
830 right_base_type = get_base_type (right_type);
833 /* If both objects are arrays, compare them specially. */
834 if ((TREE_CODE (left_type) == ARRAY_TYPE
835 || (TREE_CODE (left_type) == INTEGER_TYPE
836 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
837 && (TREE_CODE (right_type) == ARRAY_TYPE
838 || (TREE_CODE (right_type) == INTEGER_TYPE
839 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
841 result = compare_arrays (result_type, left_operand, right_operand);
843 if (op_code == EQ_EXPR)
845 else if (op_code == NE_EXPR)
846 result = invert_truthvalue (result);
853 /* Otherwise, the base types must be the same unless the objects are
854 records. If we have records, use the best type and convert both
855 operands to that type. */
856 if (left_base_type != right_base_type)
858 if (TREE_CODE (left_base_type) == RECORD_TYPE
859 && TREE_CODE (right_base_type) == RECORD_TYPE)
861 /* The only way these are permitted to be the same is if both
862 types have the same name. In that case, one of them must
863 not be self-referential. Use that one as the best type.
864 Even better is if one is of fixed size. */
867 if (TYPE_NAME (left_base_type) == 0
868 || TYPE_NAME (left_base_type) != TYPE_NAME (right_base_type))
871 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
872 best_type = left_base_type;
873 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
874 best_type = right_base_type;
875 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
876 best_type = left_base_type;
877 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
878 best_type = right_base_type;
882 left_operand = convert (best_type, left_operand);
883 right_operand = convert (best_type, right_operand);
889 /* If we are comparing a fat pointer against zero, we need to
890 just compare the data pointer. */
891 else if (TYPE_FAT_POINTER_P (left_base_type)
892 && TREE_CODE (right_operand) == CONSTRUCTOR
893 && integer_zerop (TREE_VALUE
894 (CONSTRUCTOR_ELTS (right_operand))))
896 right_operand = build_component_ref (left_operand, NULL_TREE,
897 TYPE_FIELDS (left_base_type),
899 left_operand = convert (TREE_TYPE (right_operand),
904 left_operand = convert (left_base_type, left_operand);
905 right_operand = convert (right_base_type, right_operand);
911 case PREINCREMENT_EXPR:
912 case PREDECREMENT_EXPR:
913 case POSTINCREMENT_EXPR:
914 case POSTDECREMENT_EXPR:
915 /* In these, the result type and the left operand type should be the
916 same. Do the operation in the base type of those and convert the
917 right operand (which is an integer) to that type.
919 Note that these operations are only used in loop control where
920 we guarantee that no overflow can occur. So nothing special need
921 be done for modular types. */
923 if (left_type != result_type)
926 operation_type = get_base_type (result_type);
927 left_operand = convert (operation_type, left_operand);
928 right_operand = convert (operation_type, right_operand);
929 has_side_effects = 1;
937 /* The RHS of a shift can be any type. Also, ignore any modulus
938 (we used to abort, but this is needed for unchecked conversion
939 to modular types). Otherwise, processing is the same as normal. */
940 if (operation_type != left_base_type)
944 left_operand = convert (operation_type, left_operand);
947 case TRUTH_ANDIF_EXPR:
948 case TRUTH_ORIF_EXPR:
952 left_operand = gnat_truthvalue_conversion (left_operand);
953 right_operand = gnat_truthvalue_conversion (right_operand);
959 /* For binary modulus, if the inputs are in range, so are the
961 if (modulus != 0 && integer_pow2p (modulus))
967 if (TREE_TYPE (result_type) != left_base_type
968 || TREE_TYPE (result_type) != right_base_type)
971 left_operand = convert (left_base_type, left_operand);
972 right_operand = convert (right_base_type, right_operand);
975 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
976 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
977 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
978 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
979 /* These always produce results lower than either operand. */
985 /* The result type should be the same as the base types of
986 both operands (and they should be the same). Convert
987 everything to the result type. */
989 if (operation_type != left_base_type
990 || left_base_type != right_base_type)
993 left_operand = convert (operation_type, left_operand);
994 right_operand = convert (operation_type, right_operand);
   /* Nonbinary modulus: delegate to the helper defined above.  */
997 if (modulus != 0 && ! integer_pow2p (modulus))
999 result = nonbinary_modular_operation (op_code, operation_type,
1000 left_operand, right_operand);
1003 /* If either operand is a NULL_EXPR, just return a new one. */
1004 else if (TREE_CODE (left_operand) == NULL_EXPR)
1005 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1006 else if (TREE_CODE (right_operand) == NULL_EXPR)
1007 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1008 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1009 result = fold (build (op_code, operation_type, left_operand, right_operand,
1010 NULL_TREE, NULL_TREE));
1013 = fold (build (op_code, operation_type, left_operand, right_operand));
   /* Propagate side effects and constancy onto the built node.  */
1015 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1016 TREE_CONSTANT (result)
1017 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1018 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1020 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1021 && TYPE_VOLATILE (operation_type))
1022 TREE_THIS_VOLATILE (result) = 1;
1024 /* If we are working with modular types, perform the MOD operation
1025 if something above hasn't eliminated the need for it. */
1027 result = fold (build (FLOOR_MOD_EXPR, operation_type, result,
1028 convert (operation_type, modulus)));
1030 if (result_type != 0 && result_type != operation_type)
1031 result = convert (result_type, result);
1036 /* Similar, but for unary operations.  Build a tree for OP_CODE applied to
   OPERAND, producing a result of type RESULT_TYPE; if RESULT_TYPE is null,
   the type is derived from the operand's type.  Handles the Ada-specific
   cases visible below: left-justified modular record types, address-taking
   ('Address / ADDR_EXPR) of several operand forms, dereference of fat and
   thin pointers, and negation/NOT on modular integer types.
   NOTE(review): several interior lines of this function are not visible in
   this view; the comments below describe only the code that is shown.  */
1039 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1041 tree type = TREE_TYPE (operand);
1042 tree base_type = get_base_type (type);
1043 tree operation_type = result_type;
1045 int side_effects = 0;
/* If the requested result type is a left-justified modular record,
   operate on its single field's type instead.  */
1047 if (operation_type != 0
1048 && TREE_CODE (operation_type) == RECORD_TYPE
1049 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
1050 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
/* Strip "extra" scalar subtypes down to their base type before
   performing the operation.  */
1052 if (operation_type != 0
1053 && ! AGGREGATE_TYPE_P (operation_type)
1054 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1055 operation_type = get_base_type (operation_type);
1061 if (operation_type == 0)
1062 result_type = operation_type = TREE_TYPE (type);
1063 else if (result_type != TREE_TYPE (type))
1066 result = fold (build1 (op_code, operation_type, operand));
1069 case TRUTH_NOT_EXPR:
1070 if (result_type != base_type)
1073 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1076 case ATTR_ADDR_EXPR:
/* Address-taking: dispatch on the form of the operand.  */
1078 switch (TREE_CODE (operand))
1081 case UNCONSTRAINED_ARRAY_REF:
/* The address of an unconstrained array ref is just its fat/thin
   pointer operand.  */
1082 result = TREE_OPERAND (operand, 0);
1084 /* Make sure the type here is a pointer, not a reference.
1085 GCC wants pointer types for function addresses. */
1086 if (result_type == 0)
1087 result_type = build_pointer_type (type);
1092 TREE_TYPE (result) = type = build_pointer_type (type);
1096 case ARRAY_RANGE_REF:
1099 /* If this is for 'Address, find the address of the prefix and
1100 add the offset to the field. Otherwise, do this the normal
1102 if (op_code == ATTR_ADDR_EXPR)
1104 HOST_WIDE_INT bitsize;
1105 HOST_WIDE_INT bitpos;
1107 enum machine_mode mode;
1108 int unsignedp, volatilep;
1110 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1111 &mode, &unsignedp, &volatilep);
1113 /* If INNER is a padding type whose field has a self-referential
1114 size, convert to that inner type. We know the offset is zero
1115 and we need to have that type visible. */
1116 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1117 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1118 && (CONTAINS_PLACEHOLDER_P
1119 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1120 (TREE_TYPE (inner)))))))
1121 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1124 /* Compute the offset as a byte offset from INNER. */
1126 offset = size_zero_node;
/* 'Address of a bitfield-packed component is only meaningful on a
   storage-unit boundary; warn otherwise.  */
1128 if (bitpos % BITS_PER_UNIT != 0)
1130 ("taking address of object not aligned on storage unit?",
1133 offset = size_binop (PLUS_EXPR, offset,
1134 size_int (bitpos / BITS_PER_UNIT));
1136 /* Take the address of INNER, convert the offset to void *, and
1137 add them. It will later be converted to the desired result
1139 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1140 inner = convert (ptr_void_type_node, inner);
1141 offset = convert (ptr_void_type_node, offset);
1142 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1144 result = convert (build_pointer_type (TREE_TYPE (operand)),
1151 /* If this is just a constructor for a padded record, we can
1152 just take the address of the single field and convert it to
1153 a pointer to our type. */
1154 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1157 = build_unary_op (ADDR_EXPR, NULL_TREE,
1158 TREE_VALUE (CONSTRUCTOR_ELTS (operand)))
1159 result = convert (build_pointer_type (TREE_TYPE (operand)),
/* For a view conversion between two aggregates, take the address of
   the underlying object instead.  */
1167 if (AGGREGATE_TYPE_P (type)
1168 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1169 return build_unary_op (ADDR_EXPR, result_type,
1170 TREE_OPERAND (operand, 0));
1172 /* If this NOP_EXPR doesn't change the mode, get the result type
1173 from this type and go down. We need to do this in case
1174 this is a conversion of a CONST_DECL. */
1175 if (TYPE_MODE (type) != BLKmode
1176 && (TYPE_MODE (type)
1177 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1178 return build_unary_op (ADDR_EXPR,
1180 ? build_pointer_type (type)
1182 TREE_OPERAND (operand, 0));
/* A CONST_DECL has no address; use its corresponding variable.  */
1186 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1188 /* ... fall through ... */
1193 /* If we are taking the address of a padded record whose field
1194 contains a template, take the address of the template. */
1195 if (TREE_CODE (type) == RECORD_TYPE
1196 && TYPE_IS_PADDING_P (type)
1197 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1198 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1200 type = TREE_TYPE (TYPE_FIELDS (type));
1201 operand = convert (type, operand);
1204 if (type != error_mark_node)
1205 operation_type = build_pointer_type (type);
/* The operand must live in memory for its address to be valid.  */
1207 gnat_mark_addressable (operand);
1208 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1211 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1215 /* If we want to refer to an entire unconstrained array,
1216 make up an expression to do so. This will never survive to
1217 the backend. If TYPE is a thin pointer, first convert the
1218 operand to a fat pointer. */
1219 if (TYPE_THIN_POINTER_P (type)
1220 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
1223 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1225 type = TREE_TYPE (operand);
1228 if (TYPE_FAT_POINTER_P (type))
1230 result = build1 (UNCONSTRAINED_ARRAY_REF,
1231 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1232 TREE_READONLY (result) = TREE_STATIC (result)
1233 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
/* Dereferencing the address of X is just X itself.  */
1235 else if (TREE_CODE (operand) == ADDR_EXPR)
1236 result = TREE_OPERAND (operand, 0);
1240 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1241 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1245 = (! TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
/* Negation/NOT on a modular type: MODULUS is nonzero only if the
   operation type is a modular integer type.  */
1251 tree modulus = ((operation_type != 0
1252 && TREE_CODE (operation_type) == INTEGER_TYPE
1253 && TYPE_MODULAR_P (operation_type))
1254 ? TYPE_MODULUS (operation_type) : 0);
1255 int mod_pow2 = modulus != 0 && integer_pow2p (modulus);
1257 /* If this is a modular type, there are various possibilities
1258 depending on the operation and whether the modulus is a
1259 power of two or not. */
1263 if (operation_type != base_type)
1266 operand = convert (operation_type, operand);
1268 /* The fastest in the negate case for binary modulus is
1269 the straightforward code; the TRUNC_MOD_EXPR below
1270 is an AND operation. */
1271 if (op_code == NEGATE_EXPR && mod_pow2)
1272 result = fold (build (TRUNC_MOD_EXPR, operation_type,
1273 fold (build1 (NEGATE_EXPR, operation_type,
1277 /* For nonbinary negate case, return zero for zero operand,
1278 else return the modulus minus the operand. If the modulus
1279 is a power of two minus one, we can do the subtraction
1280 as an XOR since it is equivalent and faster on most machines. */
1281 else if (op_code == NEGATE_EXPR && ! mod_pow2)
1283 if (integer_pow2p (fold (build (PLUS_EXPR, operation_type,
1285 convert (operation_type,
1286 integer_one_node)))))
1287 result = fold (build (BIT_XOR_EXPR, operation_type,
1290 result = fold (build (MINUS_EXPR, operation_type,
1293 result = fold (build (COND_EXPR, operation_type,
1294 fold (build (NE_EXPR, integer_type_node,
1296 convert (operation_type,
1297 integer_zero_node))),
1302 /* For the NOT cases, we need a constant equal to
1303 the modulus minus one. For a binary modulus, we
1304 XOR against the constant and subtract the operand from
1305 that constant for nonbinary modulus. */
1307 tree cnst = fold (build (MINUS_EXPR, operation_type, modulus,
1308 convert (operation_type,
1309 integer_one_node)));
1312 result = fold (build (BIT_XOR_EXPR, operation_type,
1315 result = fold (build (MINUS_EXPR, operation_type,
1323 /* ... fall through ... */
/* Default case: convert the operand to the operation type when needed
   and build the plain unary tree.  */
1326 if (operation_type != base_type)
1329 result = fold (build1 (op_code, operation_type, convert (operation_type,
1335 TREE_SIDE_EFFECTS (result) = 1;
1336 if (TREE_CODE (result) == INDIRECT_REF)
1337 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
/* Finally, coerce the result to the requested type if it differs.  */
1340 if (result_type != 0 && TREE_TYPE (result) != result_type)
1341 result = convert (result_type, result);
1346 /* Similar, but for COND_EXPR.  Build a conditional expression of type
   RESULT_TYPE that evaluates to TRUE_OPERAND or FALSE_OPERAND according
   to CONDITION_OPERAND.  Both arms are first converted to RESULT_TYPE;
   unconstrained or self-referential result types are handled by taking
   addresses and dereferencing the selected pointer.  */
1349 build_cond_expr (tree result_type, tree condition_operand,
1350 tree true_operand, tree false_operand)
1355 /* The front-end verifies that result, true and false operands have same base
1356 type. Convert everything to the result type. */
1358 true_operand = convert (result_type, true_operand);
1359 false_operand = convert (result_type, false_operand);
1361 /* If the result type is unconstrained, take the address of
1362 the operands and then dereference our result. */
1363 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1364 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1367 result_type = build_pointer_type (result_type);
1368 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1369 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1372 result = fold (build (COND_EXPR, result_type, condition_operand,
1373 true_operand, false_operand));
1375 /* If either operand is a SAVE_EXPR (possibly surrounded by
1376 arithmetic), make sure it gets done. */
1377 true_operand = skip_simple_arithmetic (true_operand);
1378 false_operand = skip_simple_arithmetic (false_operand);
1380 if (TREE_CODE (true_operand) == SAVE_EXPR)
1381 result = build (COMPOUND_EXPR, result_type, true_operand, result);
1383 if (TREE_CODE (false_operand) == SAVE_EXPR)
1384 result = build (COMPOUND_EXPR, result_type, false_operand, result);
1386 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1387 SAVE_EXPRs with side effects and not shared by both arms. */
/* Undo the address-taking done above for unconstrained result types.  */
1390 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1396 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
   the CALL_EXPR, which is marked as having side effects.  */
1400 build_call_1_expr (tree fundecl, tree arg)
1402 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1403 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1404 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
/* A call must never be dead-code eliminated.  */
1407 TREE_SIDE_EFFECTS (call) = 1;
1412 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
   the CALL_EXPR, which is marked as having side effects.  */
1416 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1418 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1419 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1420 chainon (chainon (NULL_TREE,
1421 build_tree_list (NULL_TREE, arg1)),
1422 build_tree_list (NULL_TREE, arg2)),
/* A call must never be dead-code eliminated.  */
1425 TREE_SIDE_EFFECTS (call) = 1;
1430 /* Likewise to call FUNDECL with no arguments.  Return the CALL_EXPR,
   which is marked as having side effects.  */
1433 build_call_0_expr (tree fundecl)
1435 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1436 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1437 NULL_TREE, NULL_TREE);
/* A call must never be dead-code eliminated.  */
1439 TREE_SIDE_EFFECTS (call) = 1;
1444 /* Call a function that raises an exception and pass the line number and file
1445 name, if requested. MSG says which exception function to call; it indexes
   the gnat_raise_decls table.  */
1448 build_call_raise (int msg)
1450 tree fndecl = gnat_raise_decls[msg];
/* With the -gnatdN debug flag, suppress the source file name; otherwise
   pass the name of the file currently being compiled.  */
1451 const char *str = Debug_Flag_NN ? "" : ref_filename;
1452 int len = strlen (str) + 1;
1453 tree filename = build_string (len, str);
/* Give the string literal a proper char-array type so its address can be
   passed to the runtime routine.  */
1455 TREE_TYPE (filename)
1456 = build_array_type (char_type_node,
1457 build_index_type (build_int_2 (len, 0)));
/* Arguments: pointer to the file name, then the current line number.  */
1460 build_call_2_expr (fndecl,
1461 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1463 build_int_2 (input_line, 0));
1466 /* Return a CONSTRUCTOR of TYPE whose list is LIST.  The result is marked
   TREE_CONSTANT/TREE_STATIC only if every element is a valid constant
   initializer and (for records) the fields appear in bit-position order;
   TREE_SIDE_EFFECTS is set if any element has side effects.  */
1469 gnat_build_constructor (tree type, tree list)
/* Start by assuming the constructor is constant if the type itself has a
   constant size; the element scan below can clear this.  */
1472 int allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1473 int side_effects = 0;
1476 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
/* Non-constant element, non-constant bit-field value, or a value that
   varasm cannot emit statically: the whole constructor is non-constant.  */
1478 if (! TREE_CONSTANT (TREE_VALUE (elmt))
1479 || (TREE_CODE (type) == RECORD_TYPE
1480 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1481 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1482 || ! initializer_constant_valid_p (TREE_VALUE (elmt),
1483 TREE_TYPE (TREE_VALUE (elmt))))
1486 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1489 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1490 be executing the code we generate here in that case, but handle it
1491 specially to avoid the compiler blowing up. */
1492 if (TREE_CODE (type) == RECORD_TYPE
1494 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1495 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1498 /* If TYPE is a RECORD_TYPE and the fields are not in the
1499 same order as their bit position, don't treat this as constant
1500 since varasm.c can't handle it. */
1501 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1503 tree last_pos = bitsize_zero_node;
1506 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1508 tree this_pos = bit_position (field);
1510 if (TREE_CODE (this_pos) != INTEGER_CST
1511 || tree_int_cst_lt (this_pos, last_pos))
1517 last_pos = this_pos;
1521 result = build_constructor (type, list);
1522 TREE_CONSTANT (result) = allconstant;
1523 TREE_STATIC (result) = allconstant;
1524 TREE_SIDE_EFFECTS (result) = side_effects;
1525 TREE_READONLY (result) = TYPE_READONLY (type);
1530 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1531 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1532 for the field. Don't fold the result if NO_FOLD_P is nonzero.
1534 We also handle the fact that we might have been passed a pointer to the
1535 actual record and know how to look for fields in variant parts.
   Returns a null tree when the reference cannot be built (non-record type,
   missing size, or field not found) — see build_component_ref for the
   error-reporting wrapper.  */
1538 build_simple_component_ref (tree record_variable, tree component,
1539 tree field, int no_fold_p)
1541 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
/* Only records and (qualified) unions with a known size can be
   component-referenced.  */
1544 if ((TREE_CODE (record_type) != RECORD_TYPE
1545 && TREE_CODE (record_type) != UNION_TYPE
1546 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1547 || TYPE_SIZE (record_type) == 0)
1550 /* Either COMPONENT or FIELD must be specified, but not both. */
1551 if ((component != 0) == (field != 0))
1554 /* If no field was specified, look for a field with the specified name
1555 in the current record only. */
1557 for (field = TYPE_FIELDS (record_type); field;
1558 field = TREE_CHAIN (field))
1559 if (DECL_NAME (field) == component)
1565 /* If this field is not in the specified record, see if we can find
1566 something in the record whose original field is the same as this one. */
1567 if (DECL_CONTEXT (field) != record_type)
1568 /* Check if there is a field with name COMPONENT in the record. */
1572 /* First loop thru normal components. */
1574 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1575 new_field = TREE_CHAIN (new_field))
1576 if (DECL_ORIGINAL_FIELD (new_field) == field
1577 || new_field == DECL_ORIGINAL_FIELD (field)
1578 || (DECL_ORIGINAL_FIELD (field) != 0
1579 && (DECL_ORIGINAL_FIELD (field)
1580 == DECL_ORIGINAL_FIELD (new_field))))
1583 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1584 the component in the first search. Doing this search in 2 steps
1585 is required to avoid hidden homonymous fields in the
/* Recurse into each internal (variant-part) field, looking for the
   requested field inside it.  */
1589 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1590 new_field = TREE_CHAIN (new_field))
1591 if (DECL_INTERNAL_P (new_field))
1594 = build_simple_component_ref (record_variable,
1595 NULL_TREE, new_field, no_fold_p);
1596 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1609 /* It would be nice to call "fold" here, but that can lose a type
1610 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1611 ref = build (COMPONENT_REF, TREE_TYPE (field), record_variable, field,
/* Propagate readonly and volatile from the record and the field to the
   reference itself.  */
1614 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1615 TREE_READONLY (ref) = 1;
1616 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1617 || TYPE_VOLATILE (record_type))
1618 TREE_THIS_VOLATILE (ref) = 1;
1620 return no_fold_p ? ref : fold (ref);
1623 /* Like build_simple_component_ref, except that we give an error if the
1624 reference could not be found. */
1627 build_component_ref (tree record_variable, tree component,
1628 tree field, int no_fold_p)
1630 tree ref = build_simple_component_ref (record_variable, component, field,
1636 /* If FIELD was specified, assume this is an invalid user field so
1637 raise constraint error. Otherwise, we can't find the type to return, so
/* A missing FIELD_DECL means a discriminant-dependent component that is
   absent in this variant: emit a Constraint_Error raise wrapped in a
   NULL_EXPR of the field's type.  */
1640 else if (field != 0)
1641 return build1 (NULL_EXPR, TREE_TYPE (field),
1642 build_call_raise (CE_Discriminant_Check_Failed));
1647 /* Build a GCC tree to call an allocation or deallocation function.
1648 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1649 generate an allocator.
1651 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1652 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1653 storage pool to use. If not present, malloc and free will be used except
1654 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1655 object dynamically on the stack frame. */
1658 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
1659 Entity_Id gnat_proc, Entity_Id gnat_pool,
/* Convert the bit alignment to bytes for the runtime interface.  */
1662 tree gnu_align = size_int (align / BITS_PER_UNIT);
/* Resolve any PLACEHOLDER_EXPRs in the size against the object.  */
1664 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
1666 if (Present (gnat_proc))
1668 /* The storage pools are obviously always tagged types, but the
1669 secondary stack uses the same mechanism and is not tagged */
1670 if (Is_Tagged_Type (Etype (gnat_pool)))
1672 /* The size is the third parameter; the alignment is the
1674 Entity_Id gnat_size_type
1675 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1676 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1677 tree gnu_proc = gnat_to_gnu (gnat_proc);
1678 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1679 tree gnu_pool = gnat_to_gnu (gnat_pool);
1680 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1681 tree gnu_args = NULL_TREE;
1684 /* The first arg is always the address of the storage pool; next
1685 comes the address of the object, for a deallocator, then the
1686 size and alignment. */
1688 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1692 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1695 = chainon (gnu_args,
1696 build_tree_list (NULL_TREE,
1697 convert (gnu_size_type, gnu_size)));
1699 = chainon (gnu_args,
1700 build_tree_list (NULL_TREE,
1701 convert (gnu_size_type, gnu_align)));
1703 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1704 gnu_proc_addr, gnu_args, NULL_TREE);
1705 TREE_SIDE_EFFECTS (gnu_call) = 1;
1709 /* Secondary stack case. */
1712 /* The size is the second parameter */
1713 Entity_Id gnat_size_type
1714 = Etype (Next_Formal (First_Formal (gnat_proc)));
1715 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1716 tree gnu_proc = gnat_to_gnu (gnat_proc);
1717 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1718 tree gnu_args = NULL_TREE;
1721 /* The first arg is the address of the object, for a
1722 deallocator, then the size */
1725 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1728 = chainon (gnu_args,
1729 build_tree_list (NULL_TREE,
1730 convert (gnu_size_type, gnu_size)));
1732 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1733 gnu_proc_addr, gnu_args, NULL_TREE);
1734 TREE_SIDE_EFFECTS (gnu_call) = 1;
/* No user-defined pool: a deallocation goes straight to free().  */
1740 return build_call_1_expr (free_decl, gnu_obj);
1742 /* ??? For now, disable variable-sized allocators in the stack since
1743 we can't yet gimplify an ALLOCATE_EXPR. */
1744 else if (gnat_pool == -1
1745 && TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1747 /* If the size is a constant, we can put it in the fixed portion of
1748 the stack frame to avoid the need to adjust the stack pointer. */
1749 if (TREE_CODE (gnu_size) == INTEGER_CST && ! flag_stack_check)
1752 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1753 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1755 = create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1756 gnu_array_type, NULL_TREE, 0, 0, 0, 0, 0,
1759 return convert (ptr_void_type_node,
1760 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1765 return build (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
/* Default pool allocation: flag implicit heap allocations to the
   front end, then call malloc.  */
1770 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1771 Check_No_Implicit_Heap_Alloc (gnat_node);
1772 return build_call_1_expr (malloc_decl, gnu_size);
1776 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1777 initial value is INIT, if INIT is nonzero. Convert the expression to
1778 RESULT_TYPE, which must be some type of pointer. Return the tree.
1779 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1780 the storage pool to use.
   NOTE(review): several interior lines of this function are not visible in
   this view; comments below describe only the code that is shown.  */
1783 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
1784 Entity_Id gnat_pool, Node_Id gnat_node)
1786 tree size = TYPE_SIZE_UNIT (type);
1789 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1790 if (init != 0 && TREE_CODE (init) == NULL_EXPR)
1791 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1793 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1794 sizes of the object and its template. Allocate the whole thing and
1795 fill in the parts that are known. */
1796 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
/* The template type is found at a different position in a fat pointer
   than in a thin pointer's designated record.  */
1799 = (TYPE_FAT_POINTER_P (result_type)
1800 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1801 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1803 = build_unc_object_type (template_type, type,
1804 get_identifier ("ALLOC"));
1805 tree storage_ptr_type = build_pointer_type (storage_type);
1807 tree template_cons = NULL_TREE;
1809 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
1812 /* If the size overflows, pass -1 so the allocator will raise
1814 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1815 size = ssize_int (-1);
1817 storage = build_call_alloc_dealloc (NULL_TREE, size,
1818 TYPE_ALIGN (storage_type),
1819 gnat_proc, gnat_pool, gnat_node);
/* STORAGE is evaluated more than once below; protect it.  */
1820 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
/* Strip a padding record down to the actual object type.  */
1822 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1824 type = TREE_TYPE (TYPE_FIELDS (type));
1827 init = convert (type, init);
1830 /* If there is an initializing expression, make a constructor for
1831 the entire object including the bounds and copy it into the
1832 object. If there is no initializing expression, just set the
1836 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1838 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1839 build_template (template_type, type,
/* Store the constructor through the storage pointer and yield the
   pointer itself as the value of the COMPOUND_EXPR.  */
1845 build (COMPOUND_EXPR, storage_ptr_type,
1847 (MODIFY_EXPR, storage_type,
1848 build_unary_op (INDIRECT_REF, NULL_TREE,
1849 convert (storage_ptr_type, storage)),
1850 gnat_build_constructor (storage_type, template_cons)),
1851 convert (storage_ptr_type, storage)));
/* No initializer: only fill in the template (bounds) part.  */
1855 (COMPOUND_EXPR, result_type,
1857 (MODIFY_EXPR, template_type,
1859 (build_unary_op (INDIRECT_REF, NULL_TREE,
1860 convert (storage_ptr_type, storage)),
1861 NULL_TREE, TYPE_FIELDS (storage_type), 0),
1862 build_template (template_type, type, NULL_TREE)),
1863 convert (result_type, convert (storage_ptr_type, storage)));
1866 /* If we have an initializing expression, see if its size is simpler
1867 than the size from the type. */
1868 if (init != 0 && TYPE_SIZE_UNIT (TREE_TYPE (init)) != 0
1869 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1870 || CONTAINS_PLACEHOLDER_P (size)))
1871 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1873 /* If the size is still self-referential, reference the initializing
1874 expression, if it is present. If not, this must have been a
1875 call to allocate a library-level object, in which case we use
1876 the maximum size. */
1877 if (CONTAINS_PLACEHOLDER_P (size))
1880 size = max_size (size, 1);
1882 size = substitute_placeholder_in_expr (size, init);
1885 /* If the size overflows, pass -1 so the allocator will raise
1887 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1888 size = ssize_int (-1);
1890 /* If this is a type whose alignment is larger than the
1891 biggest we support in normal alignment and this is in
1892 the default storage pool, make an "aligning type", allocate
1893 it, point to the field we need, and return that. */
1894 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1897 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1899 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1900 BIGGEST_ALIGNMENT, Empty,
/* RESULT is used twice below (deref + component ref); save it.  */
1902 result = save_expr (result);
1903 result = convert (build_pointer_type (new_type), result);
1904 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1905 result = build_component_ref (result, NULL_TREE,
1906 TYPE_FIELDS (new_type), 0);
1907 result = convert (result_type,
1908 build_unary_op (ADDR_EXPR, NULL_TREE, result));
/* Normal-alignment case: allocate SIZE bytes directly.  */
1911 result = convert (result_type,
1912 build_call_alloc_dealloc (NULL_TREE, size,
1918 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1919 the value, and return the address. Do this with a COMPOUND_EXPR. */
1923 result = save_expr (result);
1925 = build (COMPOUND_EXPR, TREE_TYPE (result),
1927 (MODIFY_EXPR, NULL_TREE,
1928 build_unary_op (INDIRECT_REF, TREE_TYPE (TREE_TYPE (result)),
1934 return convert (result_type, result);
1937 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1938 GNAT_FORMAL is how we find the descriptor record.  Each field of the
   descriptor record carries its initializer in DECL_INITIAL, expressed in
   terms of a placeholder for the described object; we substitute EXPR for
   that placeholder to produce each element of the constructor.  */
1941 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
/* The descriptor record type is the designated type of the formal.  */
1943 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1945 tree const_list = 0;
/* The descriptor points at the object, so EXPR must be addressable.  */
1947 expr = maybe_unconstrained_array (expr);
1948 gnat_mark_addressable (expr);
1950 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
1953 convert (TREE_TYPE (field),
1954 SUBSTITUTE_PLACEHOLDER_IN_EXPR
1955 (DECL_INITIAL (field), expr)),
/* The list was built back-to-front; reverse it before building.  */
1958 return gnat_build_constructor (record_type, nreverse (const_list));
1961 /* Indicate that we need to make the address of EXPR_NODE and it therefore
1962 should not be allocated in a register. Returns true if successful. */
1965 gnat_mark_addressable (tree expr_node)
1968 switch (TREE_CODE (expr_node))
1973 case ARRAY_RANGE_REF:
1976 case VIEW_CONVERT_EXPR:
1978 case NON_LVALUE_EXPR:
1980 expr_node = TREE_OPERAND (expr_node, 0);
1984 TREE_ADDRESSABLE (expr_node) = 1;
1990 TREE_ADDRESSABLE (expr_node) = 1;
1994 TREE_ADDRESSABLE (expr_node) = 1;
1998 return (DECL_CONST_CORRESPONDING_VAR (expr_node) != 0
1999 && (gnat_mark_addressable
2000 (DECL_CONST_CORRESPONDING_VAR (expr_node))));