1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1. Then this yields nonzero if overflow occurred during the
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOWED if,
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs
191 or OVERFLOWABLE is <0 and any overflow occurs
192 We set TREE_CONSTANT_OVERFLOWED if,
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOWED.
195 We return either the original T, or a copy. */
198 force_fit_type (tree t, int overflowable,
199 bool overflowed, bool overflowed_const)
201 unsigned HOST_WIDE_INT low;
204 int sign_extended_type;
206 gcc_assert (TREE_CODE (t) == INTEGER_CST);
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
211 if (POINTER_TYPE_P (TREE_TYPE (t))
212 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 low &= ~((HOST_WIDE_INT) (-1) << prec);
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (high & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)low < 0)
252 /* Sign extend bottom half? */
253 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 low |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value changed, return a new node. */
261 if (overflowed || overflowed_const
262 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264 t = build_int_cst_wide (TREE_TYPE (t), low, high);
268 || (overflowable > 0 && sign_extended_type))
271 TREE_OVERFLOW (t) = 1;
272 TREE_CONSTANT_OVERFLOW (t) = 1;
274 else if (overflowed_const)
277 TREE_CONSTANT_OVERFLOW (t) = 1;
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
294 unsigned HOST_WIDE_INT l;
298 h = h1 + h2 + (l < l1);
302 return OVERFLOW_SUM_SIGN (h1, h2, h);
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
311 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
318 return (*hv & h1) < 0;
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
335 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
336 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
337 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
339 HOST_WIDE_INT arg1[4];
340 HOST_WIDE_INT arg2[4];
341 HOST_WIDE_INT prod[4 * 2];
342 unsigned HOST_WIDE_INT carry;
344 unsigned HOST_WIDE_INT toplow, neglow;
345 HOST_WIDE_INT tophigh, neghigh;
347 encode (arg1, l1, h1);
348 encode (arg2, l2, h2);
350 memset (prod, 0, sizeof prod);
352 for (i = 0; i < 4; i++)
355 for (j = 0; j < 4; j++)
358 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
359 carry += arg1[i] * arg2[j];
360 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 prod[k] = LOWPART (carry);
363 carry = HIGHPART (carry);
368 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
370 /* Check for overflow by calculating the top half of the answer in full;
371 it should agree with the low half's sign bit. */
372 decode (prod + 4, &toplow, &tophigh);
375 neg_double (l2, h2, &neglow, &neghigh);
376 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
380 neg_double (l1, h1, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
393 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
394 HOST_WIDE_INT count, unsigned int prec,
395 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
397 unsigned HOST_WIDE_INT signmask;
401 rshift_double (l1, h1, -count, prec, lv, hv, arith);
405 if (SHIFT_COUNT_TRUNCATED)
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
415 else if (count >= HOST_BITS_PER_WIDE_INT)
417 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
422 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
427 /* Sign extend all bits that are beyond the precision. */
429 signmask = -((prec > HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT) *hv
431 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432 : (*lv >> (prec - 1))) & 1);
434 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
436 else if (prec >= HOST_BITS_PER_WIDE_INT)
438 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
444 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445 *lv |= signmask << prec;
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
455 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
456 HOST_WIDE_INT count, unsigned int prec,
457 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
460 unsigned HOST_WIDE_INT signmask;
463 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
466 if (SHIFT_COUNT_TRUNCATED)
469 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
471 /* Shifting by the host word size is undefined according to the
472 ANSI standard, so we must handle this as a special case. */
476 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
483 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
488 /* Zero / sign extend all bits that are beyond the precision. */
490 if (count >= (HOST_WIDE_INT)prec)
495 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
497 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
499 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
500 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
506 *lv |= signmask << (prec - count);
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517 HOST_WIDE_INT count, unsigned int prec,
518 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
520 unsigned HOST_WIDE_INT s1l, s2l;
521 HOST_WIDE_INT s1h, s2h;
527 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 /* Rotate the doubleword integer in L1, H1 left by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
565 div_and_round_double (enum tree_code code, int uns,
566 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig,
568 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig,
570 unsigned HOST_WIDE_INT *lquo,
571 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
575 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den[4], quo[4];
578 unsigned HOST_WIDE_INT work;
579 unsigned HOST_WIDE_INT carry = 0;
580 unsigned HOST_WIDE_INT lnum = lnum_orig;
581 HOST_WIDE_INT hnum = hnum_orig;
582 unsigned HOST_WIDE_INT lden = lden_orig;
583 HOST_WIDE_INT hden = hden_orig;
586 if (hden == 0 && lden == 0)
587 overflow = 1, lden = 1;
589 /* Calculate quotient sign and convert operands to unsigned. */
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum, hnum, &lnum, &hnum)
597 && ((HOST_WIDE_INT) lden & hden) == -1)
603 neg_double (lden, hden, &lden, &hden);
607 if (hnum == 0 && hden == 0)
608 { /* single precision */
610 /* This unsigned division rounds toward zero. */
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
624 memset (quo, 0, sizeof quo);
626 memset (num, 0, sizeof num); /* to zero 9th element */
627 memset (den, 0, sizeof den);
629 encode (num, lnum, hnum);
630 encode (den, lden, hden);
632 /* Special code for when the divisor < BASE. */
633 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
635 /* hnum != 0 already checked. */
636 for (i = 4 - 1; i >= 0; i--)
638 work = num[i] + carry * BASE;
639 quo[i] = work / lden;
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig, den_hi_sig;
648 unsigned HOST_WIDE_INT quo_est, scale;
650 /* Find the highest nonzero divisor digit. */
651 for (i = 4 - 1;; i--)
658 /* Insure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
661 scale = BASE / (den[den_hi_sig] + 1);
663 { /* scale divisor and dividend */
665 for (i = 0; i <= 4 - 1; i++)
667 work = (num[i] * scale) + carry;
668 num[i] = LOWPART (work);
669 carry = HIGHPART (work);
674 for (i = 0; i <= 4 - 1; i++)
676 work = (den[i] * scale) + carry;
677 den[i] = LOWPART (work);
678 carry = HIGHPART (work);
679 if (den[i] != 0) den_hi_sig = i;
686 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order quotient digit.
690 quo_est is never low and is at most 2 high. */
691 unsigned HOST_WIDE_INT tmp;
693 num_hi_sig = i + den_hi_sig + 1;
694 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695 if (num[num_hi_sig] != den[den_hi_sig])
696 quo_est = work / den[den_hi_sig];
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp = work - quo_est * den[den_hi_sig];
703 && (den[den_hi_sig - 1] * quo_est
704 > (tmp * BASE + num[num_hi_sig - 2])))
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is the I - 1st digit. */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = quo_est * den[j] + carry;
715 carry = HIGHPART (work);
716 work = num[i + j] - LOWPART (work);
717 num[i + j] = LOWPART (work);
718 carry += HIGHPART (work) != 0;
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 carry = 0; /* add divisor back in */
727 for (j = 0; j <= den_hi_sig; j++)
729 work = num[i + j] + den[j] + carry;
730 carry = HIGHPART (work);
731 num[i + j] = LOWPART (work);
734 num [num_hi_sig] += carry;
737 /* Store the quotient digit. */
742 decode (quo, lquo, hquo);
745 /* If result is negative, make it so. */
747 neg_double (*lquo, *hquo, lquo, hquo);
749 /* Compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751 neg_double (*lrem, *hrem, lrem, hrem);
752 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
757 case TRUNC_MOD_EXPR: /* round toward zero */
758 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762 case FLOOR_MOD_EXPR: /* round toward negative infinity */
763 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 case CEIL_MOD_EXPR: /* round toward positive infinity */
775 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 case ROUND_MOD_EXPR: /* round to closest integer */
787 unsigned HOST_WIDE_INT labs_rem = *lrem;
788 HOST_WIDE_INT habs_rem = *hrem;
789 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790 HOST_WIDE_INT habs_den = hden, htwice;
792 /* Get absolute values. */
794 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 neg_double (lden, hden, &labs_den, &habs_den);
798 /* If (2 * abs (lrem) >= abs (lden)) */
799 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
800 labs_rem, habs_rem, <wice, &htwice);
802 if (((unsigned HOST_WIDE_INT) habs_den
803 < (unsigned HOST_WIDE_INT) htwice)
804 || (((unsigned HOST_WIDE_INT) habs_den
805 == (unsigned HOST_WIDE_INT) htwice)
806 && (labs_den < ltwice)))
810 add_double (*lquo, *hquo,
811 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
814 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 /* Compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828 neg_double (*lrem, *hrem, lrem, hrem);
829 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
838 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
840 unsigned HOST_WIDE_INT int1l, int2l;
841 HOST_WIDE_INT int1h, int2h;
842 unsigned HOST_WIDE_INT quol, reml;
843 HOST_WIDE_INT quoh, remh;
844 tree type = TREE_TYPE (arg1);
845 int uns = TYPE_UNSIGNED (type);
847 int1l = TREE_INT_CST_LOW (arg1);
848 int1h = TREE_INT_CST_HIGH (arg1);
849 int2l = TREE_INT_CST_LOW (arg2);
850 int2h = TREE_INT_CST_HIGH (arg2);
852 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853 &quol, &quoh, &reml, &remh);
854 if (remh != 0 || reml != 0)
857 return build_int_cst_wide (type, quol, quoh);
860 /* Return true if built-in mathematical function specified by CODE
861 preserves the sign of it argument, i.e. -f(x) == f(-x). */
864 negate_mathfn_p (enum built_in_function code)
888 /* Check whether we may negate an integer constant T without causing
892 may_negate_without_overflow_p (tree t)
894 unsigned HOST_WIDE_INT val;
898 gcc_assert (TREE_CODE (t) == INTEGER_CST);
900 type = TREE_TYPE (t);
901 if (TYPE_UNSIGNED (type))
904 prec = TYPE_PRECISION (type);
905 if (prec > HOST_BITS_PER_WIDE_INT)
907 if (TREE_INT_CST_LOW (t) != 0)
909 prec -= HOST_BITS_PER_WIDE_INT;
910 val = TREE_INT_CST_HIGH (t);
913 val = TREE_INT_CST_LOW (t);
914 if (prec < HOST_BITS_PER_WIDE_INT)
915 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
916 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
919 /* Determine whether an expression T can be cheaply negated using
920 the function negate_expr. */
923 negate_expr_p (tree t)
930 type = TREE_TYPE (t);
933 switch (TREE_CODE (t))
936 if (TYPE_UNSIGNED (type) || ! flag_trapv)
939 /* Check that -CST will not overflow type. */
940 return may_negate_without_overflow_p (t);
947 return negate_expr_p (TREE_REALPART (t))
948 && negate_expr_p (TREE_IMAGPART (t));
951 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
953 /* -(A + B) -> (-B) - A. */
954 if (negate_expr_p (TREE_OPERAND (t, 1))
955 && reorder_operands_p (TREE_OPERAND (t, 0),
956 TREE_OPERAND (t, 1)))
958 /* -(A + B) -> (-A) - B. */
959 return negate_expr_p (TREE_OPERAND (t, 0));
962 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
963 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
964 && reorder_operands_p (TREE_OPERAND (t, 0),
965 TREE_OPERAND (t, 1));
968 if (TYPE_UNSIGNED (TREE_TYPE (t)))
974 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
975 return negate_expr_p (TREE_OPERAND (t, 1))
976 || negate_expr_p (TREE_OPERAND (t, 0));
980 /* Negate -((double)float) as (double)(-float). */
981 if (TREE_CODE (type) == REAL_TYPE)
983 tree tem = strip_float_extensions (t);
985 return negate_expr_p (tem);
990 /* Negate -f(x) as f(-x). */
991 if (negate_mathfn_p (builtin_mathfn_code (t)))
992 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
996 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
997 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
999 tree op1 = TREE_OPERAND (t, 1);
1000 if (TREE_INT_CST_HIGH (op1) == 0
1001 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1002 == TREE_INT_CST_LOW (op1))
1013 /* Given T, an expression, return the negation of T. Allow for T to be
1014 null, in which case return null. */
1017 negate_expr (tree t)
1025 type = TREE_TYPE (t);
1026 STRIP_SIGN_NOPS (t);
1028 switch (TREE_CODE (t))
1031 tem = fold_negate_const (t, type);
1032 if (! TREE_OVERFLOW (tem)
1033 || TYPE_UNSIGNED (type)
1039 tem = fold_negate_const (t, type);
1040 /* Two's complement FP formats, such as c4x, may overflow. */
1041 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1042 return fold_convert (type, tem);
1047 tree rpart = negate_expr (TREE_REALPART (t));
1048 tree ipart = negate_expr (TREE_IMAGPART (t));
1050 if ((TREE_CODE (rpart) == REAL_CST
1051 && TREE_CODE (ipart) == REAL_CST)
1052 || (TREE_CODE (rpart) == INTEGER_CST
1053 && TREE_CODE (ipart) == INTEGER_CST))
1054 return build_complex (type, rpart, ipart);
1059 return fold_convert (type, TREE_OPERAND (t, 0));
1062 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1064 /* -(A + B) -> (-B) - A. */
1065 if (negate_expr_p (TREE_OPERAND (t, 1))
1066 && reorder_operands_p (TREE_OPERAND (t, 0),
1067 TREE_OPERAND (t, 1)))
1069 tem = negate_expr (TREE_OPERAND (t, 1));
1070 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1071 tem, TREE_OPERAND (t, 0));
1072 return fold_convert (type, tem);
1075 /* -(A + B) -> (-A) - B. */
1076 if (negate_expr_p (TREE_OPERAND (t, 0)))
1078 tem = negate_expr (TREE_OPERAND (t, 0));
1079 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1080 tem, TREE_OPERAND (t, 1));
1081 return fold_convert (type, tem);
1087 /* - (A - B) -> B - A */
1088 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1089 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1090 return fold_convert (type,
1091 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 TREE_OPERAND (t, 1),
1093 TREE_OPERAND (t, 0)));
1097 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1103 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1105 tem = TREE_OPERAND (t, 1);
1106 if (negate_expr_p (tem))
1107 return fold_convert (type,
1108 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1109 TREE_OPERAND (t, 0),
1110 negate_expr (tem)));
1111 tem = TREE_OPERAND (t, 0);
1112 if (negate_expr_p (tem))
1113 return fold_convert (type,
1114 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1116 TREE_OPERAND (t, 1)));
1121 /* Convert -((double)float) into (double)(-float). */
1122 if (TREE_CODE (type) == REAL_TYPE)
1124 tem = strip_float_extensions (t);
1125 if (tem != t && negate_expr_p (tem))
1126 return fold_convert (type, negate_expr (tem));
1131 /* Negate -f(x) as f(-x). */
1132 if (negate_mathfn_p (builtin_mathfn_code (t))
1133 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1135 tree fndecl, arg, arglist;
1137 fndecl = get_callee_fndecl (t);
1138 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1139 arglist = build_tree_list (NULL_TREE, arg);
1140 return build_function_call_expr (fndecl, arglist);
1145 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1146 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1148 tree op1 = TREE_OPERAND (t, 1);
1149 if (TREE_INT_CST_HIGH (op1) == 0
1150 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1151 == TREE_INT_CST_LOW (op1))
1153 tree ntype = TYPE_UNSIGNED (type)
1154 ? lang_hooks.types.signed_type (type)
1155 : lang_hooks.types.unsigned_type (type);
1156 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1157 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1158 return fold_convert (type, temp);
1167 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1168 return fold_convert (type, tem);
1171 /* Split a tree IN into a constant, literal and variable parts that could be
1172 combined with CODE to make IN. "constant" means an expression with
1173 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1174 commutative arithmetic operation. Store the constant part into *CONP,
1175 the literal in *LITP and return the variable part. If a part isn't
1176 present, set it to null. If the tree does not decompose in this way,
1177 return the entire tree as the variable part and the other parts as null.
1179 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1180 case, we negate an operand that was subtracted. Except if it is a
1181 literal for which we use *MINUS_LITP instead.
1183 If NEGATE_P is true, we are negating all of IN, again except a literal
1184 for which we use *MINUS_LITP instead.
1186 If IN is itself a literal or constant, return it as appropriate.
1188 Note that we do not guarantee that any of the three values will be the
1189 same type as IN, but they will have the same signedness and mode. */
1192 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1193 tree *minus_litp, int negate_p)
1201 /* Strip any conversions that don't change the machine mode or signedness. */
1202 STRIP_SIGN_NOPS (in);
1204 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1206 else if (TREE_CODE (in) == code
1207 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1208 /* We can associate addition and subtraction together (even
1209 though the C standard doesn't say so) for integers because
1210 the value is not affected. For reals, the value might be
1211 affected, so we can't. */
1212 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1213 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1215 tree op0 = TREE_OPERAND (in, 0);
1216 tree op1 = TREE_OPERAND (in, 1);
1217 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1218 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1220 /* First see if either of the operands is a literal, then a constant. */
1221 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1222 *litp = op0, op0 = 0;
1223 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1224 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1226 if (op0 != 0 && TREE_CONSTANT (op0))
1227 *conp = op0, op0 = 0;
1228 else if (op1 != 0 && TREE_CONSTANT (op1))
1229 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1231 /* If we haven't dealt with either operand, this is not a case we can
1232 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1233 if (op0 != 0 && op1 != 0)
1238 var = op1, neg_var_p = neg1_p;
1240 /* Now do any needed negations. */
1242 *minus_litp = *litp, *litp = 0;
1244 *conp = negate_expr (*conp);
1246 var = negate_expr (var);
1248 else if (TREE_CONSTANT (in))
1256 *minus_litp = *litp, *litp = 0;
1257 else if (*minus_litp)
1258 *litp = *minus_litp, *minus_litp = 0;
1259 *conp = negate_expr (*conp);
1260 var = negate_expr (var);
1266 /* Re-associate trees split by the above function. T1 and T2 are either
1267 expressions to associate or null. Return the new expression, if any. If
1268 we build an operation, do it in TYPE and with CODE. */
1271 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1278 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1279 try to fold this since we will have infinite recursion. But do
1280 deal with any NEGATE_EXPRs. */
1281 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1282 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1284 if (code == PLUS_EXPR)
1286 if (TREE_CODE (t1) == NEGATE_EXPR)
1287 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1288 fold_convert (type, TREE_OPERAND (t1, 0)));
1289 else if (TREE_CODE (t2) == NEGATE_EXPR)
1290 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1291 fold_convert (type, TREE_OPERAND (t2, 0)));
1292 else if (integer_zerop (t2))
1293 return fold_convert (type, t1);
1295 else if (code == MINUS_EXPR)
1297 if (integer_zerop (t2))
1298 return fold_convert (type, t1);
1301 return build2 (code, type, fold_convert (type, t1),
1302 fold_convert (type, t2));
1305 return fold_build2 (code, type, fold_convert (type, t1),
1306 fold_convert (type, t2));
1309 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1310 to produce a new constant.
1312 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1315 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1317 unsigned HOST_WIDE_INT int1l, int2l;
1318 HOST_WIDE_INT int1h, int2h;
1319 unsigned HOST_WIDE_INT low;
1321 unsigned HOST_WIDE_INT garbagel;
1322 HOST_WIDE_INT garbageh;
1324 tree type = TREE_TYPE (arg1);
1325 int uns = TYPE_UNSIGNED (type);
1327 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1330 int1l = TREE_INT_CST_LOW (arg1);
1331 int1h = TREE_INT_CST_HIGH (arg1);
1332 int2l = TREE_INT_CST_LOW (arg2);
1333 int2h = TREE_INT_CST_HIGH (arg2);
1338 low = int1l | int2l, hi = int1h | int2h;
1342 low = int1l ^ int2l, hi = int1h ^ int2h;
1346 low = int1l & int2l, hi = int1h & int2h;
1352 /* It's unclear from the C standard whether shifts can overflow.
1353 The following code ignores overflow; perhaps a C standard
1354 interpretation ruling is needed. */
1355 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1362 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1367 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1371 neg_double (int2l, int2h, &low, &hi);
1372 add_double (int1l, int1h, low, hi, &low, &hi);
1373 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1377 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1380 case TRUNC_DIV_EXPR:
1381 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1382 case EXACT_DIV_EXPR:
1383 /* This is a shortcut for a common special case. */
1384 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1385 && ! TREE_CONSTANT_OVERFLOW (arg1)
1386 && ! TREE_CONSTANT_OVERFLOW (arg2)
1387 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1389 if (code == CEIL_DIV_EXPR)
1392 low = int1l / int2l, hi = 0;
1396 /* ... fall through ... */
1398 case ROUND_DIV_EXPR:
1399 if (int2h == 0 && int2l == 1)
1401 low = int1l, hi = int1h;
1404 if (int1l == int2l && int1h == int2h
1405 && ! (int1l == 0 && int1h == 0))
1410 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1411 &low, &hi, &garbagel, &garbageh);
1414 case TRUNC_MOD_EXPR:
1415 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1416 /* This is a shortcut for a common special case. */
1417 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1418 && ! TREE_CONSTANT_OVERFLOW (arg1)
1419 && ! TREE_CONSTANT_OVERFLOW (arg2)
1420 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1422 if (code == CEIL_MOD_EXPR)
1424 low = int1l % int2l, hi = 0;
1428 /* ... fall through ... */
1430 case ROUND_MOD_EXPR:
1431 overflow = div_and_round_double (code, uns,
1432 int1l, int1h, int2l, int2h,
1433 &garbagel, &garbageh, &low, &hi);
1439 low = (((unsigned HOST_WIDE_INT) int1h
1440 < (unsigned HOST_WIDE_INT) int2h)
1441 || (((unsigned HOST_WIDE_INT) int1h
1442 == (unsigned HOST_WIDE_INT) int2h)
1445 low = (int1h < int2h
1446 || (int1h == int2h && int1l < int2l));
1448 if (low == (code == MIN_EXPR))
1449 low = int1l, hi = int1h;
1451 low = int2l, hi = int2h;
1458 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1462 /* Propagate overflow flags ourselves. */
1463 if (((!uns || is_sizetype) && overflow)
1464 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1467 TREE_OVERFLOW (t) = 1;
1468 TREE_CONSTANT_OVERFLOW (t) = 1;
1470 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1473 TREE_CONSTANT_OVERFLOW (t) = 1;
1477 t = force_fit_type (t, 1,
1478 ((!uns || is_sizetype) && overflow)
1479 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1480 TREE_CONSTANT_OVERFLOW (arg1)
1481 | TREE_CONSTANT_OVERFLOW (arg2));
1486 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1487 constant. We assume ARG1 and ARG2 have the same data type, or at least
1488 are the same kind of constant and the same machine mode.
1490 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1493 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1498 if (TREE_CODE (arg1) == INTEGER_CST)
1499 return int_const_binop (code, arg1, arg2, notrunc);
1501 if (TREE_CODE (arg1) == REAL_CST)
1503 enum machine_mode mode;
1506 REAL_VALUE_TYPE value;
1507 REAL_VALUE_TYPE result;
1511 d1 = TREE_REAL_CST (arg1);
1512 d2 = TREE_REAL_CST (arg2);
1514 type = TREE_TYPE (arg1);
1515 mode = TYPE_MODE (type);
1517 /* Don't perform operation if we honor signaling NaNs and
1518 either operand is a NaN. */
1519 if (HONOR_SNANS (mode)
1520 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1523 /* Don't perform operation if it would raise a division
1524 by zero exception. */
1525 if (code == RDIV_EXPR
1526 && REAL_VALUES_EQUAL (d2, dconst0)
1527 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1530 /* If either operand is a NaN, just return it. Otherwise, set up
1531 for floating-point trap; we return an overflow. */
1532 if (REAL_VALUE_ISNAN (d1))
1534 else if (REAL_VALUE_ISNAN (d2))
1537 inexact = real_arithmetic (&value, code, &d1, &d2);
1538 real_convert (&result, mode, &value);
1540 /* Don't constant fold this floating point operation if the
1541 result may dependent upon the run-time rounding mode and
1542 flag_rounding_math is set, or if GCC's software emulation
1543 is unable to accurately represent the result. */
1545 if ((flag_rounding_math
1546 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1547 && !flag_unsafe_math_optimizations))
1548 && (inexact || !real_identical (&result, &value)))
1551 t = build_real (type, result);
1553 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1554 TREE_CONSTANT_OVERFLOW (t)
1556 | TREE_CONSTANT_OVERFLOW (arg1)
1557 | TREE_CONSTANT_OVERFLOW (arg2);
1560 if (TREE_CODE (arg1) == COMPLEX_CST)
1562 tree type = TREE_TYPE (arg1);
1563 tree r1 = TREE_REALPART (arg1);
1564 tree i1 = TREE_IMAGPART (arg1);
1565 tree r2 = TREE_REALPART (arg2);
1566 tree i2 = TREE_IMAGPART (arg2);
1572 t = build_complex (type,
1573 const_binop (PLUS_EXPR, r1, r2, notrunc),
1574 const_binop (PLUS_EXPR, i1, i2, notrunc));
1578 t = build_complex (type,
1579 const_binop (MINUS_EXPR, r1, r2, notrunc),
1580 const_binop (MINUS_EXPR, i1, i2, notrunc));
1584 t = build_complex (type,
1585 const_binop (MINUS_EXPR,
1586 const_binop (MULT_EXPR,
1588 const_binop (MULT_EXPR,
1591 const_binop (PLUS_EXPR,
1592 const_binop (MULT_EXPR,
1594 const_binop (MULT_EXPR,
1601 tree t1, t2, real, imag;
1603 = const_binop (PLUS_EXPR,
1604 const_binop (MULT_EXPR, r2, r2, notrunc),
1605 const_binop (MULT_EXPR, i2, i2, notrunc),
1608 t1 = const_binop (PLUS_EXPR,
1609 const_binop (MULT_EXPR, r1, r2, notrunc),
1610 const_binop (MULT_EXPR, i1, i2, notrunc),
1612 t2 = const_binop (MINUS_EXPR,
1613 const_binop (MULT_EXPR, i1, r2, notrunc),
1614 const_binop (MULT_EXPR, r1, i2, notrunc),
1617 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1619 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1620 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1624 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1625 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1630 t = build_complex (type, real, imag);
1642 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1643 indicates which particular sizetype to create. */
1646 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1648 return build_int_cst (sizetype_tab[(int) kind], number);
1651 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1652 is a tree code. The type of the result is taken from the operands.
1653 Both must be the same type integer type and it must be a size type.
1654 If the operands are constant, so is the result. */
1657 size_binop (enum tree_code code, tree arg0, tree arg1)
1659 tree type = TREE_TYPE (arg0);
1661 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1662 && type == TREE_TYPE (arg1));
1664 /* Handle the special case of two integer constants faster. */
1665 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1667 /* And some specific cases even faster than that. */
1668 if (code == PLUS_EXPR && integer_zerop (arg0))
1670 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1671 && integer_zerop (arg1))
1673 else if (code == MULT_EXPR && integer_onep (arg0))
1676 /* Handle general case of two integer constants. */
1677 return int_const_binop (code, arg0, arg1, 0);
1680 if (arg0 == error_mark_node || arg1 == error_mark_node)
1681 return error_mark_node;
1683 return fold_build2 (code, type, arg0, arg1);
1686 /* Given two values, either both of sizetype or both of bitsizetype,
1687 compute the difference between the two values. Return the value
1688 in signed type corresponding to the type of the operands. */
1691 size_diffop (tree arg0, tree arg1)
1693 tree type = TREE_TYPE (arg0);
1696 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1697 && type == TREE_TYPE (arg1));
1699 /* If the type is already signed, just do the simple thing. */
1700 if (!TYPE_UNSIGNED (type))
1701 return size_binop (MINUS_EXPR, arg0, arg1);
1703 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract. The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1709 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1710 fold_convert (ctype, arg1));
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either). Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0, arg1))
1717 return fold_convert (ctype, integer_zero_node);
1718 else if (tree_int_cst_lt (arg1, arg0))
1719 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1721 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1722 fold_convert (ctype, size_binop (MINUS_EXPR,
1726 /* A subroutine of fold_convert_const handling conversions of an
1727 INTEGER_CST to another integer type. */
1730 fold_convert_const_int_from_int (tree type, tree arg1)
1734 /* Given an integer constant, make new constant with new type,
1735 appropriately sign-extended or truncated. */
1736 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1737 TREE_INT_CST_HIGH (arg1));
1739 t = force_fit_type (t,
1740 /* Don't set the overflow when
1741 converting a pointer */
1742 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1743 (TREE_INT_CST_HIGH (arg1) < 0
1744 && (TYPE_UNSIGNED (type)
1745 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1746 | TREE_OVERFLOW (arg1),
1747 TREE_CONSTANT_OVERFLOW (arg1));
1752 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1753 to an integer type. */
1756 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1761 /* The following code implements the floating point to integer
1762 conversion rules required by the Java Language Specification,
1763 that IEEE NaNs are mapped to zero and values that overflow
1764 the target precision saturate, i.e. values greater than
1765 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1766 are mapped to INT_MIN. These semantics are allowed by the
1767 C and C++ standards that simply state that the behavior of
1768 FP-to-integer conversion is unspecified upon overflow. */
1770 HOST_WIDE_INT high, low;
1772 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1776 case FIX_TRUNC_EXPR:
1777 real_trunc (&r, VOIDmode, &x);
1781 real_ceil (&r, VOIDmode, &x);
1784 case FIX_FLOOR_EXPR:
1785 real_floor (&r, VOIDmode, &x);
1788 case FIX_ROUND_EXPR:
1789 real_round (&r, VOIDmode, &x);
1796 /* If R is NaN, return zero and show we have an overflow. */
1797 if (REAL_VALUE_ISNAN (r))
1804 /* See if R is less than the lower bound or greater than the
1809 tree lt = TYPE_MIN_VALUE (type);
1810 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1811 if (REAL_VALUES_LESS (r, l))
1814 high = TREE_INT_CST_HIGH (lt);
1815 low = TREE_INT_CST_LOW (lt);
1821 tree ut = TYPE_MAX_VALUE (type);
1824 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1825 if (REAL_VALUES_LESS (u, r))
1828 high = TREE_INT_CST_HIGH (ut);
1829 low = TREE_INT_CST_LOW (ut);
1835 REAL_VALUE_TO_INT (&low, &high, r);
1837 t = build_int_cst_wide (type, low, high);
1839 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1840 TREE_CONSTANT_OVERFLOW (arg1));
1844 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1845 to another floating point type. */
1848 fold_convert_const_real_from_real (tree type, tree arg1)
1850 REAL_VALUE_TYPE value;
1853 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1854 t = build_real (type, value);
1856 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1857 TREE_CONSTANT_OVERFLOW (t)
1858 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1862 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1863 type TYPE. If no simplification can be done return NULL_TREE. */
1866 fold_convert_const (enum tree_code code, tree type, tree arg1)
1868 if (TREE_TYPE (arg1) == type)
1871 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1873 if (TREE_CODE (arg1) == INTEGER_CST)
1874 return fold_convert_const_int_from_int (type, arg1);
1875 else if (TREE_CODE (arg1) == REAL_CST)
1876 return fold_convert_const_int_from_real (code, type, arg1);
1878 else if (TREE_CODE (type) == REAL_TYPE)
1880 if (TREE_CODE (arg1) == INTEGER_CST)
1881 return build_real_from_int_cst (type, arg1);
1882 if (TREE_CODE (arg1) == REAL_CST)
1883 return fold_convert_const_real_from_real (type, arg1);
1888 /* Construct a vector of zero elements of vector type TYPE. */
1891 build_zero_vector (tree type)
1896 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1897 units = TYPE_VECTOR_SUBPARTS (type);
1900 for (i = 0; i < units; i++)
1901 list = tree_cons (NULL_TREE, elem, list);
1902 return build_vector (type, list);
1905 /* Convert expression ARG to type TYPE. Used by the middle-end for
1906 simple conversions in preference to calling the front-end's convert. */
1909 fold_convert (tree type, tree arg)
1911 tree orig = TREE_TYPE (arg);
1917 if (TREE_CODE (arg) == ERROR_MARK
1918 || TREE_CODE (type) == ERROR_MARK
1919 || TREE_CODE (orig) == ERROR_MARK)
1920 return error_mark_node;
1922 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1923 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1924 TYPE_MAIN_VARIANT (orig)))
1925 return fold_build1 (NOP_EXPR, type, arg);
1927 switch (TREE_CODE (type))
1929 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1932 if (TREE_CODE (arg) == INTEGER_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1938 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1939 || TREE_CODE (orig) == OFFSET_TYPE)
1940 return fold_build1 (NOP_EXPR, type, arg);
1941 if (TREE_CODE (orig) == COMPLEX_TYPE)
1943 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert (type, tem);
1946 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1947 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1948 return fold_build1 (NOP_EXPR, type, arg);
1951 if (TREE_CODE (arg) == INTEGER_CST)
1953 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1954 if (tem != NULL_TREE)
1957 else if (TREE_CODE (arg) == REAL_CST)
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1964 switch (TREE_CODE (orig))
1966 case INTEGER_TYPE: case CHAR_TYPE:
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1969 return fold_build1 (FLOAT_EXPR, type, arg);
1972 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1976 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1977 return fold_convert (type, tem);
1984 switch (TREE_CODE (orig))
1986 case INTEGER_TYPE: case CHAR_TYPE:
1987 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
1990 return build2 (COMPLEX_EXPR, type,
1991 fold_convert (TREE_TYPE (type), arg),
1992 fold_convert (TREE_TYPE (type), integer_zero_node));
1997 if (TREE_CODE (arg) == COMPLEX_EXPR)
1999 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2000 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2001 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 arg = save_expr (arg);
2005 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert (TREE_TYPE (type), rpart);
2008 ipart = fold_convert (TREE_TYPE (type), ipart);
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1 (NOP_EXPR, type, arg);
2025 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2032 /* Return false if expr can be assumed not to be an lvalue, true
2036 maybe_lvalue_p (tree x)
2038 /* We only need to wrap lvalue tree codes. */
2039 switch (TREE_CODE (x))
2050 case ALIGN_INDIRECT_REF:
2051 case MISALIGNED_INDIRECT_REF:
2053 case ARRAY_RANGE_REF:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2093 if (! maybe_lvalue_p (x))
2095 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2107 pedantic_non_lvalue (tree x)
2109 if (pedantic_lvalues)
2110 return non_lvalue (x);
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
2123 if (honor_nans && flag_trapping_math)
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2165 swap_tree_comparison (enum tree_code code)
2172 case UNORDERED_EXPR:
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2224 return COMPCODE_UNLT;
2226 return COMPCODE_UNEQ;
2228 return COMPCODE_UNLE;
2230 return COMPCODE_UNGT;
2232 return COMPCODE_LTGT;
2234 return COMPCODE_UNGE;
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2289 combine_comparisons (enum tree_code code, enum tree_code lcode,
2290 enum tree_code rcode, tree truth_type,
2291 tree ll_arg, tree lr_arg)
2293 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2294 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2295 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 enum comparison_code compcode;
2300 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2301 compcode = lcompcode & rcompcode;
2304 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2305 compcode = lcompcode | rcompcode;
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode &= ~COMPCODE_UNORD;
2317 if (compcode == COMPCODE_LTGT)
2318 compcode = COMPCODE_NE;
2319 else if (compcode == COMPCODE_ORD)
2320 compcode = COMPCODE_TRUE;
2322 else if (flag_trapping_math)
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2327 && (lcompcode != COMPCODE_EQ)
2328 && (lcompcode != COMPCODE_ORD);
2329 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2330 && (rcompcode != COMPCODE_EQ)
2331 && (rcompcode != COMPCODE_ORD);
2332 bool trap = (compcode & COMPCODE_UNORD) == 0
2333 && (compcode != COMPCODE_EQ)
2334 && (compcode != COMPCODE_ORD);
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2343 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2349 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap || rtrap) != trap)
2357 if (compcode == COMPCODE_TRUE)
2358 return constant_boolean_node (true, truth_type);
2359 else if (compcode == COMPCODE_FALSE)
2360 return constant_boolean_node (false, truth_type);
2362 return fold_build2 (compcode_to_comparison (compcode),
2363 truth_type, ll_arg, lr_arg);
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2369 truth_value_p (enum tree_code code)
2371 return (TREE_CODE_CLASS (code) == tcc_comparison
2372 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2373 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2374 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2404 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2410 /* If both types don't have the same signedness, then we can't consider
2411 them equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2419 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2435 && (TREE_CODE (arg0) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2442 switch (TREE_CODE (arg0))
2445 return (! TREE_CONSTANT_OVERFLOW (arg0)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1)
2447 && tree_int_cst_equal (arg0, arg1));
2450 return (! TREE_CONSTANT_OVERFLOW (arg0)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2453 TREE_REAL_CST (arg1)));
2459 if (TREE_CONSTANT_OVERFLOW (arg0)
2460 || TREE_CONSTANT_OVERFLOW (arg1))
2463 v1 = TREE_VECTOR_CST_ELTS (arg0);
2464 v2 = TREE_VECTOR_CST_ELTS (arg1);
2467 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2470 v1 = TREE_CHAIN (v1);
2471 v2 = TREE_CHAIN (v2);
2478 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2484 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2485 && ! memcmp (TREE_STRING_POINTER (arg0),
2486 TREE_STRING_POINTER (arg1),
2487 TREE_STRING_LENGTH (arg0)));
2490 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2496 if (flags & OEP_ONLY_CONST)
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501 non-null value. In the latter case, if either is null, both
2502 must be; otherwise, do the normal comparison. */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0))
2519 case FIX_TRUNC_EXPR:
2520 case FIX_FLOOR_EXPR:
2521 case FIX_ROUND_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2533 case tcc_comparison:
2535 if (OP_SAME (0) && OP_SAME (1))
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2552 switch (TREE_CODE (arg0))
2555 case ALIGN_INDIRECT_REF:
2556 case MISALIGNED_INDIRECT_REF:
2562 case ARRAY_RANGE_REF:
2563 /* Operands 2 and 3 may be null. */
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2580 case tcc_expression:
2581 switch (TREE_CODE (arg0))
2584 case TRUTH_NOT_EXPR:
2587 case TRUTH_ANDIF_EXPR:
2588 case TRUTH_ORIF_EXPR:
2589 return OP_SAME (0) && OP_SAME (1);
2591 case TRUTH_AND_EXPR:
2593 case TRUTH_XOR_EXPR:
2594 if (OP_SAME (0) && OP_SAME (1))
2597 /* Otherwise take into account that this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly cannot be equal. */
2610 unsigned int cef = call_expr_flags (arg0);
2611 if (flags & OEP_PURE_SAME)
2612 cef &= ECF_CONST | ECF_PURE;
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0 = TREE_OPERAND (arg0, 1);
2623 arg1 = TREE_OPERAND (arg1, 1);
2624 while (arg0 && arg1)
2626 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2630 arg0 = TREE_CHAIN (arg0);
2631 arg1 = TREE_CHAIN (arg1);
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0 || arg1);
2642 case tcc_declaration:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2646 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2647 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2654 #undef OP_SAME_WITH_NULL
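/* A usage sketch of the flags (pure_fn is a hypothetical pure
   function, not one from this file): with FLAGS == 0, two
   structurally identical trees such as a[i] + 1 and a[i] + 1 compare
   equal provided neither has side effects; with OEP_ONLY_CONST, only
   the constant cases above may return 1; with OEP_PURE_SAME, two
   calls pure_fn (x) and pure_fn (x) also compare equal, the caller
   having ensured global memory is unchanged in between.  */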
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2660 When in doubt, return 0. */
2663 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2665 int unsignedp1, unsignedpo;
2666 tree primarg0, primarg1, primother;
2667 unsigned int correct_width;
2669 if (operand_equal_p (arg0, arg1, 0))
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0 = arg0, primarg1 = arg1;
2680 STRIP_NOPS (primarg0);
2681 STRIP_NOPS (primarg1);
2682 if (operand_equal_p (primarg0, primarg1, 0))
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2691 primarg1 = get_narrower (arg1, &unsignedp1);
2692 primother = get_narrower (other, &unsignedpo);
2694 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2695 if (unsignedp1 == unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2699 tree type = TREE_TYPE (arg0);
2701 /* Make sure shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2704 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2706 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2721 If this is true, return 1. Otherwise, return zero. */
2724 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2726 enum tree_code code = TREE_CODE (arg);
2727 enum tree_code_class class = TREE_CODE_CLASS (code);
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2732 else if (class == tcc_expression
2733 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2734 || code == COMPOUND_EXPR))
2737 else if (class == tcc_expression && code == SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 too complex to handle. */
2742 if (*cval1 || *cval2)
2752 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2755 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p));
2762 case tcc_expression:
2763 if (code == COND_EXPR)
2764 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2765 cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p)
2768 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2769 cval1, cval2, save_p));
2772 case tcc_comparison:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
are the same. */
2779 if (operand_equal_p (TREE_OPERAND (arg, 0),
2780 TREE_OPERAND (arg, 1), 0))
2784 *cval1 = TREE_OPERAND (arg, 0);
2785 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2787 else if (*cval2 == 0)
2788 *cval2 = TREE_OPERAND (arg, 0);
2789 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2794 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2796 else if (*cval2 == 0)
2797 *cval2 = TREE_OPERAND (arg, 1);
2798 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
NEW1 and OLD1. */
2816 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2818 tree type = TREE_TYPE (arg);
2819 enum tree_code code = TREE_CODE (arg);
2820 enum tree_code_class class = TREE_CODE_CLASS (code);
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2825 else if (class == tcc_expression
2826 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2832 return fold_build1 (code, type,
2833 eval_subst (TREE_OPERAND (arg, 0),
2834 old0, new0, old1, new1));
2837 return fold_build2 (code, type,
2838 eval_subst (TREE_OPERAND (arg, 0),
2839 old0, new0, old1, new1),
2840 eval_subst (TREE_OPERAND (arg, 1),
2841 old0, new0, old1, new1));
2843 case tcc_expression:
2847 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2850 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2853 return fold_build3 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1),
2858 eval_subst (TREE_OPERAND (arg, 2),
2859 old0, new0, old1, new1));
2863 /* Fall through - ??? */
2865 case tcc_comparison:
2867 tree arg0 = TREE_OPERAND (arg, 0);
2868 tree arg1 = TREE_OPERAND (arg, 1);
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2874 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2876 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2879 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2881 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2884 return fold_build2 (code, type, arg0, arg1);
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2900 omit_one_operand (tree type, tree result, tree omitted)
2902 tree t = fold_convert (type, result);
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2907 return non_lvalue (t);
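/* For example, when fold simplifies f () * 0 to 0 but f () has side
   effects, omit_one_operand yields the equivalent of (f (), 0): a
   COMPOUND_EXPR that still evaluates the call, rather than silently
   dropping it.  */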
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2913 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2915 tree t = fold_convert (type, result);
2917 if (TREE_SIDE_EFFECTS (omitted))
2918 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2920 return pedantic_non_lvalue (t);
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2933 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2935 tree t = fold_convert (type, result);
2937 if (TREE_SIDE_EFFECTS (omitted2))
2938 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2939 if (TREE_SIDE_EFFECTS (omitted1))
2940 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2942 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2953 invert_truthvalue (tree arg)
2955 tree type = TREE_TYPE (arg);
2956 enum tree_code code = TREE_CODE (arg);
2958 if (code == ERROR_MARK)
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2965 if (TREE_CODE_CLASS (code) == tcc_comparison)
2967 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2968 if (FLOAT_TYPE_P (op_type)
2969 && flag_trapping_math
2970 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2971 && code != NE_EXPR && code != EQ_EXPR)
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2975 code = invert_tree_comparison (code,
2976 HONOR_NANS (TYPE_MODE (op_type)));
2977 if (code == ERROR_MARK)
2978 return build1 (TRUTH_NOT_EXPR, type, arg);
2980 return build2 (code, type,
2981 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2988 return constant_boolean_node (integer_zerop (arg), type);
2990 case TRUTH_AND_EXPR:
2991 return build2 (TRUTH_OR_EXPR, type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg, 1)));
2996 return build2 (TRUTH_AND_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg, 1)));
3000 case TRUTH_XOR_EXPR:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3006 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3007 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3010 return build2 (TRUTH_XOR_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 TREE_OPERAND (arg, 1));
3014 case TRUTH_ANDIF_EXPR:
3015 return build2 (TRUTH_ORIF_EXPR, type,
3016 invert_truthvalue (TREE_OPERAND (arg, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg, 1)));
3019 case TRUTH_ORIF_EXPR:
3020 return build2 (TRUTH_ANDIF_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg, 1)));
3024 case TRUTH_NOT_EXPR:
3025 return TREE_OPERAND (arg, 0);
3028 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3029 invert_truthvalue (TREE_OPERAND (arg, 1)),
3030 invert_truthvalue (TREE_OPERAND (arg, 2)));
3033 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3034 invert_truthvalue (TREE_OPERAND (arg, 1)));
3036 case NON_LVALUE_EXPR:
3037 return invert_truthvalue (TREE_OPERAND (arg, 0));
3040 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3045 return build1 (TREE_CODE (arg), type,
3046 invert_truthvalue (TREE_OPERAND (arg, 0)));
3049 if (!integer_onep (TREE_OPERAND (arg, 1)))
3051 return build2 (EQ_EXPR, type, arg,
3052 fold_convert (type, integer_zero_node));
3055 return build1 (TRUTH_NOT_EXPR, type, arg);
3057 case CLEANUP_POINT_EXPR:
3058 return build1 (CLEANUP_POINT_EXPR, type,
3059 invert_truthvalue (TREE_OPERAND (arg, 0)));
3064 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3065 return build1 (TRUTH_NOT_EXPR, type, arg);
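/* Examples of the cases above: !(a && b) becomes !a || !b by the
   TRUTH_AND_EXPR rule, and an integral !(x < y) becomes x >= y.  For
   floating point under -ftrapping-math, the non-equality comparisons
   are merely wrapped in TRUTH_NOT_EXPR, since when an operand is NaN
   the inverted comparison could trap or evaluate to false where
   !(x < y) is true.  */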
3068 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3069 operands are another bit-wise operation with a common input. If so,
3070 distribute the bit operations to save an operation and possibly two if
3071 constants are involved. For example, convert
3072 (A | B) & (A | C) into A | (B & C)
3073 Further simplification will occur if B and C are constants.
3075 If this optimization cannot be done, 0 will be returned. */
3078 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3083 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3084 || TREE_CODE (arg0) == code
3085 || (TREE_CODE (arg0) != BIT_AND_EXPR
3086 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3089 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3091 common = TREE_OPERAND (arg0, 0);
3092 left = TREE_OPERAND (arg0, 1);
3093 right = TREE_OPERAND (arg1, 1);
3095 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3097 common = TREE_OPERAND (arg0, 0);
3098 left = TREE_OPERAND (arg0, 1);
3099 right = TREE_OPERAND (arg1, 0);
3101 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3103 common = TREE_OPERAND (arg0, 1);
3104 left = TREE_OPERAND (arg0, 0);
3105 right = TREE_OPERAND (arg1, 1);
3107 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3109 common = TREE_OPERAND (arg0, 1);
3110 left = TREE_OPERAND (arg0, 0);
3111 right = TREE_OPERAND (arg1, 0);
3116 return fold_build2 (TREE_CODE (arg0), type, common,
3117 fold_build2 (code, type, left, right));
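/* A concrete instance (illustrative): (x | 3) & (x | 5) matches the
   first operand_equal_p case above, giving common == x, left == 3,
   right == 5, and is rebuilt as x | (3 & 5); the inner fold then
   reduces the constants to x | 1.  */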
3120 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3121 with code CODE. This optimization is unsafe. */
3123 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3125 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3126 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3128 /* (A / C) +- (B / C) -> (A +- B) / C. */
3130 && operand_equal_p (TREE_OPERAND (arg0, 1),
3131 TREE_OPERAND (arg1, 1), 0))
3132 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3133 fold_build2 (code, type,
3134 TREE_OPERAND (arg0, 0),
3135 TREE_OPERAND (arg1, 0)),
3136 TREE_OPERAND (arg0, 1));
3138 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3139 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3140 TREE_OPERAND (arg1, 0), 0)
3141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3144 REAL_VALUE_TYPE r0, r1;
3145 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3146 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3148 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3150 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3151 real_arithmetic (&r0, code, &r0, &r1);
3152 return fold_build2 (MULT_EXPR, type,
3153 TREE_OPERAND (arg0, 0),
3154 build_real (type, r0));
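/* For instance (unsafe in general, as the comment above notes):
   a/2.0 + a/4.0 matches the second pattern and becomes
   a * (1/2.0 + 1/4.0), i.e. a * 0.75; the reciprocals happen to be
   exact here, but need not be for arbitrary constants.  */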
3160 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3161 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3164 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3171 tree size = TYPE_SIZE (TREE_TYPE (inner));
3172 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3173 || POINTER_TYPE_P (TREE_TYPE (inner)))
3174 && host_integerp (size, 0)
3175 && tree_low_cst (size, 0) == bitsize)
3176 return fold_convert (type, inner);
3179 result = build3 (BIT_FIELD_REF, type, inner,
3180 size_int (bitsize), bitsize_int (bitpos));
3182 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3187 /* Optimize a bit-field compare.
3189 There are two cases: First is a compare against a constant and the
3190 second is a comparison of two items where the fields are at the same
3191 bit position relative to the start of a chunk (byte, halfword, word)
3192 large enough to contain it. In these cases we can avoid the shift
3193 implicit in bitfield extractions.
3195 For constants, we emit a compare of the shifted constant with the
3196 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3197 compared. For two fields at the same position, we do the ANDs with the
3198 similar mask and compare the result of the ANDs.
3200 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3201 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3202 are the left and right operands of the comparison, respectively.
3204 If the optimization described above can be done, we return the resulting
3205 tree. Otherwise we return zero. */
3208 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3211 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3212 tree type = TREE_TYPE (lhs);
3213 tree signed_type, unsigned_type;
3214 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3215 enum machine_mode lmode, rmode, nmode;
3216 int lunsignedp, runsignedp;
3217 int lvolatilep = 0, rvolatilep = 0;
3218 tree linner, rinner = NULL_TREE;
3222 /* Get all the information about the extractions being done. If the bit size
3223 is the same as the size of the underlying object, we aren't doing an
3224 extraction at all and so can do nothing. We also don't want to
3225 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3226 then will no longer be able to replace it. */
3227 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3228 &lunsignedp, &lvolatilep, false);
3229 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3230 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3235 /* If this is not a constant, we can only do something if bit positions,
3236 sizes, and signedness are the same. */
3237 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3238 &runsignedp, &rvolatilep, false);
3240 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3241 || lunsignedp != runsignedp || offset != 0
3242 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3246 /* See if we can find a mode to refer to this field. We should be able to,
3247 but fail if we can't. */
3248 nmode = get_best_mode (lbitsize, lbitpos,
3249 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3250 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3251 TYPE_ALIGN (TREE_TYPE (rinner))),
3252 word_mode, lvolatilep || rvolatilep);
3253 if (nmode == VOIDmode)
3256 /* Set signed and unsigned types of the precision of this mode for the
shifts below. */
3258 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3259 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3261 /* Compute the bit position and size for the new reference and our offset
3262 within it. If the new reference is the same size as the original, we
3263 won't optimize anything, so return zero. */
3264 nbitsize = GET_MODE_BITSIZE (nmode);
3265 nbitpos = lbitpos & ~ (nbitsize - 1);
3267 if (nbitsize == lbitsize)
3270 if (BYTES_BIG_ENDIAN)
3271 lbitpos = nbitsize - lbitsize - lbitpos;
3273 /* Make the mask to be used against the extracted field. */
3274 mask = build_int_cst (unsigned_type, -1);
3275 mask = force_fit_type (mask, 0, false, false);
3276 mask = fold_convert (unsigned_type, mask);
3277 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3278 mask = const_binop (RSHIFT_EXPR, mask,
3279 size_int (nbitsize - lbitsize - lbitpos), 0);
3282 /* If not comparing with constant, just rework the comparison
and return. */
3284 return build2 (code, compare_type,
3285 build2 (BIT_AND_EXPR, unsigned_type,
3286 make_bit_field_ref (linner, unsigned_type,
3287 nbitsize, nbitpos, 1),
3289 build2 (BIT_AND_EXPR, unsigned_type,
3290 make_bit_field_ref (rinner, unsigned_type,
3291 nbitsize, nbitpos, 1),
3294 /* Otherwise, we are handling the constant case. See if the constant is too
3295 big for the field. Warn and return a tree for 0 (false) if so. We do
3296 this not only for its own sake, but to avoid having to test for this
3297 error case below. If we didn't, we might generate wrong code.
3299 For unsigned fields, the constant shifted right by the field length should
3300 be all zero. For signed fields, the high-order bits should agree with
the sign bit. */
3305 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3306 fold_convert (unsigned_type, rhs),
3307 size_int (lbitsize), 0)))
3309 warning (0, "comparison is always %d due to width of bit-field",
3311 return constant_boolean_node (code == NE_EXPR, compare_type);
3316 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3317 size_int (lbitsize - 1), 0);
3318 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3320 warning (0, "comparison is always %d due to width of bit-field",
3322 return constant_boolean_node (code == NE_EXPR, compare_type);
3326 /* Single-bit compares should always be against zero. */
3327 if (lbitsize == 1 && ! integer_zerop (rhs))
3329 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3330 rhs = fold_convert (type, integer_zero_node);
3333 /* Make a new bitfield reference, shift the constant over the
3334 appropriate number of bits and mask it with the computed mask
3335 (in case this was a signed field). If we changed it, make a new one. */
3336 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3339 TREE_SIDE_EFFECTS (lhs) = 1;
3340 TREE_THIS_VOLATILE (lhs) = 1;
3343 rhs = fold (const_binop (BIT_AND_EXPR,
3344 const_binop (LSHIFT_EXPR,
3345 fold_convert (unsigned_type, rhs),
3346 size_int (lbitpos), 0),
3349 return build2 (code, compare_type,
3350 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
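/* Sketch of the constant case (hypothetical layout): for a 3-bit
   field F stored at bit position 2 of the unit chosen by
   get_best_mode, F == 5 becomes roughly
   ((unit & (7 << 2)) == (5 << 2)), trading the extraction shift for
   a mask already folded into the constant side.  */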
3354 /* Subroutine for fold_truthop: decode a field reference.
3356 If EXP is a comparison reference, we return the innermost reference.
3358 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3359 set to the starting bit number.
3361 If the innermost field can be completely contained in a mode-sized
3362 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3364 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3365 otherwise it is not changed.
3367 *PUNSIGNEDP is set to the signedness of the field.
3369 *PMASK is set to the mask used. This is either contained in a
3370 BIT_AND_EXPR or derived from the width of the field.
3372 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3374 Return 0 if this is not a component reference or is one that we can't
3375 do anything with. */
3378 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3379 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3380 int *punsignedp, int *pvolatilep,
3381 tree *pmask, tree *pand_mask)
3383 tree outer_type = 0;
3385 tree mask, inner, offset;
3387 unsigned int precision;
3389 /* All the optimizations using this function assume integer fields.
3390 There are problems with FP fields since the type_for_size call
3391 below can fail for, e.g., XFmode. */
3392 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3395 /* We are interested in the bare arrangement of bits, so strip everything
3396 that doesn't affect the machine mode. However, record the type of the
3397 outermost expression if it may matter below. */
3398 if (TREE_CODE (exp) == NOP_EXPR
3399 || TREE_CODE (exp) == CONVERT_EXPR
3400 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3401 outer_type = TREE_TYPE (exp);
3404 if (TREE_CODE (exp) == BIT_AND_EXPR)
3406 and_mask = TREE_OPERAND (exp, 1);
3407 exp = TREE_OPERAND (exp, 0);
3408 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3409 if (TREE_CODE (and_mask) != INTEGER_CST)
3413 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3414 punsignedp, pvolatilep, false);
3415 if ((inner == exp && and_mask == 0)
3416 || *pbitsize < 0 || offset != 0
3417 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3420 /* If the number of bits in the reference is the same as the bitsize of
3421 the outer type, then the outer type gives the signedness. Otherwise
3422 (in case of a small bitfield) the signedness is unchanged. */
3423 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3424 *punsignedp = TYPE_UNSIGNED (outer_type);
3426 /* Compute the mask to access the bitfield. */
3427 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3428 precision = TYPE_PRECISION (unsigned_type);
3430 mask = build_int_cst (unsigned_type, -1);
3431 mask = force_fit_type (mask, 0, false, false);
3433 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3434 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3436 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3438 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3439 fold_convert (unsigned_type, and_mask), mask);
3442 *pand_mask = and_mask;
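/* E.g. (hypothetical layout): for EXP of the form s.f & 3, with s.f
   an unsigned 5-bit field at bit 8, the containing object is
   returned with *PBITSIZE == 5, *PBITPOS == 8, *PAND_MASK == 3, and
   *PMASK the 5-bit field mask merged with that BIT_AND_EXPR mask,
   here 3.  */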
3446 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
bits. */
3450 all_ones_mask_p (tree mask, int size)
3452 tree type = TREE_TYPE (mask);
3453 unsigned int precision = TYPE_PRECISION (type);
3456 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3457 tmask = force_fit_type (tmask, 0, false, false);
3460 tree_int_cst_equal (mask,
3461 const_binop (RSHIFT_EXPR,
3462 const_binop (LSHIFT_EXPR, tmask,
3463 size_int (precision - size),
3465 size_int (precision - size), 0));
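/* Per the contract above, with SIZE == 16 and a 32-bit MASK only the
   value 0x0000ffff (sixteen low-order ones) qualifies; 0x0000fffe
   and 0x0001ffff do not.  */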
3468 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3469 represents the sign bit of EXP's type. If EXP represents a sign
3470 or zero extension, also test VAL against the unextended type.
3471 The return value is the (sub)expression whose sign bit is VAL,
3472 or NULL_TREE otherwise. */
3475 sign_bit_p (tree exp, tree val)
3477 unsigned HOST_WIDE_INT mask_lo, lo;
3478 HOST_WIDE_INT mask_hi, hi;
3482 /* Tree EXP must have an integral type. */
3483 t = TREE_TYPE (exp);
3484 if (! INTEGRAL_TYPE_P (t))
3487 /* Tree VAL must be an integer constant. */
3488 if (TREE_CODE (val) != INTEGER_CST
3489 || TREE_CONSTANT_OVERFLOW (val))
3492 width = TYPE_PRECISION (t);
3493 if (width > HOST_BITS_PER_WIDE_INT)
3495 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3498 mask_hi = ((unsigned HOST_WIDE_INT) -1
3499 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3505 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3508 mask_lo = ((unsigned HOST_WIDE_INT) -1
3509 >> (HOST_BITS_PER_WIDE_INT - width));
3512 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3513 treat VAL as if it were unsigned. */
3514 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3515 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3518 /* Handle extension from a narrower type. */
3519 if (TREE_CODE (exp) == NOP_EXPR
3520 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3521 return sign_bit_p (TREE_OPERAND (exp, 0), val);
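/* For example, if EXP has a 32-bit type, only VAL == 0x80000000
   yields EXP here; and if EXP is (int) c for an 8-bit signed c, the
   NOP_EXPR case also accepts VAL == 0x80, the sign bit of the
   narrower type, returning c itself.  */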
3526 /* Subroutine for fold_truthop: determine if an operand is simple enough
3527 to be evaluated unconditionally. */
3530 simple_operand_p (tree exp)
3532 /* Strip any conversions that don't change the machine mode. */
3535 return (CONSTANT_CLASS_P (exp)
3536 || TREE_CODE (exp) == SSA_NAME
3538 && ! TREE_ADDRESSABLE (exp)
3539 && ! TREE_THIS_VOLATILE (exp)
3540 && ! DECL_NONLOCAL (exp)
3541 /* Don't regard global variables as simple. They may be
3542 allocated in ways unknown to the compiler (shared memory,
3543 #pragma weak, etc). */
3544 && ! TREE_PUBLIC (exp)
3545 && ! DECL_EXTERNAL (exp)
3546 /* Loading a static variable is unduly expensive, but global
3547 registers aren't expensive. */
3548 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3551 /* The following functions are subroutines to fold_range_test and allow it to
3552 try to change a logical combination of comparisons into a range test.
For example, both
3555 X == 2 || X == 3 || X == 4 || X == 5
and
X < 2 || X >= 5
are converted to
3559 (unsigned) (X - 2) <= 3
3561 We describe each set of comparisons as being either inside or outside
3562 a range, using a variable named like IN_P, and then describe the
3563 range with a lower and upper bound. If one of the bounds is omitted,
3564 it represents either the highest or lowest value of the type.
3566 In the comments below, we represent a range by two numbers in brackets
3567 preceded by a "+" to designate being inside that range, or a "-" to
3568 designate being outside that range, so the condition can be inverted by
3569 flipping the prefix. An omitted bound is represented by a "-". For
3570 example, "- [-, 10]" means being outside the range starting at the lowest
3571 possible value and ending at 10, in other words, being greater than 10.
3572 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
always false.
3575 We set up things so that the missing bounds are handled in a consistent
3576 manner so neither a missing bound nor "true" and "false" need to be
3577 handled using a special case. */
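/* In this notation the motivating example above is "+ [2, 5]" (X is
   inside the range from 2 to 5), its inversion X < 2 || X > 5 is
   "- [2, 5]", and conjoining "+ [2, -]" with "+ [-, 5]" again yields
   "+ [2, 5]", which is the kind of combination merge_ranges below
   performs.  */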
3579 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3580 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3581 and UPPER1_P are nonzero if the respective argument is an upper bound
3582 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3583 must be specified for a comparison. ARG1 will be converted to ARG0's
3584 type if both are specified. */
3587 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3588 tree arg1, int upper1_p)
3594 /* If neither arg represents infinity, do the normal operation.
3595 Else, if not a comparison, return infinity. Else handle the special
3596 comparison rules. Note that most of the cases below won't occur, but
3597 are handled for consistency. */
3599 if (arg0 != 0 && arg1 != 0)
3601 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3602 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3604 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3607 if (TREE_CODE_CLASS (code) != tcc_comparison)
3610 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3611 for neither. In real maths, we cannot assume open ended ranges are
3612 the same. But, this is computer arithmetic, where numbers are finite.
3613 We can therefore make the transformation of any unbounded range with
3614 the value Z, Z being greater than any representable number. This permits
3615 us to treat unbounded ranges as equal. */
3616 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3617 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3621 result = sgn0 == sgn1;
3624 result = sgn0 != sgn1;
3627 result = sgn0 < sgn1;
3630 result = sgn0 <= sgn1;
3633 result = sgn0 > sgn1;
3636 result = sgn0 >= sgn1;
3642 return constant_boolean_node (result, type);
3645 /* Given EXP, a logical expression, set the range it is testing into
3646 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3647 actually being tested. *PLOW and *PHIGH will be made of the same type
3648 as the returned expression. If EXP is not a comparison, we will most
3649 likely not be returning a useful value and range. */
3652 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3654 enum tree_code code;
3655 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3656 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3658 tree low, high, n_low, n_high;
3660 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3661 and see if we can refine the range. Some of the cases below may not
3662 happen, but it doesn't seem worth worrying about this. We "continue"
3663 the outer loop when we've changed something; otherwise we "break"
3664 the switch, which will "break" the while. */
3667 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3671 code = TREE_CODE (exp);
3672 exp_type = TREE_TYPE (exp);
3674 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3676 if (TREE_CODE_LENGTH (code) > 0)
3677 arg0 = TREE_OPERAND (exp, 0);
3678 if (TREE_CODE_CLASS (code) == tcc_comparison
3679 || TREE_CODE_CLASS (code) == tcc_unary
3680 || TREE_CODE_CLASS (code) == tcc_binary)
3681 arg0_type = TREE_TYPE (arg0);
3682 if (TREE_CODE_CLASS (code) == tcc_binary
3683 || TREE_CODE_CLASS (code) == tcc_comparison
3684 || (TREE_CODE_CLASS (code) == tcc_expression
3685 && TREE_CODE_LENGTH (code) > 1))
3686 arg1 = TREE_OPERAND (exp, 1);
3691 case TRUTH_NOT_EXPR:
3692 in_p = ! in_p, exp = arg0;
3695 case EQ_EXPR: case NE_EXPR:
3696 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3697 /* We can only do something if the range is testing for zero
3698 and if the second operand is an integer constant. Note that
3699 saying something is "in" the range we make is done by
3700 complementing IN_P since it will set in the initial case of
3701 being not equal to zero; "out" is leaving it alone. */
3702 if (low == 0 || high == 0
3703 || ! integer_zerop (low) || ! integer_zerop (high)
3704 || TREE_CODE (arg1) != INTEGER_CST)
3709 case NE_EXPR: /* - [c, c] */
3712 case EQ_EXPR: /* + [c, c] */
3713 in_p = ! in_p, low = high = arg1;
3715 case GT_EXPR: /* - [-, c] */
3716 low = 0, high = arg1;
3718 case GE_EXPR: /* + [c, -] */
3719 in_p = ! in_p, low = arg1, high = 0;
3721 case LT_EXPR: /* - [c, -] */
3722 low = arg1, high = 0;
3724 case LE_EXPR: /* + [-, c] */
3725 in_p = ! in_p, low = 0, high = arg1;
3731 /* If this is an unsigned comparison, we also know that EXP is
3732 greater than or equal to zero. We base the range tests we make
3733 on that fact, so we record it here so we can parse existing
3734 range tests. We test arg0_type since often the return type
3735 of, e.g. EQ_EXPR, is boolean. */
3736 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3738 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3740 fold_convert (arg0_type, integer_zero_node),
3744 in_p = n_in_p, low = n_low, high = n_high;
3746 /* If the high bound is missing, but we have a nonzero low
3747 bound, reverse the range so it goes from zero to the low bound
minus one. */
3749 if (high == 0 && low && ! integer_zerop (low))
3752 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3753 integer_one_node, 0);
3754 low = fold_convert (arg0_type, integer_zero_node);
3762 /* (-x) IN [a,b] -> x in [-b, -a] */
3763 n_low = range_binop (MINUS_EXPR, exp_type,
3764 fold_convert (exp_type, integer_zero_node),
3766 n_high = range_binop (MINUS_EXPR, exp_type,
3767 fold_convert (exp_type, integer_zero_node),
3769 low = n_low, high = n_high;
3775 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3776 fold_convert (exp_type, integer_one_node));
3779 case PLUS_EXPR: case MINUS_EXPR:
3780 if (TREE_CODE (arg1) != INTEGER_CST)
3783 /* If EXP is signed, any overflow in the computation is undefined,
3784 so we don't worry about it so long as our computations on
3785 the bounds don't overflow. For unsigned, overflow is defined
3786 and this is exactly the right thing. */
3787 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3788 arg0_type, low, 0, arg1, 0);
3789 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3790 arg0_type, high, 1, arg1, 0);
3791 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3792 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3795 /* Check for an unsigned range which has wrapped around the maximum
3796 value thus making n_high < n_low, and normalize it. */
3797 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3799 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3800 integer_one_node, 0);
3801 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3802 integer_one_node, 0);
3804 /* If the range is of the form +/- [ x+1, x ], we won't
3805 be able to normalize it. But then, it represents the
3806 whole range or the empty set, so make it
+/- [ -, - ]. */
3808 if (tree_int_cst_equal (n_low, low)
3809 && tree_int_cst_equal (n_high, high))
3815 low = n_low, high = n_high;
3820 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3821 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3824 if (! INTEGRAL_TYPE_P (arg0_type)
3825 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3826 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3829 n_low = low, n_high = high;
3832 n_low = fold_convert (arg0_type, n_low);
3835 n_high = fold_convert (arg0_type, n_high);
3838 /* If we're converting arg0 from an unsigned type, to exp,
3839 a signed type, we will be doing the comparison as unsigned.
3840 The tests above have already verified that LOW and HIGH
are both positive.
3843 So we have to ensure that we will handle large unsigned
3844 values the same way that the current signed bounds treat
negative values. */
3847 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3850 tree equiv_type = lang_hooks.types.type_for_mode
3851 (TYPE_MODE (arg0_type), 1);
3853 /* A range without an upper bound is, naturally, unbounded.
3854 Since convert would have cropped a very large value, use
3855 the max value for the destination type. */
3857 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3858 : TYPE_MAX_VALUE (arg0_type);
3860 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3861 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3862 fold_convert (arg0_type,
3864 fold_convert (arg0_type,
3867 /* If the low bound is specified, "and" the range with the
3868 range for which the original unsigned value will be
positive. */
3872 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3873 1, n_low, n_high, 1,
3874 fold_convert (arg0_type,
3879 in_p = (n_in_p == in_p);
3883 /* Otherwise, "or" the range with the range of the input
3884 that will be interpreted as negative. */
3885 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3886 0, n_low, n_high, 1,
3887 fold_convert (arg0_type,
3892 in_p = (in_p != n_in_p);
3897 low = n_low, high = n_high;
3907 /* If EXP is a constant, we can evaluate whether this is true or false. */
3908 if (TREE_CODE (exp) == INTEGER_CST)
3910 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3912 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3918 *pin_p = in_p, *plow = low, *phigh = high;
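/* A minimal example: for EXP == x > 10, the GT_EXPR case leaves IN_P
   clear and sets the bounds to an omitted low and a high of 10, so
   the function returns x described as "- [-, 10]": x lies outside
   [lowest, 10], which is exactly x > 10.  */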
3922 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3923 type, TYPE, return an expression to test if EXP is in (or out of, depending
3924 on IN_P) the range. Return 0 if the test couldn't be created. */
3927 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3929 tree etype = TREE_TYPE (exp);
3934 value = build_range_check (type, exp, 1, low, high);
3936 return invert_truthvalue (value);
3941 if (low == 0 && high == 0)
3942 return fold_convert (type, integer_one_node);
3945 return fold_build2 (LE_EXPR, type, exp,
3946 fold_convert (etype, high));
3949 return fold_build2 (GE_EXPR, type, exp,
3950 fold_convert (etype, low));
3952 if (operand_equal_p (low, high, 0))
3953 return fold_build2 (EQ_EXPR, type, exp,
3954 fold_convert (etype, low));
3956 if (integer_zerop (low))
3958 if (! TYPE_UNSIGNED (etype))
3960 etype = lang_hooks.types.unsigned_type (etype);
3961 high = fold_convert (etype, high);
3962 exp = fold_convert (etype, exp);
3964 return build_range_check (type, exp, 1, 0, high);
3967 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3968 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3970 unsigned HOST_WIDE_INT lo;
3974 prec = TYPE_PRECISION (etype);
3975 if (prec <= HOST_BITS_PER_WIDE_INT)
3978 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3982 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3983 lo = (unsigned HOST_WIDE_INT) -1;
3986 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3988 if (TYPE_UNSIGNED (etype))
3990 etype = lang_hooks.types.signed_type (etype);
3991 exp = fold_convert (etype, exp);
3993 return fold_build2 (GT_EXPR, type, exp,
3994 fold_convert (etype, integer_zero_node));
3998 value = const_binop (MINUS_EXPR, high, low, 0);
3999 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
4001 tree utype, minv, maxv;
4003 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4004 for the type in question, as we rely on this here. */
4005 switch (TREE_CODE (etype))
4010 utype = lang_hooks.types.unsigned_type (etype);
4011 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4012 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4013 integer_one_node, 1);
4014 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4015 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4019 high = fold_convert (etype, high);
4020 low = fold_convert (etype, low);
4021 exp = fold_convert (etype, exp);
4022 value = const_binop (MINUS_EXPR, high, low, 0);
4030 if (value != 0 && ! TREE_OVERFLOW (value))
4031 return build_range_check (type,
4032 fold_build2 (MINUS_EXPR, etype, exp, low),
4033 1, fold_convert (etype, integer_zero_node),
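/* This is the inverse of the range-test example above: a "+ [2, 5]"
   check on an int EXP falls through the special cases to
   value == 3, and the final recursive call produces
   (unsigned) (EXP - 2) <= 3, one subtraction and a single unsigned
   comparison.  */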
4039 /* Given two ranges, see if we can merge them into one. Return 1 if we
4040 can, 0 if we can't. Set the output range into the specified parameters. */
4043 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,