1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static tree fold_convert_const (enum tree_code, tree, tree);
93 static enum tree_code invert_tree_comparison (enum tree_code, bool);
94 static enum comparison_code comparison_to_compcode (enum tree_code);
95 static enum tree_code compcode_to_comparison (enum comparison_code);
96 static tree combine_comparisons (enum tree_code, enum tree_code,
97 enum tree_code, tree, tree, tree);
98 static int truth_value_p (enum tree_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand (tree, tree, tree);
103 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (tree, tree, int, int, int);
105 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
106 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
107 enum machine_mode *, int *, int *,
109 static int all_ones_mask_p (tree, int);
110 static tree sign_bit_p (tree, tree);
111 static int simple_operand_p (tree);
112 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 static tree fold_range_test (tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
127 static bool fold_real_zero_addition_p (tree, tree, int);
128 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
131 static tree fold_div_compare (enum tree_code, tree, tree, tree);
132 static bool reorder_operands_p (tree, tree);
133 static tree fold_negate_const (tree, tree);
134 static tree fold_not_const (tree, tree);
135 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 static tree fold_relational_hi_lo (enum tree_code *, const tree,
138 static bool tree_expr_nonzero_p (tree);
140 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
141 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
142 and SUM1. Then this yields nonzero if overflow occurred during the
145 Overflow occurs if A and B have the same sign, but A and SUM differ in
146 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
148 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
150 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
151 We do that by representing the two-word integer in 4 words, with only
152 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
153 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* NOTE(review): this excerpt elides several original source lines (the
   embedded line numbers jump).  In particular the `#define LOWPART(x) \'
   line (original line 155) appears to be missing; the next line is its
   continuation -- confirm against the full file.  */
156 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* HIGHPART extracts the upper half-word; BASE is 2**(half-word bits).  */
157 #define HIGHPART(x) \
158 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
159 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs.  */
/* NOTE(review): the declarator line (`static void', original line 165) and
   the braces are elided in this excerpt; code kept byte-identical.  */
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
/* Each output word holds only HOST_BITS_PER_WIDE_INT / 2 significant bits.  */
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* NOTE(review): the `static void' line and braces are elided in this
   excerpt.  Inverse of encode: recombines half-words via * BASE.  */
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
186 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
187 in overflow of the value, when >0 we are only interested in signed
188 overflow, for <0 we are interested in any overflow. OVERFLOWED
189 indicates whether overflow has already occurred. CONST_OVERFLOWED
190 indicates whether constant overflow has already occurred. We force
191 T's value to be within range of T's type (by setting to 0 or 1 all
192 the bits outside the type's range). We set TREE_OVERFLOWED if,
193 OVERFLOWED is non-zero,
194 or OVERFLOWABLE is >0 and signed overflow occurs
195 or OVERFLOWABLE is <0 and any overflow occurs
196 We set TREE_CONSTANT_OVERFLOWED if,
197 CONST_OVERFLOWED is non-zero
198 or we set TREE_OVERFLOWED.
199 We return either the original T, or a copy. */
/* NOTE(review): many interior lines (return type, early returns, braces,
   some conditions) are elided in this excerpt; code kept byte-identical.  */
202 force_fit_type (tree t, int overflowable,
203 bool overflowed, bool overflowed_const)
205 unsigned HOST_WIDE_INT low;
208 int sign_extended_type;
/* Only INTEGER_CST nodes are processed; presumably others return early
   (the return statement is elided here) -- confirm against full file.  */
210 if (TREE_CODE (t) != INTEGER_CST)
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
220 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* Size types *are* sign extended. */
222 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
223 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
224 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
226 /* First clear all bits that are beyond the type's precision. */
228 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
230 else if (prec > HOST_BITS_PER_WIDE_INT)
231 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
235 if (prec < HOST_BITS_PER_WIDE_INT)
236 low &= ~((HOST_WIDE_INT) (-1) << prec);
/* Then sign extend the value back into the high bits if the type is
   treated as signed (see sign_extended_type above).  */
239 if (!sign_extended_type)
240 /* No sign extension */;
241 else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
242 /* Correct width already. */;
243 else if (prec > HOST_BITS_PER_WIDE_INT)
245 /* Sign extend top half? */
246 if (high & ((unsigned HOST_WIDE_INT)1
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
250 else if (prec == HOST_BITS_PER_WIDE_INT)
252 if ((HOST_WIDE_INT)low < 0)
257 /* Sign extend bottom half? */
258 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
261 low |= (HOST_WIDE_INT)(-1) << prec;
265 /* If the value changed, return a new node. */
266 if (overflowed || overflowed_const
267 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
269 t = build_int_cst_wide (TREE_TYPE (t), low, high);
/* Set the overflow flags on the (possibly new) node per the contract in
   the header comment above.  */
273 || (overflowable > 0 && sign_extended_type))
276 TREE_OVERFLOW (t) = 1;
277 TREE_CONSTANT_OVERFLOW (t) = 1;
279 else if (overflowed_const)
282 TREE_CONSTANT_OVERFLOW (t) = 1;
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the return-type line, braces and the low-word addition
   are elided in this excerpt; code kept byte-identical.  */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
/* (l < l1) is the carry out of the (elided) low-word addition l = l1 + l2:
   unsigned wraparound means the sum is smaller than an operand iff it
   carried.  */
303 h = h1 + h2 + (l < l1);
/* Nonzero return means signed overflow; see OVERFLOW_SUM_SIGN.  */
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the negation body itself is elided in this excerpt;
   only the signature and overflow test remain.  */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
/* Overflow iff both the input and the result have the sign bit set,
   i.e. the operand was the minimum (most negative) doubleword value.  */
323 return (*hv & h1) < 0;
333 /* Multiply two doubleword integers with doubleword result.
334 Return nonzero if the operation overflows, assuming it's signed.
335 Each argument is given as two `HOST_WIDE_INT' pieces.
336 One argument is L1 and H1; the other, L2 and H2.
337 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): several interior lines (braces, index computation `k',
   early-out special cases) are elided in this excerpt.  */
340 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
341 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
342 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
344 HOST_WIDE_INT arg1[4];
345 HOST_WIDE_INT arg2[4];
346 HOST_WIDE_INT prod[4 * 2];
347 unsigned HOST_WIDE_INT carry;
349 unsigned HOST_WIDE_INT toplow, neglow;
350 HOST_WIDE_INT tophigh, neghigh;
/* Split each operand into 4 half-words, then do schoolbook multiply.  */
352 encode (arg1, l1, h1);
353 encode (arg2, l2, h2);
355 memset (prod, 0, sizeof prod);
357 for (i = 0; i < 4; i++)
360 for (j = 0; j < 4; j++)
363 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
364 carry += arg1[i] * arg2[j];
365 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
367 prod[k] = LOWPART (carry);
368 carry = HIGHPART (carry);
373 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
375 /* Check for overflow by calculating the top half of the answer in full;
376 it should agree with the low half's sign bit. */
377 decode (prod + 4, &toplow, &tophigh);
/* Correct the unsigned top half for negative signed operands: subtract
   the other operand from the top for each operand with sign bit set
   (the guarding `if (h1 < 0)' / `if (h2 < 0)' lines are elided here).  */
380 neg_double (l2, h2, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
385 neg_double (l1, h1, &neglow, &neghigh);
386 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is the sign extension of the low half:
   all-ones for negative results, all-zeros for non-negative.  */
388 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
391 /* Shift the doubleword integer in L1, H1 left by COUNT places
392 keeping only PREC bits of result.
393 Shift right if COUNT is negative.
394 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
395 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): braces, the `count < 0' guard and several assignments
   are elided in this excerpt; code kept byte-identical.  */
398 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
399 HOST_WIDE_INT count, unsigned int prec,
400 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
402 unsigned HOST_WIDE_INT signmask;
/* Negative COUNT delegates to the right-shift routine.  */
406 rshift_double (l1, h1, -count, prec, lv, hv, arith);
410 if (SHIFT_COUNT_TRUNCATED)
413 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
415 /* Shifting by the host word size is undefined according to the
416 ANSI standard, so we must handle this as a special case. */
420 else if (count >= HOST_BITS_PER_WIDE_INT)
422 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* General case: the double shift-right of L1 avoids the undefined
   behavior of shifting by exactly the word width when count == 0.  */
427 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
428 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
432 /* Sign extend all bits that are beyond the precision. */
/* signmask is all-ones if the (prec-1)th result bit is set, else zero.  */
434 signmask = -((prec > HOST_BITS_PER_WIDE_INT
435 ? ((unsigned HOST_WIDE_INT) *hv
436 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
437 : (*lv >> (prec - 1))) & 1);
439 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
441 else if (prec >= HOST_BITS_PER_WIDE_INT)
443 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
444 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
449 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
450 *lv |= signmask << prec;
454 /* Shift the doubleword integer in L1, H1 right by COUNT places
455 keeping only PREC bits of result. COUNT must be positive.
456 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
457 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the `int arith' parameter line, braces and several
   assignments are elided in this excerpt; code kept byte-identical.  */
460 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
461 HOST_WIDE_INT count, unsigned int prec,
462 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
465 unsigned HOST_WIDE_INT signmask;
/* For arithmetic shifts, signmask replicates the input's sign bit.  */
468 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
471 if (SHIFT_COUNT_TRUNCATED)
474 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
476 /* Shifting by the host word size is undefined according to the
477 ANSI standard, so we must handle this as a special case. */
481 else if (count >= HOST_BITS_PER_WIDE_INT)
484 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
488 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* The split left shift of H1 avoids undefined shift-by-word-width
   when count == 0.  */
490 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
493 /* Zero / sign extend all bits that are beyond the precision. */
495 if (count >= (HOST_WIDE_INT)prec)
500 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
502 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
504 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
505 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
510 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
511 *lv |= signmask << (prec - count);
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the count normalization and the final OR of the two
   shifted halves are elided in this excerpt.  */
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
/* Rotation = (x << count) | (x >> (prec - count)), built from the two
   logical shifts below.  */
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
538 /* Rotate the doubleword integer in L1, H1 right by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): original comment said "left"; this is the right-rotate
   routine (it right-shifts by COUNT), mirror of lrotate_double above.
   The final OR of the two shifted halves is elided in this excerpt.  */
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
/* Rotation = (x >> count) | (x << (prec - count)).  */
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
/* NOTE(review): many interior lines (braces, case labels, the sign
   computation, loop bodies) are elided in this excerpt.  The only code
   change made here is the repair of the garbled `<wice' token at the
   mul_double call below (original line 805): it must be `&ltwice' to
   pass the address of ltwice, matching `&htwice' and mul_double's
   `unsigned HOST_WIDE_INT *lv' parameter.  Everything else is
   byte-identical.  */
570 div_and_round_double (enum tree_code code, int uns,
571 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig,
573 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig,
575 unsigned HOST_WIDE_INT *lquo,
576 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
580 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
581 HOST_WIDE_INT den[4], quo[4];
583 unsigned HOST_WIDE_INT work;
584 unsigned HOST_WIDE_INT carry = 0;
585 unsigned HOST_WIDE_INT lnum = lnum_orig;
586 HOST_WIDE_INT hnum = hnum_orig;
587 unsigned HOST_WIDE_INT lden = lden_orig;
588 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute divisor 1 so the
   algorithm still terminates with a defined result.  */
591 if (hden == 0 && lden == 0)
592 overflow = 1, lden = 1;
594 /* Calculate quotient sign and convert operands to unsigned. */
600 /* (minimum integer) / (-1) is the only overflow case. */
601 if (neg_double (lnum, hnum, &lnum, &hnum)
602 && ((HOST_WIDE_INT) lden & hden) == -1)
608 neg_double (lden, hden, &lden, &hden);
612 if (hnum == 0 && hden == 0)
613 { /* single precision */
615 /* This unsigned division rounds toward zero. */
621 { /* trivial case: dividend < divisor */
622 /* hden != 0 already checked. */
629 memset (quo, 0, sizeof quo);
/* Full multiword path: work in half-word "digits" (see encode/decode).  */
631 memset (num, 0, sizeof num); /* to zero 9th element */
632 memset (den, 0, sizeof den);
634 encode (num, lnum, hnum);
635 encode (den, lden, hden);
637 /* Special code for when the divisor < BASE. */
638 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
640 /* hnum != 0 already checked. */
641 for (i = 4 - 1; i >= 0; i--)
643 work = num[i] + carry * BASE;
644 quo[i] = work / lden;
650 /* Full double precision division,
651 with thanks to Don Knuth's "Seminumerical Algorithms". */
652 int num_hi_sig, den_hi_sig;
653 unsigned HOST_WIDE_INT quo_est, scale;
655 /* Find the highest nonzero divisor digit. */
656 for (i = 4 - 1;; i--)
663 /* Insure that the first digit of the divisor is at least BASE/2.
664 This is required by the quotient digit estimation algorithm. */
666 scale = BASE / (den[den_hi_sig] + 1);
668 { /* scale divisor and dividend */
670 for (i = 0; i <= 4 - 1; i++)
672 work = (num[i] * scale) + carry;
673 num[i] = LOWPART (work);
674 carry = HIGHPART (work);
679 for (i = 0; i <= 4 - 1; i++)
681 work = (den[i] * scale) + carry;
682 den[i] = LOWPART (work);
683 carry = HIGHPART (work);
684 if (den[i] != 0) den_hi_sig = i;
/* Main loop of Knuth's Algorithm D: one quotient digit per iteration.  */
691 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
693 /* Guess the next quotient digit, quo_est, by dividing the first
694 two remaining dividend digits by the high order quotient digit.
695 quo_est is never low and is at most 2 high. */
696 unsigned HOST_WIDE_INT tmp;
698 num_hi_sig = i + den_hi_sig + 1;
699 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
700 if (num[num_hi_sig] != den[den_hi_sig])
701 quo_est = work / den[den_hi_sig];
705 /* Refine quo_est so it's usually correct, and at most one high. */
706 tmp = work - quo_est * den[den_hi_sig];
708 && (den[den_hi_sig - 1] * quo_est
709 > (tmp * BASE + num[num_hi_sig - 2])))
712 /* Try QUO_EST as the quotient digit, by multiplying the
713 divisor by QUO_EST and subtracting from the remaining dividend.
714 Keep in mind that QUO_EST is the I - 1st digit. */
717 for (j = 0; j <= den_hi_sig; j++)
719 work = quo_est * den[j] + carry;
720 carry = HIGHPART (work);
721 work = num[i + j] - LOWPART (work);
722 num[i + j] = LOWPART (work);
723 carry += HIGHPART (work) != 0;
726 /* If quo_est was high by one, then num[i] went negative and
727 we need to correct things. */
728 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
731 carry = 0; /* add divisor back in */
732 for (j = 0; j <= den_hi_sig; j++)
734 work = num[i + j] + den[j] + carry;
735 carry = HIGHPART (work);
736 num[i + j] = LOWPART (work);
739 num [num_hi_sig] += carry;
742 /* Store the quotient digit. */
747 decode (quo, lquo, hquo);
750 /* If result is negative, make it so. */
752 neg_double (*lquo, *hquo, lquo, hquo);
754 /* Compute trial remainder: rem = num - (quo * den) */
755 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
756 neg_double (*lrem, *hrem, lrem, hrem);
757 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Apply the rounding mode requested by CODE to the truncated quotient.  */
762 case TRUNC_MOD_EXPR: /* round toward zero */
763 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
767 case FLOOR_MOD_EXPR: /* round toward negative infinity */
768 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
771 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
779 case CEIL_MOD_EXPR: /* round toward positive infinity */
780 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
782 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
790 case ROUND_MOD_EXPR: /* round to closest integer */
792 unsigned HOST_WIDE_INT labs_rem = *lrem;
793 HOST_WIDE_INT habs_rem = *hrem;
794 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
795 HOST_WIDE_INT habs_den = hden, htwice;
797 /* Get absolute values. */
799 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
801 neg_double (lden, hden, &labs_den, &habs_den);
803 /* If (2 * abs (lrem) >= abs (lden)) */
804 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
805 labs_rem, habs_rem, &ltwice, &htwice);
807 if (((unsigned HOST_WIDE_INT) habs_den
808 < (unsigned HOST_WIDE_INT) htwice)
809 || (((unsigned HOST_WIDE_INT) habs_den
810 == (unsigned HOST_WIDE_INT) htwice)
811 && (labs_den < ltwice)))
815 add_double (*lquo, *hquo,
816 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
831 /* Compute true remainder: rem = num - (quo * den) */
832 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
833 neg_double (*lrem, *hrem, lrem, hrem);
834 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
838 /* Return true if built-in mathematical function specified by CODE
839 preserves the sign of its argument, i.e. -f(x) == f(-x). */
/* NOTE(review): the entire switch body (the list of odd built-ins such
   as sin/tan/asin) is elided in this excerpt.  */
842 negate_mathfn_p (enum built_in_function code)
866 /* Check whether we may negate an integer constant T without causing
/* NOTE(review): the rest of the header comment, the return type and
   several early-return lines are elided in this excerpt.  */
870 may_negate_without_overflow_p (tree t)
872 unsigned HOST_WIDE_INT val;
876 if (TREE_CODE (t) != INTEGER_CST)
879 type = TREE_TYPE (t);
880 if (TYPE_UNSIGNED (type))
883 prec = TYPE_PRECISION (type);
/* For types wider than one host word, only the high word matters once
   the low word is zero.  */
884 if (prec > HOST_BITS_PER_WIDE_INT)
886 if (TREE_INT_CST_LOW (t) != 0)
888 prec -= HOST_BITS_PER_WIDE_INT;
889 val = TREE_INT_CST_HIGH (t);
892 val = TREE_INT_CST_LOW (t);
893 if (prec < HOST_BITS_PER_WIDE_INT)
894 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows only for the most negative value of the type,
   whose bit pattern is exactly 1 << (prec - 1).  */
895 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
898 /* Determine whether an expression T can be cheaply negated using
899 the function negate_expr. */
/* NOTE(review): the return type, null check, case labels and the final
   default/return lines are elided in this excerpt.  */
902 negate_expr_p (tree t)
909 type = TREE_TYPE (t);
912 switch (TREE_CODE (t))
/* INTEGER_CST case: negation is safe for unsigned types or when -ftrapv
   is off; otherwise only if the value is not the type's minimum.  */
915 if (TYPE_UNSIGNED (type) || ! flag_trapv)
918 /* Check that -CST will not overflow type. */
919 return may_negate_without_overflow_p (t);
/* COMPLEX_CST case: both parts must be negatable.  */
926 return negate_expr_p (TREE_REALPART (t))
927 && negate_expr_p (TREE_IMAGPART (t));
/* PLUS_EXPR case: FP addition may not be reassociated unless unsafe
   math optimizations are enabled.  */
930 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
932 /* -(A + B) -> (-B) - A. */
933 if (negate_expr_p (TREE_OPERAND (t, 1))
934 && reorder_operands_p (TREE_OPERAND (t, 0),
935 TREE_OPERAND (t, 1)))
937 /* -(A + B) -> (-A) - B. */
938 return negate_expr_p (TREE_OPERAND (t, 0));
941 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
942 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
943 && reorder_operands_p (TREE_OPERAND (t, 0),
944 TREE_OPERAND (t, 1));
947 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* MULT/DIV-style case: may negate either operand unless the rounding
   mode depends on operand signs.  */
953 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
954 return negate_expr_p (TREE_OPERAND (t, 1))
955 || negate_expr_p (TREE_OPERAND (t, 0));
959 /* Negate -((double)float) as (double)(-float). */
960 if (TREE_CODE (type) == REAL_TYPE)
962 tree tem = strip_float_extensions (t);
964 return negate_expr_p (tem);
969 /* Negate -f(x) as f(-x). */
970 if (negate_mathfn_p (builtin_mathfn_code (t)))
971 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
975 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
976 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
978 tree op1 = TREE_OPERAND (t, 1);
/* Matches only a shift by exactly (precision - 1), i.e. a sign bit
   extraction.  */
979 if (TREE_INT_CST_HIGH (op1) == 0
980 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
981 == TREE_INT_CST_LOW (op1))
992 /* Given T, an expression, return the negation of T. Allow for T to be
993 null, in which case return null. */
/* NOTE(review): the declarator line (`static tree negate_expr (tree t)'),
   the null check, case labels, braces and the default fall-through are
   elided in this excerpt; code kept byte-identical.  */
1004 type = TREE_TYPE (t);
1005 STRIP_SIGN_NOPS (t);
1007 switch (TREE_CODE (t))
/* INTEGER_CST: fold the negation; accept the result unless it overflowed
   a signed type under conditions elided here.  */
1010 tem = fold_negate_const (t, type);
1011 if (! TREE_OVERFLOW (tem)
1012 || TYPE_UNSIGNED (type)
/* REAL_CST: */
1018 tem = fold_negate_const (t, type);
1019 /* Two's complement FP formats, such as c4x, may overflow. */
1020 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1021 return fold_convert (type, tem);
/* COMPLEX_CST: negate both parts, constant-fold if both stay constant.  */
1026 tree rpart = negate_expr (TREE_REALPART (t));
1027 tree ipart = negate_expr (TREE_IMAGPART (t));
1029 if ((TREE_CODE (rpart) == REAL_CST
1030 && TREE_CODE (ipart) == REAL_CST)
1031 || (TREE_CODE (rpart) == INTEGER_CST
1032 && TREE_CODE (ipart) == INTEGER_CST))
1033 return build_complex (type, rpart, ipart);
/* NEGATE_EXPR: --x -> x.  */
1038 return fold_convert (type, TREE_OPERAND (t, 0));
/* PLUS_EXPR: */
1041 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1043 /* -(A + B) -> (-B) - A. */
1044 if (negate_expr_p (TREE_OPERAND (t, 1))
1045 && reorder_operands_p (TREE_OPERAND (t, 0),
1046 TREE_OPERAND (t, 1)))
1048 tem = negate_expr (TREE_OPERAND (t, 1));
1049 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1050 tem, TREE_OPERAND (t, 0)));
1051 return fold_convert (type, tem);
1054 /* -(A + B) -> (-A) - B. */
1055 if (negate_expr_p (TREE_OPERAND (t, 0)))
1057 tem = negate_expr (TREE_OPERAND (t, 0));
1058 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1059 tem, TREE_OPERAND (t, 1)));
1060 return fold_convert (type, tem);
1066 /* - (A - B) -> B - A */
1067 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1068 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1069 return fold_convert (type,
1070 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1071 TREE_OPERAND (t, 1),
1072 TREE_OPERAND (t, 0))));
1076 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* MULT/DIV-style: push the negation into whichever operand accepts it
   cheaply, preferring operand 1.  */
1082 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1084 tem = TREE_OPERAND (t, 1);
1085 if (negate_expr_p (tem))
1086 return fold_convert (type,
1087 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1088 TREE_OPERAND (t, 0),
1089 negate_expr (tem))));
1090 tem = TREE_OPERAND (t, 0);
1091 if (negate_expr_p (tem))
1092 return fold_convert (type,
1093 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1095 TREE_OPERAND (t, 1))));
1100 /* Convert -((double)float) into (double)(-float). */
1101 if (TREE_CODE (type) == REAL_TYPE)
1103 tem = strip_float_extensions (t);
1104 if (tem != t && negate_expr_p (tem))
1105 return fold_convert (type, negate_expr (tem));
1110 /* Negate -f(x) as f(-x). */
1111 if (negate_mathfn_p (builtin_mathfn_code (t))
1112 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1114 tree fndecl, arg, arglist;
1116 fndecl = get_callee_fndecl (t);
1117 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1118 arglist = build_tree_list (NULL_TREE, arg);
1119 return build_function_call_expr (fndecl, arglist);
1124 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1125 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1127 tree op1 = TREE_OPERAND (t, 1);
1128 if (TREE_INT_CST_HIGH (op1) == 0
1129 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1130 == TREE_INT_CST_LOW (op1))
/* Flip the signedness of the shifted operand so the shift becomes the
   opposite (logical vs. arithmetic) kind, which equals the negation.  */
1132 tree ntype = TYPE_UNSIGNED (type)
1133 ? lang_hooks.types.signed_type (type)
1134 : lang_hooks.types.unsigned_type (type);
1135 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1136 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1137 return fold_convert (type, temp);
/* Fallback: build an explicit NEGATE_EXPR.  */
1146 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1147 return fold_convert (type, tem);
1150 /* Split a tree IN into a constant, literal and variable parts that could be
1151 combined with CODE to make IN. "constant" means an expression with
1152 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1153 commutative arithmetic operation. Store the constant part into *CONP,
1154 the literal in *LITP and return the variable part. If a part isn't
1155 present, set it to null. If the tree does not decompose in this way,
1156 return the entire tree as the variable part and the other parts as null.
1158 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1159 case, we negate an operand that was subtracted. Except if it is a
1160 literal for which we use *MINUS_LITP instead.
1162 If NEGATE_P is true, we are negating all of IN, again except a literal
1163 for which we use *MINUS_LITP instead.
1165 If IN is itself a literal or constant, return it as appropriate.
1167 Note that we do not guarantee that any of the three values will be the
1168 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): the return type, `var' declaration, output initialization,
   braces and the final `return var;' are elided in this excerpt.  */
1171 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1172 tree *minus_litp, int negate_p)
1180 /* Strip any conversions that don't change the machine mode or signedness. */
1181 STRIP_SIGN_NOPS (in);
1183 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1185 else if (TREE_CODE (in) == code
1186 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1187 /* We can associate addition and subtraction together (even
1188 though the C standard doesn't say so) for integers because
1189 the value is not affected. For reals, the value might be
1190 affected, so we can't. */
1191 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1192 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1194 tree op0 = TREE_OPERAND (in, 0);
1195 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that op1 was subtracted; neg_*_p track which output
   slot inherits that pending negation.  */
1196 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1197 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1199 /* First see if either of the operands is a literal, then a constant. */
1200 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1201 *litp = op0, op0 = 0;
1202 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1203 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1205 if (op0 != 0 && TREE_CONSTANT (op0))
1206 *conp = op0, op0 = 0;
1207 else if (op1 != 0 && TREE_CONSTANT (op1))
1208 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1210 /* If we haven't dealt with either operand, this is not a case we can
1211 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1212 if (op0 != 0 && op1 != 0)
1217 var = op1, neg_var_p = neg1_p;
1219 /* Now do any needed negations. */
/* A subtracted literal goes to *MINUS_LITP rather than being negated.  */
1221 *minus_litp = *litp, *litp = 0;
1223 *conp = negate_expr (*conp);
1225 var = negate_expr (var);
1227 else if (TREE_CONSTANT (in))
/* NEGATE_P handling (the guarding `if (negate_p)' line is elided):
   swap literal between *LITP and *MINUS_LITP, negate the rest.  */
1235 *minus_litp = *litp, *litp = 0;
1236 else if (*minus_litp)
1237 *litp = *minus_litp, *minus_litp = 0;
1238 *conp = negate_expr (*conp);
1239 var = negate_expr (var);
1245 /* Re-associate trees split by the above function. T1 and T2 are either
1246 expressions to associate or null. Return the new expression, if any. If
1247 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): the return type and the null-argument early returns
   (return the other operand when one is null) are elided in this
   excerpt; code kept byte-identical.  */
1250 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1257 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1258 try to fold this since we will have infinite recursion. But do
1259 deal with any NEGATE_EXPRs. */
1260 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1261 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1263 if (code == PLUS_EXPR)
/* Prefer x - y over x + (-y): strip one NEGATE_EXPR into a MINUS_EXPR.  */
1265 if (TREE_CODE (t1) == NEGATE_EXPR)
1266 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1267 fold_convert (type, TREE_OPERAND (t1, 0)));
1268 else if (TREE_CODE (t2) == NEGATE_EXPR)
1269 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1270 fold_convert (type, TREE_OPERAND (t2, 0)));
/* Build without fold to avoid the recursion noted above.  */
1272 return build2 (code, type, fold_convert (type, t1),
1273 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1276 return fold (build2 (code, type, fold_convert (type, t1),
1277 fold_convert (type, t2)));
1280 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1281 to produce a new constant.
1283 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): several lines of this function (including some switch/case
   labels and braces) are missing from this extract; added comments describe
   only the visible code.  */
1286 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1288 unsigned HOST_WIDE_INT int1l, int2l;
1289 HOST_WIDE_INT int1h, int2h;
1290 unsigned HOST_WIDE_INT low;
/* Scratch outputs for div_and_round_double when we only want one of
   quotient/remainder.  */
1292 unsigned HOST_WIDE_INT garbagel;
1293 HOST_WIDE_INT garbageh;
1295 tree type = TREE_TYPE (arg1);
1296 int uns = TYPE_UNSIGNED (type);
1298 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1300 int no_overflow = 0;
/* Split each double-word constant into low/high halves.  */
1302 int1l = TREE_INT_CST_LOW (arg1);
1303 int1h = TREE_INT_CST_HIGH (arg1);
1304 int2l = TREE_INT_CST_LOW (arg2);
1305 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations act independently on the two halves.  */
1310 low = int1l | int2l, hi = int1h | int2h;
1314 low = int1l ^ int2l, hi = int1h ^ int2h;
1318 low = int1l & int2l, hi = int1h & int2h;
1324 /* It's unclear from the C standard whether shifts can overflow.
1325 The following code ignores overflow; perhaps a C standard
1326 interpretation ruling is needed. */
1327 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1335 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1340 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation; the overflow
   test therefore compares the sign of HI against the original operand
   signs.  */
1344 neg_double (int2l, int2h, &low, &hi);
1345 add_double (int1l, int1h, low, hi, &low, &hi);
1346 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1350 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1353 case TRUNC_DIV_EXPR:
1354 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1355 case EXACT_DIV_EXPR:
1356 /* This is a shortcut for a common special case. */
1357 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1358 && ! TREE_CONSTANT_OVERFLOW (arg1)
1359 && ! TREE_CONSTANT_OVERFLOW (arg2)
1360 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1362 if (code == CEIL_DIV_EXPR)
/* Both operands fit in one positive word: plain host division
   is exact and cannot overflow.  */
1365 low = int1l / int2l, hi = 0;
1369 /* ... fall through ... */
1371 case ROUND_DIV_EXPR:
/* Division by one returns the dividend unchanged.  */
1372 if (int2h == 0 && int2l == 1)
1374 low = int1l, hi = int1h;
/* Dividing a nonzero value by itself — presumably yields one;
   the result assignment is elided in this extract.  */
1377 if (int1l == int2l && int1h == int2h
1378 && ! (int1l == 0 && int1h == 0))
1383 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1384 &low, &hi, &garbagel, &garbageh);
1387 case TRUNC_MOD_EXPR:
1388 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1389 /* This is a shortcut for a common special case. */
1390 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1391 && ! TREE_CONSTANT_OVERFLOW (arg1)
1392 && ! TREE_CONSTANT_OVERFLOW (arg2)
1393 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1395 if (code == CEIL_MOD_EXPR)
1397 low = int1l % int2l, hi = 0;
1401 /* ... fall through ... */
1403 case ROUND_MOD_EXPR:
/* Same helper as division, but we keep the remainder and discard
   the quotient into the garbage slots.  */
1404 overflow = div_and_round_double (code, uns,
1405 int1l, int1h, int2l, int2h,
1406 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: double-word comparison, unsigned or signed depending on
   the type; LOW temporarily holds the boolean "arg1 < arg2".  */
1412 low = (((unsigned HOST_WIDE_INT) int1h
1413 < (unsigned HOST_WIDE_INT) int2h)
1414 || (((unsigned HOST_WIDE_INT) int1h
1415 == (unsigned HOST_WIDE_INT) int2h)
1418 low = (int1h < int2h
1419 || (int1h == int2h && int1l < int2l));
1421 if (low == (code == MIN_EXPR))
1422 low = int1l, hi = int1h;
1424 low = int2l, hi = int2h;
1431 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1435 /* Propagate overflow flags ourselves. */
/* Overflow is only meaningful for signed types and sizetypes; also
   inherit any overflow already recorded on the operands.  */
1436 if (((!uns || is_sizetype) && overflow)
1437 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1440 TREE_OVERFLOW (t) = 1;
1441 TREE_CONSTANT_OVERFLOW (t) = 1;
1443 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1446 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Truncating path: force_fit_type narrows to the type and sets the
   overflow bits from the same conditions as above.  */
1450 t = force_fit_type (t, 1,
1451 ((!uns || is_sizetype) && overflow)
1452 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1453 TREE_CONSTANT_OVERFLOW (arg1)
1454 | TREE_CONSTANT_OVERFLOW (arg2));
1459 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1460 constant. We assume ARG1 and ARG2 have the same data type, or at least
1461 are the same kind of constant and the same machine mode.
1463 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): interior lines are missing from this extract (some operands
   of the complex-arithmetic calls are elided mid-expression); added comments
   cover only the visible code.  */
1466 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants: delegate to the double-word integer folder.  */
1471 if (TREE_CODE (arg1) == INTEGER_CST)
1472 return int_const_binop (code, arg1, arg2, notrunc);
1474 if (TREE_CODE (arg1) == REAL_CST)
1476 enum machine_mode mode;
1479 REAL_VALUE_TYPE value;
1482 d1 = TREE_REAL_CST (arg1);
1483 d2 = TREE_REAL_CST (arg2);
1485 type = TREE_TYPE (arg1);
1486 mode = TYPE_MODE (type);
1488 /* Don't perform operation if we honor signaling NaNs and
1489 either operand is a NaN. */
1490 if (HONOR_SNANS (mode)
1491 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1494 /* Don't perform operation if it would raise a division
1495 by zero exception. */
1496 if (code == RDIV_EXPR
1497 && REAL_VALUES_EQUAL (d2, dconst0)
1498 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1501 /* If either operand is a NaN, just return it. Otherwise, set up
1502 for floating-point trap; we return an overflow. */
1503 if (REAL_VALUE_ISNAN (d1))
1505 else if (REAL_VALUE_ISNAN (d2))
/* Compute in the operand's mode, then truncate back to the type's
   precision before wrapping in a tree constant.  */
1508 REAL_ARITHMETIC (value, code, d1, d2);
1510 t = build_real (type, real_value_truncate (mode, value));
1512 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1513 TREE_CONSTANT_OVERFLOW (t)
1515 | TREE_CONSTANT_OVERFLOW (arg1)
1516 | TREE_CONSTANT_OVERFLOW (arg2);
1519 if (TREE_CODE (arg1) == COMPLEX_CST)
1521 tree type = TREE_TYPE (arg1);
1522 tree r1 = TREE_REALPART (arg1);
1523 tree i1 = TREE_IMAGPART (arg1);
1524 tree r2 = TREE_REALPART (arg2);
1525 tree i2 = TREE_IMAGPART (arg2);
/* Complex addition/subtraction: fold componentwise.  */
1531 t = build_complex (type,
1532 const_binop (PLUS_EXPR, r1, r2, notrunc),
1533 const_binop (PLUS_EXPR, i1, i2, notrunc));
1537 t = build_complex (type,
1538 const_binop (MINUS_EXPR, r1, r2, notrunc),
1539 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiplication: (r1*r2 - i1*i2, r1*i2 + i1*r2); the inner
   operand lists are partly elided in this extract.  */
1543 t = build_complex (type,
1544 const_binop (MINUS_EXPR,
1545 const_binop (MULT_EXPR,
1547 const_binop (MULT_EXPR,
1550 const_binop (PLUS_EXPR,
1551 const_binop (MULT_EXPR,
1553 const_binop (MULT_EXPR,
/* Complex division: divide by |arg2|^2 = r2*r2 + i2*i2.  */
1561 = const_binop (PLUS_EXPR,
1562 const_binop (MULT_EXPR, r2, r2, notrunc),
1563 const_binop (MULT_EXPR, i2, i2, notrunc),
1566 t = build_complex (type,
/* Integral complex types use truncating division, floating
   types use real division.  */
1568 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1569 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1570 const_binop (PLUS_EXPR,
1571 const_binop (MULT_EXPR, r1, r2,
1573 const_binop (MULT_EXPR, i1, i2,
1576 magsquared, notrunc),
1578 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1579 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1580 const_binop (MINUS_EXPR,
1581 const_binop (MULT_EXPR, i1, r2,
1583 const_binop (MULT_EXPR, r1, i2,
1586 magsquared, notrunc));
1598 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1599 indicates which particular sizetype to create. */
1602 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes the global sizetype table (sizetype, bitsizetype, ...).  */
1604 return build_int_cst (sizetype_tab[(int) kind], number);
1607 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1608 is a tree code. The type of the result is taken from the operands.
1609 Both must be the same type integer type and it must be a size type.
1610 If the operands are constant, so is the result. */
1613 size_binop (enum tree_code code, tree arg0, tree arg1)
1615 tree type = TREE_TYPE (arg0);
/* Sanity-check the contract: both operands share one sizetype.
   (The failure branch is elided in this extract.)  */
1617 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1618 || type != TREE_TYPE (arg1))
1621 /* Handle the special case of two integer constants faster. */
1622 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1624 /* And some specific cases even faster than that. */
/* Identities: 0 + x, x +/- 0, 1 * x — return an operand directly
   without calling the folder.  */
1625 if (code == PLUS_EXPR && integer_zerop (arg0))
1627 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1628 && integer_zerop (arg1))
1630 else if (code == MULT_EXPR && integer_onep (arg0))
1633 /* Handle general case of two integer constants. */
1634 return int_const_binop (code, arg0, arg1, 0);
1637 if (arg0 == error_mark_node || arg1 == error_mark_node)
1638 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1640 return fold (build2 (code, type, arg0, arg1));
1643 /* Given two values, either both of sizetype or both of bitsizetype,
1644 compute the difference between the two values. Return the value
1645 in signed type corresponding to the type of the operands. */
1648 size_diffop (tree arg0, tree arg1)
1650 tree type = TREE_TYPE (arg0);
/* Both operands must share one sizetype (failure branch elided here).  */
1653 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1654 || type != TREE_TYPE (arg1))
1657 /* If the type is already signed, just do the simple thing. */
1658 if (!TYPE_UNSIGNED (type))
1659 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the unsigned size type.  */
1661 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1663 /* If either operand is not a constant, do the conversions to the signed
1664 type and subtract. The hardware will do the right thing with any
1665 overflow in the subtraction. */
1666 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1667 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1668 fold_convert (ctype, arg1));
1670 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1671 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1672 overflow) and negate (which can't either). Special-case a result
1673 of zero while we're here. */
1674 if (tree_int_cst_equal (arg0, arg1))
1675 return fold_convert (ctype, integer_zero_node);
1676 else if (tree_int_cst_lt (arg1, arg0))
1677 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) as 0 - (arg1 - arg0).  The tail
   of this expression is cut off in this extract.  */
1679 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1680 fold_convert (ctype, size_binop (MINUS_EXPR,
1685 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1686 type TYPE. If no simplification can be done return NULL_TREE. */
/* NOTE(review): interior lines are missing from this extract; added
   comments cover only the visible code.  */
1689 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* No-op conversion: already the requested type.  */
1694 if (TREE_TYPE (arg1) == type)
1697 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1699 if (TREE_CODE (arg1) == INTEGER_CST)
1701 /* If we would build a constant wider than GCC supports,
1702 leave the conversion unfolded. */
1703 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1706 /* Given an integer constant, make new constant with new type,
1707 appropriately sign-extended or truncated. */
1708 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1709 TREE_INT_CST_HIGH (arg1));
1711 t = force_fit_type (t,
1712 /* Don't set the overflow when
1713 converting a pointer */
1714 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Signed-negative converted to a (wider) unsigned type counts
   as overflow; OR in any overflow already on the operand.  */
1715 (TREE_INT_CST_HIGH (arg1) < 0
1716 && (TYPE_UNSIGNED (type)
1717 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1718 | TREE_OVERFLOW (arg1),
1719 TREE_CONSTANT_OVERFLOW (arg1));
1722 else if (TREE_CODE (arg1) == REAL_CST)
1724 /* The following code implements the floating point to integer
1725 conversion rules required by the Java Language Specification,
1726 that IEEE NaNs are mapped to zero and values that overflow
1727 the target precision saturate, i.e. values greater than
1728 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1729 are mapped to INT_MIN. These semantics are allowed by the
1730 C and C++ standards that simply state that the behavior of
1731 FP-to-integer conversion is unspecified upon overflow. */
1733 HOST_WIDE_INT high, low;
1735 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round X into R according to the requested FIX_* variant.  */
1739 case FIX_TRUNC_EXPR:
1740 real_trunc (&r, VOIDmode, &x);
1744 real_ceil (&r, VOIDmode, &x);
1747 case FIX_FLOOR_EXPR:
1748 real_floor (&r, VOIDmode, &x);
1751 case FIX_ROUND_EXPR:
1752 real_round (&r, VOIDmode, &x);
1759 /* If R is NaN, return zero and show we have an overflow. */
1760 if (REAL_VALUE_ISNAN (r))
1767 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's minimum when R underflows the range.  */
1772 tree lt = TYPE_MIN_VALUE (type);
1773 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1774 if (REAL_VALUES_LESS (r, l))
1777 high = TREE_INT_CST_HIGH (lt);
1778 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE's maximum when R overflows the range.  */
1784 tree ut = TYPE_MAX_VALUE (type);
1787 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1788 if (REAL_VALUES_LESS (u, r))
1791 high = TREE_INT_CST_HIGH (ut);
1792 low = TREE_INT_CST_LOW (ut);
1798 REAL_VALUE_TO_INT (&low, &high, r);
1800 t = build_int_cst_wide (type, low, high);
1802 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1803 TREE_CONSTANT_OVERFLOW (arg1));
1807 else if (TREE_CODE (type) == REAL_TYPE)
1809 if (TREE_CODE (arg1) == INTEGER_CST)
1810 return build_real_from_int_cst (type, arg1);
1811 if (TREE_CODE (arg1) == REAL_CST)
/* Real-to-real conversion of a NaN: retype a copy rather than
   truncating, which could perturb the NaN payload.  */
1813 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1815 /* We make a copy of ARG1 so that we don't modify an
1816 existing constant tree. */
1817 t = copy_node (arg1);
1818 TREE_TYPE (t) = type;
1822 t = build_real (type,
1823 real_value_truncate (TYPE_MODE (type),
1824 TREE_REAL_CST (arg1)));
1826 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1827 TREE_CONSTANT_OVERFLOW (t)
1828 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1835 /* Convert expression ARG to type TYPE. Used by the middle-end for
1836 simple conversions in preference to calling the front-end's convert. */
1839 fold_convert (tree type, tree arg)
1841 tree orig = TREE_TYPE (arg);
/* Bail out on error nodes anywhere in the conversion.  */
1847 if (TREE_CODE (arg) == ERROR_MARK
1848 || TREE_CODE (type) == ERROR_MARK
1849 || TREE_CODE (orig) == ERROR_MARK)
1850 return error_mark_node;
/* Same (or language-compatible) type: a plain NOP conversion suffices.  */
1852 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1853 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1854 TYPE_MAIN_VARIANT (orig)))
1855 return fold (build1 (NOP_EXPR, type, arg));
/* Target is integral / pointer / offset.  */
1857 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1858 || TREE_CODE (type) == OFFSET_TYPE)
1860 if (TREE_CODE (arg) == INTEGER_CST)
1862 tem = fold_convert_const (NOP_EXPR, type, arg);
1863 if (tem != NULL_TREE)
1866 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1867 || TREE_CODE (orig) == OFFSET_TYPE)
1868 return fold (build1 (NOP_EXPR, type, arg));
/* Complex source: convert via its real part.  */
1869 if (TREE_CODE (orig) == COMPLEX_TYPE)
1871 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1872 return fold_convert (type, tem);
1874 if (TREE_CODE (orig) == VECTOR_TYPE
1875 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1876 return fold (build1 (NOP_EXPR, type, arg));
/* Target is a real (floating-point) type.  */
1878 else if (TREE_CODE (type) == REAL_TYPE)
1880 if (TREE_CODE (arg) == INTEGER_CST)
1882 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1883 if (tem != NULL_TREE)
1886 else if (TREE_CODE (arg) == REAL_CST)
1888 tem = fold_convert_const (NOP_EXPR, type, arg);
1889 if (tem != NULL_TREE)
1893 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1894 return fold (build1 (FLOAT_EXPR, type, arg));
/* Real-to-real: with -ffloat-store use CONVERT_EXPR so the value is
   actually narrowed through memory semantics.  */
1895 if (TREE_CODE (orig) == REAL_TYPE)
1896 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1898 if (TREE_CODE (orig) == COMPLEX_TYPE)
1900 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1901 return fold_convert (type, tem);
/* Target is complex: widen a scalar to (value, 0), or convert a
   complex componentwise.  */
1904 else if (TREE_CODE (type) == COMPLEX_TYPE)
1906 if (INTEGRAL_TYPE_P (orig)
1907 || POINTER_TYPE_P (orig)
1908 || TREE_CODE (orig) == REAL_TYPE)
1909 return build2 (COMPLEX_EXPR, type,
1910 fold_convert (TREE_TYPE (type), arg),
1911 fold_convert (TREE_TYPE (type), integer_zero_node));
1912 if (TREE_CODE (orig) == COMPLEX_TYPE)
1916 if (TREE_CODE (arg) == COMPLEX_EXPR)
1918 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1919 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1920 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* ARG is used twice below, so protect it from double evaluation.  */
1923 arg = save_expr (arg);
1924 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1925 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1926 rpart = fold_convert (TREE_TYPE (type), rpart);
1927 ipart = fold_convert (TREE_TYPE (type), ipart);
1928 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* Target is a vector: same-size bit reinterpretation only.  */
1931 else if (TREE_CODE (type) == VECTOR_TYPE)
1933 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1934 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1936 if (TREE_CODE (orig) == VECTOR_TYPE
1937 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1938 return fold (build1 (NOP_EXPR, type, arg));
/* Converting to void: keep only side effects of ARG.  */
1940 else if (VOID_TYPE_P (type))
1941 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1945 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): the function signature and many case labels are missing
   from this extract — presumably this is non_lvalue (tree x); confirm
   against the complete file.  */
1950 /* We only need to wrap lvalue tree codes. */
1951 switch (TREE_CODE (x))
1963 case ARRAY_RANGE_REF:
1969 case PREINCREMENT_EXPR:
1970 case PREDECREMENT_EXPR:
1972 case TRY_CATCH_EXPR:
1973 case WITH_CLEANUP_EXPR:
1984 /* Assume the worst for front-end tree codes. */
1985 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap in NON_LVALUE_EXPR so the result cannot be assigned to.  */
1989 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1992 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1993 Zero means allow extended lvalues. */
1995 int pedantic_lvalues;
1997 /* When pedantic, return an expr equal to X but certainly not valid as a
1998 pedantic lvalue. Otherwise, return X. */
2001 pedantic_non_lvalue (tree x)
/* Only strip lvalue-ness when the global pedantic flag is set; the
   else-return of X is elided in this extract.  */
2003 if (pedantic_lvalues)
2004 return non_lvalue (x);
2009 /* Given a tree comparison code, return the code that is the logical inverse
2010 of the given code. It is not safe to do this for floating-point
2011 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2012 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
/* NOTE(review): the switch statement and its case labels are elided in
   this extract; only the return expressions are visible.  */
2014 static enum tree_code
2015 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inversion could change which inputs
   trap (early-exit branch elided here).  */
2017 if (honor_nans && flag_trapping_math)
/* Ordered comparisons invert to their unordered counterparts when
   NaNs must be honored, and to plain ordered inverses otherwise.  */
2027 return honor_nans ? UNLE_EXPR : LE_EXPR;
2029 return honor_nans ? UNLT_EXPR : LT_EXPR;
2031 return honor_nans ? UNGE_EXPR : GE_EXPR;
2033 return honor_nans ? UNGT_EXPR : GT_EXPR;
2047 return UNORDERED_EXPR;
2048 case UNORDERED_EXPR:
2049 return ORDERED_EXPR;
2055 /* Similar, but return the comparison that results if the operands are
2056 swapped. This is safe for floating-point. */
/* NOTE(review): the body of this function is missing from this extract.  */
2059 swap_tree_comparison (enum tree_code code)
2080 /* Convert a comparison tree code from an enum tree_code representation
2081 into a compcode bit-based encoding. This function is the inverse of
2082 compcode_to_comparison. */
/* NOTE(review): the switch skeleton and most case labels for the ordered
   comparisons are elided in this extract.  */
2084 static enum comparison_code
2085 comparison_to_compcode (enum tree_code code)
2102 return COMPCODE_ORD;
2103 case UNORDERED_EXPR:
2104 return COMPCODE_UNORD;
2106 return COMPCODE_UNLT;
2108 return COMPCODE_UNEQ;
2110 return COMPCODE_UNLE;
2112 return COMPCODE_UNGT;
2114 return COMPCODE_LTGT;
2116 return COMPCODE_UNGE;
2122 /* Convert a compcode bit-based encoding of a comparison operator back
2123 to GCC's enum tree_code representation. This function is the
2124 inverse of comparison_to_compcode. */
/* NOTE(review): most of the switch body is elided in this extract.  */
2126 static enum tree_code
2127 compcode_to_comparison (enum comparison_code code)
2144 return ORDERED_EXPR;
2145 case COMPCODE_UNORD:
2146 return UNORDERED_EXPR;
2164 /* Return a tree for the comparison which is the combination of
2165 doing the AND or OR (depending on CODE) of the two operations LCODE
2166 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2167 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2168 if this makes the transformation invalid. */
2171 combine_comparisons (enum tree_code code, enum tree_code lcode,
2172 enum tree_code rcode, tree truth_type,
2173 tree ll_arg, tree lr_arg)
2175 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
/* Work in the bit-based compcode domain, where AND/OR of comparisons
   is plain bitwise AND/OR of their encodings.  */
2176 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2177 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2178 enum comparison_code compcode;
2182 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2183 compcode = lcompcode & rcompcode;
2186 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2187 compcode = lcompcode | rcompcode;
2196 /* Eliminate unordered comparisons, as well as LTGT and ORD
2197 which are not used unless the mode has NaNs. */
2198 compcode &= ~COMPCODE_UNORD;
2199 if (compcode == COMPCODE_LTGT)
2200 compcode = COMPCODE_NE;
2201 else if (compcode == COMPCODE_ORD)
2202 compcode = COMPCODE_TRUE;
2204 else if (flag_trapping_math)
2206 /* Check that the original operation and the optimized ones will trap
2207 under the same condition. */
2208 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2209 && (lcompcode != COMPCODE_EQ)
2210 && (lcompcode != COMPCODE_ORD);
2211 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2212 && (rcompcode != COMPCODE_EQ)
2213 && (rcompcode != COMPCODE_ORD);
2214 bool trap = (compcode & COMPCODE_UNORD) == 0
2215 && (compcode != COMPCODE_EQ)
2216 && (compcode != COMPCODE_ORD);
2218 /* In a short-circuited boolean expression the LHS might be
2219 such that the RHS, if evaluated, will never trap. For
2220 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2221 if neither x nor y is NaN. (This is a mixed blessing: for
2222 example, the expression above will never trap, hence
2223 optimizing it to x < y would be invalid). */
2224 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2225 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2228 /* If the comparison was short-circuited, and only the RHS
2229 trapped, we may now generate a spurious trap. */
2231 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2234 /* If we changed the conditions that cause a trap, we lose. */
2235 if ((ltrap || rtrap) != trap)
/* The combined compcode may degenerate to always-true/false.  */
2239 if (compcode == COMPCODE_TRUE)
2240 return constant_boolean_node (true, truth_type);
2241 else if (compcode == COMPCODE_FALSE)
2242 return constant_boolean_node (false, truth_type);
2244 return fold (build2 (compcode_to_comparison (compcode),
2245 truth_type, ll_arg, lr_arg));
2248 /* Return nonzero if CODE is a tree code that represents a truth value. */
2251 truth_value_p (enum tree_code code)
/* True for all comparison codes (class '<') and the boolean
   connectives; TRUTH_NOT_EXPR is included as well.  */
2253 return (TREE_CODE_CLASS (code) == '<'
2254 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2255 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2256 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2259 /* Return nonzero if two operands (typically of the same tree node)
2260 are necessarily equal. If either argument has side-effects this
2261 function returns zero. FLAGS modifies behavior as follows:
2263 If OEP_ONLY_CONST is set, only return nonzero for constants.
2264 This function tests whether the operands are indistinguishable;
2265 it does not test whether they are equal using C's == operation.
2266 The distinction is important for IEEE floating point, because
2267 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2268 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2270 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2271 even though it may hold multiple values during a function.
2272 This is because a GCC tree node guarantees that nothing else is
2273 executed between the evaluation of its "operands" (which may often
2274 be evaluated in arbitrary order). Hence if the operands themselves
2275 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2276 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2277 unset means assuming isochronic (or instantaneous) tree equivalence.
2278 Unless comparing arbitrary expression trees, such as from different
2279 statements, this flag can usually be left unset.
2281 If OEP_PURE_SAME is set, then pure functions with identical arguments
2282 are considered the same. It is used when the caller has other ways
2283 to ensure that global memory is unchanged in between. */
/* NOTE(review): this extract is missing interior lines (some switch/case
   labels and braces); added comments cover only the visible code.  */
2286 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2288 /* If one is specified and the other isn't, they aren't equal and if
2289 neither is specified, they are.
2291 ??? This is temporary and is meant only to handle the cases of the
2292 optional operands for COMPONENT_REF and ARRAY_REF. */
2293 if ((arg0 && !arg1) || (!arg0 && arg1))
2295 else if (!arg0 && !arg1)
2297 /* If either is ERROR_MARK, they aren't equal. */
2298 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2301 /* If both types don't have the same signedness, then we can't consider
2302 them equal. We must check this before the STRIP_NOPS calls
2303 because they may change the signedness of the arguments. */
2304 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2310 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2311 /* This is needed for conversions and for COMPONENT_REF.
2312 Might as well play it safe and always test this. */
2313 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2314 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2315 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2318 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2319 We don't care about side effects in that case because the SAVE_EXPR
2320 takes care of that for us. In all other cases, two expressions are
2321 equal if they have no side effects. If we have two identical
2322 expressions with side effects that should be treated the same due
2323 to the only side effects being identical SAVE_EXPR's, that will
2324 be detected in the recursive calls below. */
2325 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2326 && (TREE_CODE (arg0) == SAVE_EXPR
2327 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2330 /* Next handle constant cases, those for which we can return 1 even
2331 if ONLY_CONST is set. */
2332 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2333 switch (TREE_CODE (arg0))
/* Constants with recorded overflow never compare equal.  */
2336 return (! TREE_CONSTANT_OVERFLOW (arg0)
2337 && ! TREE_CONSTANT_OVERFLOW (arg1)
2338 && tree_int_cst_equal (arg0, arg1));
/* REAL_VALUES_IDENTICAL (not ==): distinguishes -0.0 from 0.0 and
   compares NaNs bitwise, per the header comment above.  */
2341 return (! TREE_CONSTANT_OVERFLOW (arg0)
2342 && ! TREE_CONSTANT_OVERFLOW (arg1)
2343 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2344 TREE_REAL_CST (arg1)));
2350 if (TREE_CONSTANT_OVERFLOW (arg0)
2351 || TREE_CONSTANT_OVERFLOW (arg1))
/* Vector constants: walk both element lists in lockstep.  */
2354 v1 = TREE_VECTOR_CST_ELTS (arg0);
2355 v2 = TREE_VECTOR_CST_ELTS (arg1);
2358 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2361 v1 = TREE_CHAIN (v1);
2362 v2 = TREE_CHAIN (v2);
2369 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2371 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2375 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2376 && ! memcmp (TREE_STRING_POINTER (arg0),
2377 TREE_STRING_POINTER (arg1),
2378 TREE_STRING_LENGTH (arg0)));
2381 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past the constant cases: with OEP_ONLY_CONST, nothing else counts.  */
2387 if (flags & OEP_ONLY_CONST)
2390 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2393 /* Two conversions are equal only if signedness and modes match. */
2394 switch (TREE_CODE (arg0))
2399 case FIX_TRUNC_EXPR:
2400 case FIX_FLOOR_EXPR:
2401 case FIX_ROUND_EXPR:
2402 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2403 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2410 return operand_equal_p (TREE_OPERAND (arg0, 0),
2411 TREE_OPERAND (arg1, 0), flags);
/* Binary expressions: compare operands pairwise.  */
2415 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2416 TREE_OPERAND (arg1, 0), flags)
2417 && operand_equal_p (TREE_OPERAND (arg0, 1),
2418 TREE_OPERAND (arg1, 1), flags))
2421 /* For commutative ops, allow the other order. */
2422 return (commutative_tree_code (TREE_CODE (arg0))
2423 && operand_equal_p (TREE_OPERAND (arg0, 0),
2424 TREE_OPERAND (arg1, 1), flags)
2425 && operand_equal_p (TREE_OPERAND (arg0, 1),
2426 TREE_OPERAND (arg1, 0), flags));
2429 /* If either of the pointer (or reference) expressions we are
2430 dereferencing contain a side effect, these cannot be equal. */
2431 if (TREE_SIDE_EFFECTS (arg0)
2432 || TREE_SIDE_EFFECTS (arg1))
2435 switch (TREE_CODE (arg0))
2440 return operand_equal_p (TREE_OPERAND (arg0, 0),
2441 TREE_OPERAND (arg1, 0), flags);
/* ARRAY_REF / ARRAY_RANGE_REF carry four operands (base, index,
   plus the optional lower-bound/element-size operands).  */
2444 case ARRAY_RANGE_REF:
2445 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2446 TREE_OPERAND (arg1, 0), flags)
2447 && operand_equal_p (TREE_OPERAND (arg0, 1),
2448 TREE_OPERAND (arg1, 1), flags)
2449 && operand_equal_p (TREE_OPERAND (arg0, 2),
2450 TREE_OPERAND (arg1, 2), flags)
2451 && operand_equal_p (TREE_OPERAND (arg0, 3),
2452 TREE_OPERAND (arg1, 3), flags));
2456 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2457 TREE_OPERAND (arg1, 0), flags)
2458 && operand_equal_p (TREE_OPERAND (arg0, 1),
2459 TREE_OPERAND (arg1, 1), flags)
2460 && operand_equal_p (TREE_OPERAND (arg0, 2),
2461 TREE_OPERAND (arg1, 2), flags));
2465 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2466 TREE_OPERAND (arg1, 0), flags)
2467 && operand_equal_p (TREE_OPERAND (arg0, 1),
2468 TREE_OPERAND (arg1, 1), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 2),
2470 TREE_OPERAND (arg1, 2), flags));
2476 switch (TREE_CODE (arg0))
2479 case TRUTH_NOT_EXPR:
2480 return operand_equal_p (TREE_OPERAND (arg0, 0),
2481 TREE_OPERAND (arg1, 0), flags);
/* Short-circuit connectives are order-sensitive: no operand swap.  */
2483 case TRUTH_ANDIF_EXPR:
2484 case TRUTH_ORIF_EXPR:
2485 return operand_equal_p (TREE_OPERAND (arg0, 0),
2486 TREE_OPERAND (arg1, 0), flags)
2487 && operand_equal_p (TREE_OPERAND (arg0, 1),
2488 TREE_OPERAND (arg1, 1), flags);
/* Non-short-circuit connectives are commutative: try both orders.  */
2490 case TRUTH_AND_EXPR:
2492 case TRUTH_XOR_EXPR:
2493 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2494 TREE_OPERAND (arg1, 0), flags)
2495 && operand_equal_p (TREE_OPERAND (arg0, 1),
2496 TREE_OPERAND (arg1, 1), flags))
2497 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2498 TREE_OPERAND (arg1, 1), flags)
2499 && operand_equal_p (TREE_OPERAND (arg0, 1),
2500 TREE_OPERAND (arg1, 0), flags));
2503 /* If the CALL_EXPRs call different functions, then they
2504 clearly can not be equal. */
2505 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2506 TREE_OPERAND (arg1, 0), flags))
/* Only const (and, with OEP_PURE_SAME, pure) calls may be deemed
   equal; the flag test on CEF is elided in this extract.  */
2510 unsigned int cef = call_expr_flags (arg0);
2511 if (flags & OEP_PURE_SAME)
2512 cef &= ECF_CONST | ECF_PURE;
2519 /* Now see if all the arguments are the same. operand_equal_p
2520 does not handle TREE_LIST, so we walk the operands here
2521 feeding them to operand_equal_p. */
2522 arg0 = TREE_OPERAND (arg0, 1);
2523 arg1 = TREE_OPERAND (arg1, 1);
2524 while (arg0 && arg1)
2526 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2530 arg0 = TREE_CHAIN (arg0);
2531 arg1 = TREE_CHAIN (arg1);
2534 /* If we get here and both argument lists are exhausted
2535 then the CALL_EXPRs are equal. */
2536 return ! (arg0 || arg1);
2543 /* Consider __builtin_sqrt equal to sqrt. */
2544 return (TREE_CODE (arg0) == FUNCTION_DECL
2545 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2546 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2547 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2554 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2555 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2557 When in doubt, return 0. */
2560 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2562 int unsignedp1, unsignedpo;
2563 tree primarg0, primarg1, primother;
2564 unsigned int correct_width;
/* Exact equality is the easy win.  */
2566 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare heuristics below only apply to integers.  */
2569 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2570 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2573 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2574 and see if the inner values are the same. This removes any
2575 signedness comparison, which doesn't matter here. */
2576 primarg0 = arg0, primarg1 = arg1;
2577 STRIP_NOPS (primarg0);
2578 STRIP_NOPS (primarg1);
2579 if (operand_equal_p (primarg0, primarg1, 0))
2582 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2583 actual comparison operand, ARG0.
2585 First throw away any conversions to wider types
2586 already present in the operands. */
2588 primarg1 = get_narrower (arg1, &unsignedp1);
2589 primother = get_narrower (other, &unsignedpo);
2591 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2592 if (unsignedp1 == unsignedpo
2593 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2594 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2596 tree type = TREE_TYPE (arg0);
2598 /* Make sure shorter operand is extended the right way
2599 to match the longer operand. */
2600 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2601 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2603 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2610 /* See if ARG is an expression that is either a comparison or is performing
2611 arithmetic on comparisons. The comparisons must only be comparing
2612 two different values, which will be stored in *CVAL1 and *CVAL2; if
2613 they are nonzero it means that some operands have already been found.
2614 No variables may be used anywhere else in the expression except in the
2615 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2616 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2618 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): extraction gaps -- several original lines (return type,
   braces, case labels) are missing here; only comments added/changed.  */
2621 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2623 enum tree_code code = TREE_CODE (arg);
2624 char class = TREE_CODE_CLASS (code);
2626 /* We can handle some of the 'e' cases here. */
2627 if (class == 'e' && code == TRUTH_NOT_EXPR)
2629 else if (class == 'e'
2630 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2631 || code == COMPOUND_EXPR))
2634 else if (class == 'e' && code == SAVE_EXPR
2635 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2637 /* If we've already found a CVAL1 or CVAL2, this expression is
2638 too complex to handle. */
2639 if (*cval1 || *cval2)
/* Unary cases: recurse into the single operand.  */
2649 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary cases: both operands must satisfy the predicate.  */
2652 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2653 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2654 cval1, cval2, save_p));
/* COND_EXPR: all three operands must satisfy the predicate.  */
2660 if (code == COND_EXPR)
2661 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2662 cval1, cval2, save_p)
2663 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2664 cval1, cval2, save_p)
2665 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2666 cval1, cval2, save_p));
2670 /* First see if we can handle the first operand, then the second. For
2671 the second operand, we know *CVAL1 can't be zero. It must be that
2672 one side of the comparison is each of the values; test for the
2673 case where this isn't true by failing if the two operands
2676 if (operand_equal_p (TREE_OPERAND (arg, 0),
2677 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as a new value, or check it matches one already seen.  */
2681 *cval1 = TREE_OPERAND (arg, 0);
2682 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2684 else if (*cval2 == 0)
2685 *cval2 = TREE_OPERAND (arg, 0);
2686 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1.  */
2691 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2693 else if (*cval2 == 0)
2694 *cval2 = TREE_OPERAND (arg, 1);
2695 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2707 /* ARG is a tree that is known to contain just arithmetic operations and
2708 comparisons. Evaluate the operations in the tree substituting NEW0 for
2709 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): extraction gaps -- some original lines (case labels,
   braces) are missing; comments only.  */
2713 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2715 tree type = TREE_TYPE (arg);
2716 enum tree_code code = TREE_CODE (arg);
2717 char class = TREE_CODE_CLASS (code);
2719 /* We can handle some of the 'e' cases here. */
2720 if (class == 'e' && code == TRUTH_NOT_EXPR)
2722 else if (class == 'e'
2723 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild with the substituted operand and re-fold.  */
2729 return fold (build1 (code, type,
2730 eval_subst (TREE_OPERAND (arg, 0),
2731 old0, new0, old1, new1)));
/* Binary: substitute in both operands.  */
2734 return fold (build2 (code, type,
2735 eval_subst (TREE_OPERAND (arg, 0),
2736 old0, new0, old1, new1),
2737 eval_subst (TREE_OPERAND (arg, 1),
2738 old0, new0, old1, new1)));
2744 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2747 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (e.g. COND_EXPR): substitute in all three operands.  */
2750 return fold (build3 (code, type,
2751 eval_subst (TREE_OPERAND (arg, 0),
2752 old0, new0, old1, new1),
2753 eval_subst (TREE_OPERAND (arg, 1),
2754 old0, new0, old1, new1),
2755 eval_subst (TREE_OPERAND (arg, 2),
2756 old0, new0, old1, new1)));
2760 /* Fall through - ??? */
/* Comparison case: replace OLD0/OLD1 appearing directly as operands.  */
2764 tree arg0 = TREE_OPERAND (arg, 0);
2765 tree arg1 = TREE_OPERAND (arg, 1);
2767 /* We need to check both for exact equality and tree equality. The
2768 former will be true if the operand has a side-effect. In that
2769 case, we know the operand occurred exactly once. */
2771 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2773 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2776 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2778 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2781 return fold (build2 (code, type, arg0, arg1));
2789 /* Return a tree for the case when the result of an expression is RESULT
2790 converted to TYPE and OMITTED was previously an operand of the expression
2791 but is now not needed (e.g., we folded OMITTED * 0).
2793 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2794 the conversion of RESULT to TYPE. */
2797 omit_one_operand (tree type, tree result, tree omitted)
2799 tree t = fold_convert (type, result);
/* If the dropped operand has side effects it must still be evaluated,
   so sequence it before the result with a COMPOUND_EXPR.  */
2801 if (TREE_SIDE_EFFECTS (omitted))
2802 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* Otherwise just make sure the result is not usable as an lvalue.  */
2804 return non_lvalue (t);
2807 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2810 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2812 tree t = fold_convert (type, result);
/* Evaluate a side-effecting omitted operand before the result.  */
2814 if (TREE_SIDE_EFFECTS (omitted))
2815 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* Same as omit_one_operand but using the pedantic non-lvalue wrapper.  */
2817 return pedantic_non_lvalue (t);
2820 /* Return a tree for the case when the result of an expression is RESULT
2821 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2822 of the expression but are now not needed.
2824 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2825 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2826 evaluated before OMITTED2. Otherwise, if neither has side effects,
2827 just do the conversion of RESULT to TYPE. */
2830 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2832 tree t = fold_convert (type, result);
/* Chain side-effecting omitted operands ahead of T; building OMITTED2
   first and OMITTED1 second yields evaluation order OMITTED1, OMITTED2.  */
2834 if (TREE_SIDE_EFFECTS (omitted2))
2835 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2836 if (TREE_SIDE_EFFECTS (omitted1))
2837 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only wrap in non_lvalue when no COMPOUND_EXPR was needed.  */
2839 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2843 /* Return a simplified tree node for the truth-negation of ARG. This
2844 never alters ARG itself. We assume that ARG is an operation that
2845 returns a truth value (0 or 1).
2847 FIXME: one would think we would fold the result, but it causes
2848 problems with the dominator optimizer. */
/* NOTE(review): extraction gaps -- the switch header and several case
   labels/braces are missing from this chunk; comments only.  */
2850 invert_truthvalue (tree arg)
2852 tree type = TREE_TYPE (arg);
2853 enum tree_code code = TREE_CODE (arg);
2855 if (code == ERROR_MARK)
2858 /* If this is a comparison, we can simply invert it, except for
2859 floating-point non-equality comparisons, in which case we just
2860 enclose a TRUTH_NOT_EXPR around what we have. */
2862 if (TREE_CODE_CLASS (code) == '<')
2864 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With -ftrapping-math, inverting an ordered FP comparison could change
   trapping behavior on NaNs, so keep an explicit NOT instead.  */
2865 if (FLOAT_TYPE_P (op_type)
2866 && flag_trapping_math
2867 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2868 && code != NE_EXPR && code != EQ_EXPR)
2869 return build1 (TRUTH_NOT_EXPR, type, arg);
2872 code = invert_tree_comparison (code,
2873 HONOR_NANS (TYPE_MODE (op_type)));
2874 if (code == ERROR_MARK)
2875 return build1 (TRUTH_NOT_EXPR, type, arg);
2877 return build2 (code, type,
2878 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: zero becomes one and nonzero becomes zero.  */
2885 return fold_convert (type,
2886 build_int_cst (NULL_TREE, integer_zerop (arg)));
2888 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
2889 return build2 (TRUTH_OR_EXPR, type,
2890 invert_truthvalue (TREE_OPERAND (arg, 0)),
2891 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
2894 return build2 (TRUTH_AND_EXPR, type,
2895 invert_truthvalue (TREE_OPERAND (arg, 0)),
2896 invert_truthvalue (TREE_OPERAND (arg, 1)));
2898 case TRUTH_XOR_EXPR:
2899 /* Here we can invert either operand. We invert the first operand
2900 unless the second operand is a TRUTH_NOT_EXPR in which case our
2901 result is the XOR of the first operand with the inside of the
2902 negation of the second operand. */
2904 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2905 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2906 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2908 return build2 (TRUTH_XOR_EXPR, type,
2909 invert_truthvalue (TREE_OPERAND (arg, 0)),
2910 TREE_OPERAND (arg, 1));
2912 case TRUTH_ANDIF_EXPR:
/* Short-circuit forms invert the same way as AND/OR.  */
2913 return build2 (TRUTH_ORIF_EXPR, type,
2914 invert_truthvalue (TREE_OPERAND (arg, 0)),
2915 invert_truthvalue (TREE_OPERAND (arg, 1)));
2917 case TRUTH_ORIF_EXPR:
2918 return build2 (TRUTH_ANDIF_EXPR, type,
2919 invert_truthvalue (TREE_OPERAND (arg, 0)),
2920 invert_truthvalue (TREE_OPERAND (arg, 1)));
2922 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2923 return TREE_OPERAND (arg, 0);
/* COND_EXPR: invert both arms, leave the condition alone.  */
2926 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2927 invert_truthvalue (TREE_OPERAND (arg, 1)),
2928 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* COMPOUND_EXPR: only the value (second) operand is inverted.  */
2931 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2932 invert_truthvalue (TREE_OPERAND (arg, 1)));
2934 case NON_LVALUE_EXPR:
2935 return invert_truthvalue (TREE_OPERAND (arg, 0));
2938 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2943 return build1 (TREE_CODE (arg), type,
2944 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND_EXPR with mask 1: invert (x & 1) as (x & 1) == 0.  */
2947 if (!integer_onep (TREE_OPERAND (arg, 1)))
2949 return build2 (EQ_EXPR, type, arg,
2950 fold_convert (type, integer_zero_node));
2953 return build1 (TRUTH_NOT_EXPR, type, arg);
2955 case CLEANUP_POINT_EXPR:
2956 return build1 (CLEANUP_POINT_EXPR, type,
2957 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: the argument must at least be boolean-typed.  */
2962 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2964 return build1 (TRUTH_NOT_EXPR, type, arg);
2967 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2968 operands are another bit-wise operation with a common input. If so,
2969 distribute the bit operations to save an operation and possibly two if
2970 constants are involved. For example, convert
2971 (A | B) & (A | C) into A | (B & C)
2972 Further simplification will occur if B and C are constants.
2974 If this optimization cannot be done, 0 will be returned. */
/* NOTE(review): extraction gaps -- declarations, braces and early
   `return 0' lines are missing from this chunk; comments only.  */
2977 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same AND/IOR operation, and different from
   CODE itself, for the distribution to apply.  */
2982 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2983 || TREE_CODE (arg0) == code
2984 || (TREE_CODE (arg0) != BIT_AND_EXPR
2985 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand in any of the four operand positions.  */
2988 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2990 common = TREE_OPERAND (arg0, 0);
2991 left = TREE_OPERAND (arg0, 1);
2992 right = TREE_OPERAND (arg1, 1);
2994 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2996 common = TREE_OPERAND (arg0, 0);
2997 left = TREE_OPERAND (arg0, 1);
2998 right = TREE_OPERAND (arg1, 0);
3000 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3002 common = TREE_OPERAND (arg0, 1);
3003 left = TREE_OPERAND (arg0, 0);
3004 right = TREE_OPERAND (arg1, 1);
3006 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3008 common = TREE_OPERAND (arg0, 1);
3009 left = TREE_OPERAND (arg0, 0);
3010 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT), e.g. (A|B)&(A|C) => A|(B&C).  */
3015 return fold (build2 (TREE_CODE (arg0), type, common,
3016 fold (build2 (code, type, left, right))));
3019 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3020 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3023 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Build the BIT_FIELD_REF node and record the requested signedness.  */
3026 tree result = build3 (BIT_FIELD_REF, type, inner,
3027 size_int (bitsize), bitsize_int (bitpos));
3029 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3034 /* Optimize a bit-field compare.
3036 There are two cases: First is a compare against a constant and the
3037 second is a comparison of two items where the fields are at the same
3038 bit position relative to the start of a chunk (byte, halfword, word)
3039 large enough to contain it. In these cases we can avoid the shift
3040 implicit in bitfield extractions.
3042 For constants, we emit a compare of the shifted constant with the
3043 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3044 compared. For two fields at the same position, we do the ANDs with the
3045 similar mask and compare the result of the ANDs.
3047 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3048 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3049 are the left and right operands of the comparison, respectively.
3051 If the optimization described above can be done, we return the resulting
3052 tree. Otherwise we return zero. */
/* NOTE(review): extraction gaps -- parameter lines, braces, `return 0'
   statements and parts of several expressions are missing from this
   chunk; only comments were added/changed.  */
3055 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3058 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3059 tree type = TREE_TYPE (lhs);
3060 tree signed_type, unsigned_type;
3061 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3062 enum machine_mode lmode, rmode, nmode;
3063 int lunsignedp, runsignedp;
3064 int lvolatilep = 0, rvolatilep = 0;
3065 tree linner, rinner = NULL_TREE;
3069 /* Get all the information about the extractions being done. If the bit size
3070 is the same as the size of the underlying object, we aren't doing an
3071 extraction at all and so can do nothing. We also don't want to
3072 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3073 then will no longer be able to replace it. */
3074 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3075 &lunsignedp, &lvolatilep);
3076 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3077 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3082 /* If this is not a constant, we can only do something if bit positions,
3083 sizes, and signedness are the same. */
3084 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3085 &runsignedp, &rvolatilep);
3087 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3088 || lunsignedp != runsignedp || offset != 0
3089 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3093 /* See if we can find a mode to refer to this field. We should be able to,
3094 but fail if we can't. */
3095 nmode = get_best_mode (lbitsize, lbitpos,
3096 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3097 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3098 TYPE_ALIGN (TREE_TYPE (rinner))),
3099 word_mode, lvolatilep || rvolatilep);
3100 if (nmode == VOIDmode)
3103 /* Set signed and unsigned types of the precision of this mode for the
3105 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3106 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3108 /* Compute the bit position and size for the new reference and our offset
3109 within it. If the new reference is the same size as the original, we
3110 won't optimize anything, so return zero. */
3111 nbitsize = GET_MODE_BITSIZE (nmode);
/* Round the field's bit position down to a NBITSIZE boundary.  */
3112 nbitpos = lbitpos & ~ (nbitsize - 1);
3114 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 is at the other end of the word.  */
3117 if (BYTES_BIG_ENDIAN)
3118 lbitpos = nbitsize - lbitsize - lbitpos;
3120 /* Make the mask to be used against the extracted field. */
3121 mask = build_int_cst (unsigned_type, -1);
3122 mask = force_fit_type (mask, 0, false, false);
3123 mask = fold_convert (unsigned_type, mask);
/* Shift left then right to leave ones only in the field's bit range.  */
3124 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3125 mask = const_binop (RSHIFT_EXPR, mask,
3126 size_int (nbitsize - lbitsize - lbitpos), 0);
3129 /* If not comparing with constant, just rework the comparison
3131 return build2 (code, compare_type,
3132 build2 (BIT_AND_EXPR, unsigned_type,
3133 make_bit_field_ref (linner, unsigned_type,
3134 nbitsize, nbitpos, 1),
3136 build2 (BIT_AND_EXPR, unsigned_type,
3137 make_bit_field_ref (rinner, unsigned_type,
3138 nbitsize, nbitpos, 1),
3141 /* Otherwise, we are handling the constant case. See if the constant is too
3142 big for the field. Warn and return a tree for 0 (false) if so. We do
3143 this not only for its own sake, but to avoid having to test for this
3144 error case below. If we didn't, we might generate wrong code.
3146 For unsigned fields, the constant shifted right by the field length should
3147 be all zero. For signed fields, the high-order bits should agree with
3152 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3153 fold_convert (unsigned_type, rhs),
3154 size_int (lbitsize), 0)))
3156 warning ("comparison is always %d due to width of bit-field",
3158 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: the bits above the field must all equal the sign bit.  */
3163 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3164 size_int (lbitsize - 1), 0);
3165 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3167 warning ("comparison is always %d due to width of bit-field",
3169 return constant_boolean_node (code == NE_EXPR, compare_type);
3173 /* Single-bit compares should always be against zero. */
3174 if (lbitsize == 1 && ! integer_zerop (rhs))
3176 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3177 rhs = fold_convert (type, integer_zero_node);
3180 /* Make a new bitfield reference, shift the constant over the
3181 appropriate number of bits and mask it with the computed mask
3182 (in case this was a signed field). If we changed it, make a new one. */
3183 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3186 TREE_SIDE_EFFECTS (lhs) = 1;
3187 TREE_THIS_VOLATILE (lhs) = 1;
/* Align the constant with the field position and mask off stray bits.  */
3190 rhs = fold (const_binop (BIT_AND_EXPR,
3191 const_binop (LSHIFT_EXPR,
3192 fold_convert (unsigned_type, rhs),
3193 size_int (lbitpos), 0),
3196 return build2 (code, compare_type,
3197 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3201 /* Subroutine for fold_truthop: decode a field reference.
3203 If EXP is a comparison reference, we return the innermost reference.
3205 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3206 set to the starting bit number.
3208 If the innermost field can be completely contained in a mode-sized
3209 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3211 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3212 otherwise it is not changed.
3214 *PUNSIGNEDP is set to the signedness of the field.
3216 *PMASK is set to the mask used. This is either contained in a
3217 BIT_AND_EXPR or derived from the width of the field.
3219 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3221 Return 0 if this is not a component reference or is one that we can't
3222 do anything with. */
/* NOTE(review): extraction gaps -- some declarations, braces and early
   `return 0' lines are missing; comments only.  */
3225 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3226 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3227 int *punsignedp, int *pvolatilep,
3228 tree *pmask, tree *pand_mask)
3230 tree outer_type = 0;
3232 tree mask, inner, offset;
3234 unsigned int precision;
3236 /* All the optimizations using this function assume integer fields.
3237 There are problems with FP fields since the type_for_size call
3238 below can fail for, e.g., XFmode. */
3239 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3242 /* We are interested in the bare arrangement of bits, so strip everything
3243 that doesn't affect the machine mode. However, record the type of the
3244 outermost expression if it may matter below. */
3245 if (TREE_CODE (exp) == NOP_EXPR
3246 || TREE_CODE (exp) == CONVERT_EXPR
3247 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3248 outer_type = TREE_TYPE (exp);
/* Peel off a BIT_AND_EXPR and remember its constant mask operand.  */
3251 if (TREE_CODE (exp) == BIT_AND_EXPR)
3253 and_mask = TREE_OPERAND (exp, 1);
3254 exp = TREE_OPERAND (exp, 0);
3255 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3256 if (TREE_CODE (and_mask) != INTEGER_CST)
3260 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3261 punsignedp, pvolatilep);
3262 if ((inner == exp && and_mask == 0)
3263 || *pbitsize < 0 || offset != 0
3264 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3267 /* If the number of bits in the reference is the same as the bitsize of
3268 the outer type, then the outer type gives the signedness. Otherwise
3269 (in case of a small bitfield) the signedness is unchanged. */
3270 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3271 *punsignedp = TYPE_UNSIGNED (outer_type);
3273 /* Compute the mask to access the bitfield. */
3274 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3275 precision = TYPE_PRECISION (unsigned_type);
3277 mask = build_int_cst (unsigned_type, -1);
3278 mask = force_fit_type (mask, 0, false, false);
/* Left/right shift pair leaves exactly *PBITSIZE low-order one bits.  */
3280 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3281 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3283 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3285 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3286 fold_convert (unsigned_type, and_mask), mask));
3289 *pand_mask = and_mask;
3293 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3297 all_ones_mask_p (tree mask, int size)
3299 tree type = TREE_TYPE (mask);
3300 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the signed variant of TYPE...  */
3303 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3304 tmask = force_fit_type (tmask, 0, false, false);
/* ...and compare MASK with that constant shifted left then right by
   PRECISION - SIZE, which leaves exactly SIZE low-order one bits.  */
3307 tree_int_cst_equal (mask,
3308 const_binop (RSHIFT_EXPR,
3309 const_binop (LSHIFT_EXPR, tmask,
3310 size_int (precision - size),
3312 size_int (precision - size), 0));
3315 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3316 represents the sign bit of EXP's type. If EXP represents a sign
3317 or zero extension, also test VAL against the unextended type.
3318 The return value is the (sub)expression whose sign bit is VAL,
3319 or NULL_TREE otherwise. */
/* NOTE(review): extraction gaps -- braces, `return NULL_TREE' lines and
   some zero-assignments are missing from this chunk; comments only.  */
3322 sign_bit_p (tree exp, tree val)
3324 unsigned HOST_WIDE_INT mask_lo, lo;
3325 HOST_WIDE_INT mask_hi, hi;
3329 /* Tree EXP must have an integral type. */
3330 t = TREE_TYPE (exp);
3331 if (! INTEGRAL_TYPE_P (t))
3334 /* Tree VAL must be an integer constant. */
3335 if (TREE_CODE (val) != INTEGER_CST
3336 || TREE_CONSTANT_OVERFLOW (val))
3339 width = TYPE_PRECISION (t);
/* Wide types: the sign bit lives in the high word of the constant.  */
3340 if (width > HOST_BITS_PER_WIDE_INT)
3342 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3345 mask_hi = ((unsigned HOST_WIDE_INT) -1
3346 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow types: the sign bit lives in the low word.  */
3352 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3355 mask_lo = ((unsigned HOST_WIDE_INT) -1
3356 >> (HOST_BITS_PER_WIDE_INT - width));
3359 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3360 treat VAL as if it were unsigned. */
3361 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3362 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3365 /* Handle extension from a narrower type. */
3366 if (TREE_CODE (exp) == NOP_EXPR
3367 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3368 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3373 /* Subroutine for fold_truthop: determine if an operand is simple enough
3374 to be evaluated unconditionally. */
3377 simple_operand_p (tree exp)
3379 /* Strip any conversions that don't change the machine mode. */
3380 while ((TREE_CODE (exp) == NOP_EXPR
3381 || TREE_CODE (exp) == CONVERT_EXPR)
3382 && (TYPE_MODE (TREE_TYPE (exp))
3383 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3384 exp = TREE_OPERAND (exp, 0);
/* A constant ('c' class) is always simple; otherwise EXP must be a
   cheap, non-volatile, non-addressable local.  */
3386 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3388 && ! TREE_ADDRESSABLE (exp)
3389 && ! TREE_THIS_VOLATILE (exp)
3390 && ! DECL_NONLOCAL (exp)
3391 /* Don't regard global variables as simple. They may be
3392 allocated in ways unknown to the compiler (shared memory,
3393 #pragma weak, etc). */
3394 && ! TREE_PUBLIC (exp)
3395 && ! DECL_EXTERNAL (exp)
3396 /* Loading a static variable is unduly expensive, but global
3397 registers aren't expensive. */
3398 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3401 /* The following functions are subroutines to fold_range_test and allow it to
3402 try to change a logical combination of comparisons into a range test.
3405 X == 2 || X == 3 || X == 4 || X == 5
3409 (unsigned) (X - 2) <= 3
3411 We describe each set of comparisons as being either inside or outside
3412 a range, using a variable named like IN_P, and then describe the
3413 range with a lower and upper bound. If one of the bounds is omitted,
3414 it represents either the highest or lowest value of the type.
3416 In the comments below, we represent a range by two numbers in brackets
3417 preceded by a "+" to designate being inside that range, or a "-" to
3418 designate being outside that range, so the condition can be inverted by
3419 flipping the prefix. An omitted bound is represented by a "-". For
3420 example, "- [-, 10]" means being outside the range starting at the lowest
3421 possible value and ending at 10, in other words, being greater than 10.
3422 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3425 We set up things so that the missing bounds are handled in a consistent
3426 manner so neither a missing bound nor "true" and "false" need to be
3427 handled using a special case. */
3429 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3430 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3431 and UPPER1_P are nonzero if the respective argument is an upper bound
3432 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3433 must be specified for a comparison. ARG1 will be converted to ARG0's
3434 type if both are specified. */
/* NOTE(review): extraction gaps -- declarations, the switch header and
   case labels are missing from this chunk; comments only.  */
3437 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3438 tree arg1, int upper1_p)
3444 /* If neither arg represents infinity, do the normal operation.
3445 Else, if not a comparison, return infinity. Else handle the special
3446 comparison rules. Note that most of the cases below won't occur, but
3447 are handled for consistency. */
3449 if (arg0 != 0 && arg1 != 0)
3451 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3452 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3454 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3457 if (TREE_CODE_CLASS (code) != '<')
3460 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3461 for neither. In real maths, we cannot assume open ended ranges are
3462 the same. But, this is computer arithmetic, where numbers are finite.
3463 We can therefore make the transformation of any unbounded range with
3464 the value Z, Z being greater than any representable number. This permits
3465 us to treat unbounded ranges as equal. */
3466 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3467 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the two signed "infinity directions"; a present argument
   counts as 0, between lower (-1) and upper (+1) infinity.  */
3471 result = sgn0 == sgn1;
3474 result = sgn0 != sgn1;
3477 result = sgn0 < sgn1;
3480 result = sgn0 <= sgn1;
3483 result = sgn0 > sgn1;
3486 result = sgn0 >= sgn1;
3492 return constant_boolean_node (result, type);
3495 /* Given EXP, a logical expression, set the range it is testing into
3496 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3497 actually being tested. *PLOW and *PHIGH will be made of the same type
3498 as the returned expression. If EXP is not a comparison, we will most
3499 likely not be returning a useful value and range. */
/* NOTE(review): extraction gaps -- the enclosing while loop, switch
   header, many braces, `continue'/`break' statements and some case
   labels are missing from this chunk; only comments were added.  */
3502 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3504 enum tree_code code;
3505 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3506 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3508 tree low, high, n_low, n_high;
3510 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3511 and see if we can refine the range. Some of the cases below may not
3512 happen, but it doesn't seem worth worrying about this. We "continue"
3513 the outer loop when we've changed something; otherwise we "break"
3514 the switch, which will "break" the while. */
3517 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3521 code = TREE_CODE (exp);
3522 exp_type = TREE_TYPE (exp);
/* Pick out ARG0/ARG1 (and ARG0's type) for the expression classes that
   have them, guided by the tree-code class.  */
3524 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3526 if (first_rtl_op (code) > 0)
3527 arg0 = TREE_OPERAND (exp, 0);
3528 if (TREE_CODE_CLASS (code) == '<'
3529 || TREE_CODE_CLASS (code) == '1'
3530 || TREE_CODE_CLASS (code) == '2')
3531 arg0_type = TREE_TYPE (arg0);
3532 if (TREE_CODE_CLASS (code) == '2'
3533 || TREE_CODE_CLASS (code) == '<'
3534 || (TREE_CODE_CLASS (code) == 'e'
3535 && TREE_CODE_LENGTH (code) > 1))
3536 arg1 = TREE_OPERAND (exp, 1);
3541 case TRUTH_NOT_EXPR:
/* Negation just flips the in/out sense of the range.  */
3542 in_p = ! in_p, exp = arg0;
3545 case EQ_EXPR: case NE_EXPR:
3546 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3547 /* We can only do something if the range is testing for zero
3548 and if the second operand is an integer constant. Note that
3549 saying something is "in" the range we make is done by
3550 complementing IN_P since it will set in the initial case of
3551 being not equal to zero; "out" is leaving it alone. */
3552 if (low == 0 || high == 0
3553 || ! integer_zerop (low) || ! integer_zerop (high)
3554 || TREE_CODE (arg1) != INTEGER_CST)
3559 case NE_EXPR: /* - [c, c] */
3562 case EQ_EXPR: /* + [c, c] */
3563 in_p = ! in_p, low = high = arg1;
3565 case GT_EXPR: /* - [-, c] */
3566 low = 0, high = arg1;
3568 case GE_EXPR: /* + [c, -] */
3569 in_p = ! in_p, low = arg1, high = 0;
3571 case LT_EXPR: /* - [c, -] */
3572 low = arg1, high = 0;
3574 case LE_EXPR: /* + [-, c] */
3575 in_p = ! in_p, low = 0, high = arg1;
3581 /* If this is an unsigned comparison, we also know that EXP is
3582 greater than or equal to zero. We base the range tests we make
3583 on that fact, so we record it here so we can parse existing
3584 range tests. We test arg0_type since often the return type
3585 of, e.g. EQ_EXPR, is boolean. */
3586 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3588 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3590 fold_convert (arg0_type, integer_zero_node),
3594 in_p = n_in_p, low = n_low, high = n_high;
3596 /* If the high bound is missing, but we have a nonzero low
3597 bound, reverse the range so it goes from zero to the low bound
3599 if (high == 0 && low && ! integer_zerop (low))
3602 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3603 integer_one_node, 0);
3604 low = fold_convert (arg0_type, integer_zero_node);
3612 /* (-x) IN [a,b] -> x in [-b, -a] */
3613 n_low = range_binop (MINUS_EXPR, exp_type,
3614 fold_convert (exp_type, integer_zero_node),
3616 n_high = range_binop (MINUS_EXPR, exp_type,
3617 fold_convert (exp_type, integer_zero_node),
3619 low = n_low, high = n_high;
/* ~x is equivalent to -x - 1; rewrite and reprocess.  */
3625 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3626 fold_convert (exp_type, integer_one_node));
3629 case PLUS_EXPR: case MINUS_EXPR:
3630 if (TREE_CODE (arg1) != INTEGER_CST)
3633 /* If EXP is signed, any overflow in the computation is undefined,
3634 so we don't worry about it so long as our computations on
3635 the bounds don't overflow. For unsigned, overflow is defined
3636 and this is exactly the right thing. */
3637 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3638 arg0_type, low, 0, arg1, 0);
3639 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3640 arg0_type, high, 1, arg1, 0);
3641 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3642 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3645 /* Check for an unsigned range which has wrapped around the maximum
3646 value thus making n_high < n_low, and normalize it. */
3647 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3649 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3650 integer_one_node, 0);
3651 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3652 integer_one_node, 0);
3654 /* If the range is of the form +/- [ x+1, x ], we won't
3655 be able to normalize it. But then, it represents the
3656 whole range or the empty set, so make it
3658 if (tree_int_cst_equal (n_low, low)
3659 && tree_int_cst_equal (n_high, high))
3665 low = n_low, high = n_high;
3670 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* Can't refine through a widening conversion or to a non-integral or
   too-narrow type for the existing bounds.  */
3671 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3674 if (! INTEGRAL_TYPE_P (arg0_type)
3675 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3676 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3679 n_low = low, n_high = high;
3682 n_low = fold_convert (arg0_type, n_low);
3685 n_high = fold_convert (arg0_type, n_high);
3688 /* If we're converting arg0 from an unsigned type, to exp,
3689 a signed type, we will be doing the comparison as unsigned.
3690 The tests above have already verified that LOW and HIGH
3693 So we have to ensure that we will handle large unsigned
3694 values the same way that the current signed bounds treat
3697 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3700 tree equiv_type = lang_hooks.types.type_for_mode
3701 (TYPE_MODE (arg0_type), 1);
3703 /* A range without an upper bound is, naturally, unbounded.
3704 Since convert would have cropped a very large value, use
3705 the max value for the destination type. */
3707 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3708 : TYPE_MAX_VALUE (arg0_type);
3710 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3711 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3712 fold_convert (arg0_type,
3714 fold_convert (arg0_type,
3715 integer_one_node)));
3717 /* If the low bound is specified, "and" the range with the
3718 range for which the original unsigned value will be
3722 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3723 1, n_low, n_high, 1,
3724 fold_convert (arg0_type,
3729 in_p = (n_in_p == in_p);
3733 /* Otherwise, "or" the range with the range of the input
3734 that will be interpreted as negative. */
3735 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3736 0, n_low, n_high, 1,
3737 fold_convert (arg0_type,
3742 in_p = (in_p != n_in_p);
3747 low = n_low, high = n_high;
3757 /* If EXP is a constant, we can evaluate whether this is true or false. */
3758 if (TREE_CODE (exp) == INTEGER_CST)
3760 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3762 && integer_onep (range_binop (LE_EXPR, integer_type_node,
/* Hand the final range back to the caller through the out parameters.  */
3768 *pin_p = in_p, *plow = low, *phigh = high;
3772 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3773 type, TYPE, return an expression to test if EXP is in (or out of, depending
3774 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): this extract elides interleaved lines (return type, braces,
   several guard conditions).  Comments below describe only visible code.  */
3777 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3779 tree etype = TREE_TYPE (exp);
/* For an out-of-range test, build the in-range check and invert it.  */
3784 value = build_range_check (type, exp, 1, low, high);
3786 return invert_truthvalue (value);
/* No bounds at all: the range covers everything, so the test is always 1.  */
3791 if (low == 0 && high == 0)
3792 return fold_convert (type, integer_one_node);
/* Upper bound only: EXP <= HIGH (guard for low == 0 elided here).  */
3795 return fold (build2 (LE_EXPR, type, exp, high));
/* Lower bound only: EXP >= LOW (guard for high == 0 elided here).  */
3798 return fold (build2 (GE_EXPR, type, exp, low));
/* A single-value range [X, X] degenerates to an equality test.  */
3800 if (operand_equal_p (low, high, 0))
3801 return fold (build2 (EQ_EXPR, type, exp, low));
/* A range starting at zero: redo the check with EXP and HIGH forced to the
   corresponding unsigned type, so 0 <= EXP comes for free.  */
3803 if (integer_zerop (low))
3805 if (! TYPE_UNSIGNED (etype))
3807 etype = lang_hooks.types.unsigned_type (etype);
3808 high = fold_convert (etype, high);
3809 exp = fold_convert (etype, exp);
3811 return build_range_check (type, exp, 1, 0, high);
3814 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3815 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3817 unsigned HOST_WIDE_INT lo;
3821 prec = TYPE_PRECISION (etype);
/* Build HI:LO as the maximum signed value of PREC bits, split across the
   two HOST_WIDE_INT halves of an INTEGER_CST.  */
3822 if (prec <= HOST_BITS_PER_WIDE_INT)
3825 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3829 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3830 lo = (unsigned HOST_WIDE_INT) -1;
/* If HIGH is exactly that signed maximum, [1, HIGH] is (signed) EXP > 0.  */
3833 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3835 if (TYPE_UNSIGNED (etype))
3837 etype = lang_hooks.types.signed_type (etype);
3838 exp = fold_convert (etype, exp);
3840 return fold (build2 (GT_EXPR, type, exp,
3841 fold_convert (etype, integer_zero_node)));
/* General case: shift the range to start at zero and test
   (EXP - LOW) in [0, HIGH - LOW] via the zero-based case above.  */
3845 value = const_binop (MINUS_EXPR, high, low, 0);
/* HIGH - LOW overflowed in the signed type ETYPE; below, the code verifies
   that the matching unsigned type wraps as expected before retrying the
   subtraction there.  */
3846 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3848 tree utype, minv, maxv;
3850 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3851 for the type in question, as we rely on this here. */
3852 switch (TREE_CODE (etype))
3857 utype = lang_hooks.types.unsigned_type (etype);
3858 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3859 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3860 integer_one_node, 1);
3861 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3862 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Presumably ETYPE has been switched to UTYPE in elided lines just above
   (the wrap check succeeded) -- verify against full source.  */
3866 high = fold_convert (etype, high);
3867 low = fold_convert (etype, low);
3868 exp = fold_convert (etype, exp);
3869 value = const_binop (MINUS_EXPR, high, low, 0);
/* If the range width was computed without overflow, recurse on the
   zero-based range [0, HIGH - LOW].  */
3877 if (value != 0 && ! TREE_OVERFLOW (value))
3878 return build_range_check (type,
3879 fold (build2 (MINUS_EXPR, etype, exp, low)),
3880 1, fold_convert (etype, integer_zero_node),
3886 /* Given two ranges, see if we can merge them into one. Return 1 if we
3887 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): this extract elides interleaved lines (return type, braces,
   some conditions); comments below describe only what is visible.
   Convention throughout: a NULL (0) bound means "unbounded" on that side;
   IN*_P distinguishes "value is inside" from "value is outside" ranges.  */
3890 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3891 tree high0, int in1_p, tree low1, tree high1)
/* Two absent bounds compare equal; otherwise use range_binop so that a
   NULL bound is treated as -infinity (low) or +infinity (high).  */
3899 int lowequal = ((low0 == 0 && low1 == 0)
3900 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3901 low0, 0, low1, 0)));
3902 int highequal = ((high0 == 0 && high1 == 0)
3903 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3904 high0, 1, high1, 1)));
3906 /* Make range 0 be the range that starts first, or ends last if they
3907 start at the same value. Swap them if it isn't. */
3908 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3911 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3912 high1, 1, high0, 1))))
3914 temp = in0_p, in0_p = in1_p, in1_p = temp;
3915 tem = low0, low0 = low1, low1 = tem;
3916 tem = high0, high0 = high1, high1 = tem;
3919 /* Now flag two cases, whether the ranges are disjoint or whether the
3920 second range is totally subsumed in the first. Note that the tests
3921 below are simplified by the ones above. */
3922 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3923 high0, 1, low1, 0));
3924 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3925 high1, 1, high0, 1));
3927 /* We now have four cases, depending on whether we are including or
3928 excluding the two ranges. */
3931 /* If they don't overlap, the result is false. If the second range
3932 is a subset it is the result. Otherwise, the range is from the start
3933 of the second to the end of the first. */
3935 in_p = 0, low = high = 0;
3937 in_p = 1, low = low1, high = high1;
3939 in_p = 1, low = low1, high = high0;
3942 else if (in0_p && ! in1_p)
3944 /* If they don't overlap, the result is the first range. If they are
3945 equal, the result is false. If the second range is a subset of the
3946 first, and the ranges begin at the same place, we go from just after
3947 the end of the first range to the end of the second. If the second
3948 range is not a subset of the first, or if it is a subset and both
3949 ranges end at the same place, the range starts at the start of the
3950 first range and ends just before the second range.
3951 Otherwise, we can't describe this as a single range. */
3953 in_p = 1, low = low0, high = high0;
3954 else if (lowequal && highequal)
3955 in_p = 0, low = high = 0;
3956 else if (subset && lowequal)
3958 in_p = 1, high = high0;
3959 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3960 integer_one_node, 0);
3962 else if (! subset || highequal)
3964 in_p = 1, low = low0;
3965 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3966 integer_one_node, 0);
3972 else if (! in0_p && in1_p)
3974 /* If they don't overlap, the result is the second range. If the second
3975 is a subset of the first, the result is false. Otherwise,
3976 the range starts just after the first range and ends at the
3977 end of the second. */
3979 in_p = 1, low = low1, high = high1;
3980 else if (subset || highequal)
3981 in_p = 0, low = high = 0;
3984 in_p = 1, high = high1;
3985 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3986 integer_one_node, 0);
3992 /* The case where we are excluding both ranges. Here the complex case
3993 is if they don't overlap. In that case, the only time we have a
3994 range is if they are adjacent. If the second is a subset of the
3995 first, the result is the first. Otherwise, the range to exclude
3996 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: HIGH0 + 1 == LOW1 means the two excluded ranges fuse
   into one contiguous excluded range [LOW0, HIGH1].  */
4000 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4001 range_binop (PLUS_EXPR, NULL_TREE,
4003 integer_one_node, 1),
4005 in_p = 0, low = low0, high = high1;
4008 /* Canonicalize - [min, x] into - [-, x]. */
4009 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4010 switch (TREE_CODE (TREE_TYPE (low0)))
/* Skip types whose precision doesn't fill the machine mode; for those
   TYPE_MIN/MAX_VALUE doesn't coincide with the mode's extremes.  */
4013 if (TYPE_PRECISION (TREE_TYPE (low0))
4014 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4019 if (tree_int_cst_equal (low0,
4020 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4024 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4025 && integer_zerop (low0))
4032 /* Canonicalize - [x, max] into - [x, -]. */
4033 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4034 switch (TREE_CODE (TREE_TYPE (high1)))
4037 if (TYPE_PRECISION (TREE_TYPE (high1))
4038 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4043 if (tree_int_cst_equal (high1,
4044 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* Unsigned max: HIGH1 + 1 wraps to zero, detected via range_binop.  */
4048 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4049 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4051 integer_one_node, 1)))
4058 /* The ranges might be also adjacent between the maximum and
4059 minimum values of the given type. For
4060 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4061 return + [x + 1, y - 1]. */
4062 if (low0 == 0 && high1 == 0)
4064 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4065 integer_one_node, 1);
4066 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4067 integer_one_node, 0);
4068 if (low == 0 || high == 0)
4078 in_p = 0, low = low0, high = high0;
4080 in_p = 0, low = low0, high = high1;
4083 *pin_p = in_p, *plow = low, *phigh = high;
4088 /* Subroutine of fold, looking inside expressions of the form
4089 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4090 of the COND_EXPR. This function is being used also to optimize
4091 A op B ? C : A, by reversing the comparison first.
4093 Return a folded expression whose code is not a COND_EXPR
4094 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): this extract elides interleaved lines (return type, braces,
   switch/case labels, some guards); comments below describe only visible
   code.  ARG0 is the comparison, ARG1/ARG2 the two COND_EXPR arms.  */
4097 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4099 enum tree_code comp_code = TREE_CODE (arg0);
4100 tree arg00 = TREE_OPERAND (arg0, 0);
4101 tree arg01 = TREE_OPERAND (arg0, 1);
4102 tree arg1_type = TREE_TYPE (arg1);
4108 /* If we have A op 0 ? A : -A, consider applying the following
4111 A == 0? A : -A same as -A
4112 A != 0? A : -A same as A
4113 A >= 0? A : -A same as abs (A)
4114 A > 0? A : -A same as abs (A)
4115 A <= 0? A : -A same as -abs (A)
4116 A < 0? A : -A same as -abs (A)
4118 None of these transformations work for modes with signed
4119 zeros. If A is +/-0, the first two transformations will
4120 change the sign of the result (from +0 to -0, or vice
4121 versa). The last four will fix the sign of the result,
4122 even though the original expressions could be positive or
4123 negative, depending on the sign of A.
4125 Note that all these transformations are correct if A is
4126 NaN, since the two alternatives (A and -A) are also NaNs. */
4127 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4128 ? real_zerop (arg01)
4129 : integer_zerop (arg01))
4130 && TREE_CODE (arg2) == NEGATE_EXPR
4131 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* The dispatch on COMP_CODE (switch/case labels) is elided here; the
   following return statements correspond to the cases listed above.  */
4135 tem = fold_convert (arg1_type, arg1);
4136 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4138 return pedantic_non_lvalue (fold_convert (type, arg1));
/* abs (A) cases: ABS_EXPR requires a signed operand type.  */
4141 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4142 arg1 = fold_convert (lang_hooks.types.signed_type
4143 (TREE_TYPE (arg1)), arg1);
4144 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4145 return pedantic_non_lvalue (fold_convert (type, tem));
/* -abs (A) cases.  */
4148 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4149 arg1 = fold_convert (lang_hooks.types.signed_type
4150 (TREE_TYPE (arg1)), arg1);
4151 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4152 return negate_expr (fold_convert (type, tem));
4157 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4158 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4159 both transformations are correct when A is NaN: A != 0
4160 is then true, and A == 0 is false. */
4162 if (integer_zerop (arg01) && integer_zerop (arg2))
4164 if (comp_code == NE_EXPR)
4165 return pedantic_non_lvalue (fold_convert (type, arg1));
4166 else if (comp_code == EQ_EXPR)
4167 return fold_convert (type, integer_zero_node);
4170 /* Try some transformations of A op B ? A : B.
4172 A == B? A : B same as B
4173 A != B? A : B same as A
4174 A >= B? A : B same as max (A, B)
4175 A > B? A : B same as max (B, A)
4176 A <= B? A : B same as min (A, B)
4177 A < B? A : B same as min (B, A)
4179 As above, these transformations don't work in the presence
4180 of signed zeros. For example, if A and B are zeros of
4181 opposite sign, the first two transformations will change
4182 the sign of the result. In the last four, the original
4183 expressions give different results for (A=+0, B=-0) and
4184 (A=-0, B=+0), but the transformed expressions do not.
4186 The first two transformations are correct if either A or B
4187 is a NaN. In the first transformation, the condition will
4188 be false, and B will indeed be chosen. In the case of the
4189 second transformation, the condition A != B will be true,
4190 and A will be chosen.
4192 The conversions to max() and min() are not correct if B is
4193 a number and A is not. The conditions in the original
4194 expressions will be false, so all four give B. The min()
4195 and max() versions would give a NaN instead. */
4196 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4198 tree comp_op0 = arg00;
4199 tree comp_op1 = arg01;
4200 tree comp_type = TREE_TYPE (comp_op0);
4202 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4203 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* A == B ? A : B is just B; A != B ? A : B is just A.  */
4213 return pedantic_non_lvalue (fold_convert (type, arg2));
4215 return pedantic_non_lvalue (fold_convert (type, arg1));
4218 /* In C++ a ?: expression can be an lvalue, so put the
4219 operand which will be used if they are equal first
4220 so that we can convert this back to the
4221 corresponding COND_EXPR. */
4222 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4224 comp_op0 = fold_convert (comp_type, comp_op0);
4225 comp_op1 = fold_convert (comp_type, comp_op1);
4226 tem = fold (build2 (MIN_EXPR, comp_type,
4227 (comp_code == LE_EXPR
4228 ? comp_op0 : comp_op1),
4229 (comp_code == LE_EXPR
4230 ? comp_op1 : comp_op0)));
4231 return pedantic_non_lvalue (fold_convert (type, tem));
/* GE/GT cases: build MAX_EXPR, again only when NaNs need no honoring.  */
4236 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4238 comp_op0 = fold_convert (comp_type, comp_op0);
4239 comp_op1 = fold_convert (comp_type, comp_op1);
4240 tem = fold (build2 (MAX_EXPR, comp_type,
4241 (comp_code == GE_EXPR
4242 ? comp_op0 : comp_op1),
4243 (comp_code == GE_EXPR
4244 ? comp_op1 : comp_op0)));
/* NOTE(review): the line below rebuilds MAX_EXPR with fixed operand order,
   discarding the order-sensitive TEM computed just above.  Elided lines
   between 4244 and 4245 may contain an intervening condition or case
   label -- verify against the full source before treating this as dead
   or duplicated code.  */
4245 tem = fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1));
4246 return pedantic_non_lvalue (fold_convert (type, tem));
4254 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4255 we might still be able to simplify this. For example,
4256 if C1 is one less or one more than C2, this might have started
4257 out as a MIN or MAX and been transformed by this function.
4258 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4260 if (INTEGRAL_TYPE_P (type)
4261 && TREE_CODE (arg01) == INTEGER_CST
4262 && TREE_CODE (arg2) == INTEGER_CST)
/* The switch on COMP_CODE is elided; each group below handles one
   comparison code, as the comments state.  */
4266 /* We can replace A with C1 in this case. */
4267 arg1 = fold_convert (type, arg01);
4268 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4271 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX_VALUE guard rules out wraparound when computing C2 + 1.  */
4272 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4274 && operand_equal_p (arg01,
4275 const_binop (PLUS_EXPR, arg2,
4276 integer_one_node, 0),
4278 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4279 type, arg1, arg2)));
4283 /* If C1 is C2 - 1, this is min(A, C2). */
4284 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4286 && operand_equal_p (arg01,
4287 const_binop (MINUS_EXPR, arg2,
4288 integer_one_node, 0),
4290 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4291 type, arg1, arg2)));
4295 /* If C1 is C2 - 1, this is max(A, C2). */
4296 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4298 && operand_equal_p (arg01,
4299 const_binop (MINUS_EXPR, arg2,
4300 integer_one_node, 0),
4302 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4303 type, arg1, arg2)));
4307 /* If C1 is C2 + 1, this is max(A, C2). */
4308 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4310 && operand_equal_p (arg01,
4311 const_binop (PLUS_EXPR, arg2,
4312 integer_one_node, 0),
4314 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4315 type, arg1, arg2)));
/* Nonzero when fold_range_test should rewrite a short-circuit AND/OR of
   range tests into a non-short-circuit form; defaults to "branches are
   expensive" (BRANCH_COST >= 2).  Targets may override.
   (The matching #endif is elided in this extract.)  */
4328 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4329 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4332 /* EXP is some logical combination of boolean tests. See if we can
4333 merge it into some range test. Return the new tree if so. */
/* NOTE(review): this extract elides interleaved lines (return type, braces,
   parts of conditions); comments below describe only visible code.  */
4336 fold_range_test (tree exp)
4338 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4339 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4340 int in0_p, in1_p, in_p;
4341 tree low0, low1, low, high0, high1, high;
/* Decompose each side of the AND/OR into a range test:
   "operand is (or is not) in [lowN, highN]".  */
4342 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4343 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4346 /* If this is an OR operation, invert both sides; we will invert
4347 again at the end. */
4349 in0_p = ! in0_p, in1_p = ! in1_p;
4351 /* If both expressions are the same, if we can merge the ranges, and we
4352 can build the range test, return it or it inverted. If one of the
4353 ranges is always true or always false, consider it to be the same
4354 expression as the other. */
4355 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4356 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4358 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4360 : rhs != 0 ? rhs : integer_zero_node,
4362 return or_op ? invert_truthvalue (tem) : tem;
4364 /* On machines where the branch cost is expensive, if this is a
4365 short-circuited branch and the underlying object on both sides
4366 is the same, make a non-short-circuit operation. */
4367 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4368 && lhs != 0 && rhs != 0
4369 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4370 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4371 && operand_equal_p (lhs, rhs, 0))
4373 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4374 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4375 which cases we can't do this. */
4376 if (simple_operand_p (lhs))
4377 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4378 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4379 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4380 TREE_OPERAND (exp, 1));
4382 else if (lang_hooks.decls.global_bindings_p () == 0
4383 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the shared operand in a SAVE_EXPR so it is evaluated once, then
   rebuild both sides as range checks against that common operand.  */
4385 tree common = save_expr (lhs);
4387 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4388 or_op ? ! in0_p : in0_p,
4390 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4391 or_op ? ! in1_p : in1_p,
4393 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4394 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4395 TREE_TYPE (exp), lhs, rhs);
4402 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4403 bit value. Arrange things so the extra bits will be set to zero if and
4404 only if C is signed-extended to its full width. If MASK is nonzero,
4405 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): this extract elides some interleaved lines (return type,
   braces, one guard); comments below describe only visible code.  */
4408 unextend (tree c, int p, int unsignedp, tree mask)
4410 tree type = TREE_TYPE (c);
4411 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned values need no sign-extension handling.
   (The return statement for this early-out is elided here.)  */
4414 if (p == modesize || unsignedp)
4417 /* We work by getting just the sign bit into the low-order bit, then
4418 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) as 0 or 1.  */
4420 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4421 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4423 /* We must use a signed type in order to get an arithmetic right shift.
4424 However, we must also avoid introducing accidental overflows, so that
4425 a subsequent call to integer_zerop will work. Hence we must
4426 do the type conversion here. At this point, the constant is either
4427 zero or one, and the conversion to a signed type can never overflow.
4428 We could get an overflow if this conversion is done anywhere else. */
4429 if (TYPE_UNSIGNED (type))
4430 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Shift the bit to the mode's top position, then arithmetic-shift it
   back down so TEMP holds the sign bit replicated through the extra
   (high) bit positions.  */
4432 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4433 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the extension pattern to the caller-supplied MASK, if any
   (the "if (mask != 0)" guard is elided in this extract).  */
4435 temp = const_binop (BIT_AND_EXPR, temp,
4436 fold_convert (TREE_TYPE (c), mask), 0);
4437 /* If necessary, convert the type back to match the type of C. */
4438 if (TYPE_UNSIGNED (type))
4439 temp = fold_convert (type, temp);
/* XOR flips exactly the extra bits when C was sign-extended, yielding
   zeros there iff C sign-extends to full width.  */
4441 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4444 /* Find ways of folding logical expressions of LHS and RHS:
4445 Try to merge two comparisons to the same innermost item.
4446 Look for range tests like "ch >= '0' && ch <= '9'".
4447 Look for combinations of simple terms on machines with expensive branches
4448 and evaluate the RHS unconditionally.
4450 For example, if we have p->a == 2 && p->b == 4 and we can make an
4451 object large enough to span both A and B, we can do this with a comparison
4452 against the object ANDed with the a mask.
4454 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4455 operations to do this with one comparison.
4457 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4458 function and the one above.
4460 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4461 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4463 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4466 We return the simplified tree or 0 if no optimization is possible. */
4469 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4471 /* If this is the "or" of two comparisons, we can do something if
4472 the comparisons are NE_EXPR. If this is the "and", we can do something
4473 if the comparisons are EQ_EXPR. I.e.,
4474 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4476 WANTED_CODE is this operation code. For single bit fields, we can
4477 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4478 comparison for one-bit fields. */
4480 enum tree_code wanted_code;
4481 enum tree_code lcode, rcode;
4482 tree ll_arg, lr_arg, rl_arg, rr_arg;
4483 tree ll_inner, lr_inner, rl_inner, rr_inner;
4484 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4485 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4486 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4487 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4488 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4489 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4490 enum machine_mode lnmode, rnmode;
4491 tree ll_mask, lr_mask, rl_mask, rr_mask;
4492 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4493 tree l_const, r_const;
4494 tree lntype, rntype, result;
4495 int first_bit, end_bit;
4498 /* Start by getting the comparison codes. Fail if anything is volatile.
4499 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4500 it were surrounded with a NE_EXPR. */
4502 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4505 lcode = TREE_CODE (lhs);
4506 rcode = TREE_CODE (rhs);
4508 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4510 lhs = build2 (NE_EXPR, truth_type, lhs,
4511 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4515 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4517 rhs = build2 (NE_EXPR, truth_type, rhs,
4518 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4522 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4525 ll_arg = TREE_OPERAND (lhs, 0);
4526 lr_arg = TREE_OPERAND (lhs, 1);
4527 rl_arg = TREE_OPERAND (rhs, 0);
4528 rr_arg = TREE_OPERAND (rhs, 1);
4530 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4531 if (simple_operand_p (ll_arg)
4532 && simple_operand_p (lr_arg))
4535 if (operand_equal_p (ll_arg, rl_arg, 0)
4536 && operand_equal_p (lr_arg, rr_arg, 0))
4538 result = combine_comparisons (code, lcode, rcode,
4539 truth_type, ll_arg, lr_arg);
4543 else if (operand_equal_p (ll_arg, rr_arg, 0)
4544 && operand_equal_p (lr_arg, rl_arg, 0))
4546 result = combine_comparisons (code, lcode,
4547 swap_tree_comparison (rcode),
4548 truth_type, ll_arg, lr_arg);
4554 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4555 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4557 /* If the RHS can be evaluated unconditionally and its operands are
4558 simple, it wins to evaluate the RHS unconditionally on machines
4559 with expensive branches. In this case, this isn't a comparison
4560 that can be merged. Avoid doing this if the RHS is a floating-point
4561 comparison since those can trap. */
4563 if (BRANCH_COST >= 2
4564 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4565 && simple_operand_p (rl_arg)
4566 && simple_operand_p (rr_arg))
4568 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4569 if (code == TRUTH_OR_EXPR
4570 && lcode == NE_EXPR && integer_zerop (lr_arg)
4571 && rcode == NE_EXPR && integer_zerop (rr_arg)
4572 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4573 return build2 (NE_EXPR, truth_type,
4574 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4576 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4578 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4579 if (code == TRUTH_AND_EXPR
4580 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4581 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4582 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4583 return build2 (EQ_EXPR, truth_type,
4584 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4586 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4588 return build2 (code, truth_type, lhs, rhs);
4591 /* See if the comparisons can be merged. Then get all the parameters for
4594 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4595 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4599 ll_inner = decode_field_reference (ll_arg,
4600 &ll_bitsize, &ll_bitpos, &ll_mode,
4601 &ll_unsignedp, &volatilep, &ll_mask,
4603 lr_inner = decode_field_reference (lr_arg,
4604 &lr_bitsize, &lr_bitpos, &lr_mode,
4605 &lr_unsignedp, &volatilep, &lr_mask,
4607 rl_inner = decode_field_reference (rl_arg,
4608 &rl_bitsize, &rl_bitpos, &rl_mode,
4609 &rl_unsignedp, &volatilep, &rl_mask,
4611 rr_inner = decode_field_reference (rr_arg,
4612 &rr_bitsize, &rr_bitpos, &rr_mode,
4613 &rr_unsignedp, &volatilep, &rr_mask,
4616 /* It must be true that the inner operation on the lhs of each
4617 comparison must be the same if we are to be able to do anything.
4618 Then see if we have constants. If not, the same must be true for
4620 if (volatilep || ll_inner == 0 || rl_inner == 0
4621 || ! operand_equal_p (ll_inner, rl_inner, 0))
4624 if (TREE_CODE (lr_arg) == INTEGER_CST
4625 && TREE_CODE (rr_arg) == INTEGER_CST)
4626 l_const = lr_arg, r_const = rr_arg;
4627 else if (lr_inner == 0 || rr_inner == 0
4628 || ! operand_equal_p (lr_inner, rr_inner, 0))
4631 l_const = r_const = 0;
4633 /* If either comparison code is not correct for our logical operation,
4634 fail. However, we can convert a one-bit comparison against zero into
4635 the opposite comparison against that bit being set in the field. */
4637 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4638 if (lcode != wanted_code)
4640 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4642 /* Make the left operand unsigned, since we are only interested
4643 in the value of one bit. Otherwise we are doing the wrong
4652 /* This is analogous to the code for l_const above. */
4653 if (rcode != wanted_code)
4655 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4664 /* After this point all optimizations will generate bit-field
4665 references, which we might not want. */
4666 if (! lang_hooks.can_use_bit_fields_p ())
4669 /* See if we can find a mode that contains both fields being compared on
4670 the left. If we can't, fail. Otherwise, update all constants and masks
4671 to be relative to a field of that size. */
4672 first_bit = MIN (ll_bitpos, rl_bitpos);
4673 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4674 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4675 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4677 if (lnmode == VOIDmode)
4680 lnbitsize = GET_MODE_BITSIZE (lnmode);
4681 lnbitpos = first_bit & ~ (lnbitsize - 1);
4682 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4683 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4685 if (BYTES_BIG_ENDIAN)
4687 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4688 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4691 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4692 size_int (xll_bitpos), 0);
4693 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4694 size_int (xrl_bitpos), 0);
4698 l_const = fold_convert (lntype, l_const);
4699 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4700 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4701 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4702 fold (build1 (BIT_NOT_EXPR,
4706 warning ("comparison is always %d", wanted_code == NE_EXPR);
4708 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4713 r_const = fold_convert (lntype, r_const);
4714 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4715 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4716 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4717 fold (build1 (BIT_NOT_EXPR,
4721 warning ("comparison is always %d", wanted_code == NE_EXPR);
4723 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4727 /* If the right sides are not constant, do the same for it. Also,
4728 disallow this optimization if a size or signedness mismatch occurs
4729 between the left and right sides. */
4732 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4733 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4734 /* Make sure the two fields on the right
4735 correspond to the left without being swapped. */
4736 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4739 first_bit = MIN (lr_bitpos, rr_bitpos);
4740 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4741 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4742 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4744 if (rnmode == VOIDmode)
4747 rnbitsize = GET_MODE_BITSIZE (rnmode);
4748 rnbitpos = first_bit & ~ (rnbitsize - 1);
4749 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4750 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4752 if (BYTES_BIG_ENDIAN)
4754 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4755 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4758 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4759 size_int (xlr_bitpos), 0);
4760 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4761 size_int (xrr_bitpos), 0);
4763 /* Make a mask that corresponds to both fields being compared.
4764 Do this for both items being compared. If the operands are the
4765 same size and the bits being compared are in the same position
4766 then we can do this by masking both and comparing the masked
4768 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4769 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4770 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4772 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4773 ll_unsignedp || rl_unsignedp);
4774 if (! all_ones_mask_p (ll_mask, lnbitsize))
4775 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4777 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4778 lr_unsignedp || rr_unsignedp);
4779 if (! all_ones_mask_p (lr_mask, rnbitsize))
4780 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4782 return build2 (wanted_code, truth_type, lhs, rhs);
4785 /* There is still another way we can do something: If both pairs of
4786 fields being compared are adjacent, we may be able to make a wider
4787 field containing them both.
4789 Note that we still must mask the lhs/rhs expressions. Furthermore,
4790 the mask must be shifted to account for the shift done by
4791 make_bit_field_ref. */
4792 if ((ll_bitsize + ll_bitpos == rl_bitpos
4793 && lr_bitsize + lr_bitpos == rr_bitpos)
4794 || (ll_bitpos == rl_bitpos + rl_bitsize
4795 && lr_bitpos == rr_bitpos + rr_bitsize))
4799 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4800 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4801 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4802 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4804 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4805 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4806 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4807 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4809 /* Convert to the smaller type before masking out unwanted bits. */
4811 if (lntype != rntype)
4813 if (lnbitsize > rnbitsize)
4815 lhs = fold_convert (rntype, lhs);
4816 ll_mask = fold_convert (rntype, ll_mask);
4819 else if (lnbitsize < rnbitsize)
4821 rhs = fold_convert (lntype, rhs);
4822 lr_mask = fold_convert (lntype, lr_mask);
4827 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4828 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4830 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4831 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4833 return build2 (wanted_code, truth_type, lhs, rhs);
4839 /* Handle the case of comparisons with constants. If there is something in
4840 common between the masks, those bits of the constants must be the same.
4841 If not, the condition is always false. Test for this to avoid generating
4842 incorrect code below. */
4843 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4844 if (! integer_zerop (result)
4845 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4846 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4848 if (wanted_code == NE_EXPR)
4850 warning ("`or' of unmatched not-equal tests is always 1");
4851 return constant_boolean_node (true, truth_type);
4855 warning ("`and' of mutually exclusive equal-tests is always 0");
4856 return constant_boolean_node (false, truth_type);
4860 /* Construct the expression we will return. First get the component
4861 reference we will make. Unless the mask is all ones the width of
4862 that field, perform the mask operation. Then compare with the
4864 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4865 ll_unsignedp || rl_unsignedp);
4867 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4868 if (! all_ones_mask_p (ll_mask, lnbitsize))
4869 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4871 return build2 (wanted_code, truth_type, result,
4872 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4875 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant operand.  Returns a simplified comparison tree, or the
   original tree when no simplification applies.  */
4879 optimize_minmax_comparison (tree t)
4881 tree type = TREE_TYPE (t);
4882 tree arg0 = TREE_OPERAND (t, 0);
4883 enum tree_code op_code;
4884 tree comp_const = TREE_OPERAND (t, 1);
4886 int consts_equal, consts_lt;
/* Look through sign-preserving conversions to find the MIN/MAX.  */
4889 STRIP_SIGN_NOPS (arg0);
4891 op_code = TREE_CODE (arg0);
4892 minmax_const = TREE_OPERAND (arg0, 1);
/* Compare the MIN/MAX constant against the comparison constant; the
   case analysis below is driven entirely by these two flags.  */
4893 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4894 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4895 inner = TREE_OPERAND (arg0, 0);
4897 /* If something does not permit us to optimize, return the original tree. */
4898 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4899 || TREE_CODE (comp_const) != INTEGER_CST
4900 || TREE_CONSTANT_OVERFLOW (comp_const)
4901 || TREE_CODE (minmax_const) != INTEGER_CST
4902 || TREE_CONSTANT_OVERFLOW (minmax_const))
4905 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4906 and GT_EXPR, doing the rest with recursive calls using logical
4908 switch (TREE_CODE (t))
4910 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* !=, < and <= are the logical inverses of ==, >= and >: invert,
   simplify recursively, then invert the result back.  */
4912 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* >= is decomposed as "== || >", each handled below.  */
4916 fold (build2 (TRUTH_ORIF_EXPR, type,
4917 optimize_minmax_comparison
4918 (build2 (EQ_EXPR, type, arg0, comp_const)),
4919 optimize_minmax_comparison
4920 (build2 (GT_EXPR, type, arg0, comp_const))));
/* EQ_EXPR cases.  In the examples, 0 stands for MINMAX_CONST.  */
4923 if (op_code == MAX_EXPR && consts_equal)
4924 /* MAX (X, 0) == 0 -> X <= 0 */
4925 return fold (build2 (LE_EXPR, type, inner, comp_const));
4927 else if (op_code == MAX_EXPR && consts_lt)
4928 /* MAX (X, 0) == 5 -> X == 5 */
4929 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4931 else if (op_code == MAX_EXPR)
4932 /* MAX (X, 0) == -1 -> false */
4933 return omit_one_operand (type, integer_zero_node, inner);
4935 else if (consts_equal)
4936 /* MIN (X, 0) == 0 -> X >= 0 */
4937 return fold (build2 (GE_EXPR, type, inner, comp_const));
4940 /* MIN (X, 0) == 5 -> false */
4941 return omit_one_operand (type, integer_zero_node, inner);
4944 /* MIN (X, 0) == -1 -> X == -1 */
4945 return fold (build2 (EQ_EXPR, type, inner, comp_const));
/* GT_EXPR cases.  */
4948 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4949 /* MAX (X, 0) > 0 -> X > 0
4950 MAX (X, 0) > 5 -> X > 5 */
4951 return fold (build2 (GT_EXPR, type, inner, comp_const));
4953 else if (op_code == MAX_EXPR)
4954 /* MAX (X, 0) > -1 -> true */
4955 return omit_one_operand (type, integer_one_node, inner);
4957 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4958 /* MIN (X, 0) > 0 -> false
4959 MIN (X, 0) > 5 -> false */
4960 return omit_one_operand (type, integer_zero_node, inner);
4963 /* MIN (X, 0) > -1 -> X > -1 */
4964 return fold (build2 (GT_EXPR, type, inner, comp_const));
4971 /* T is an integer expression that is being multiplied, divided, or taken a
4972 modulus (CODE says which and what kind of divide or modulus) by a
4973 constant C. See if we can eliminate that operation by folding it with
4974 other operations already in T. WIDE_TYPE, if non-null, is a type that
4975 should be used for the computation if wider than our type.
4977 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4978 (X * 2) + (Y * 4). We must, however, be assured that either the original
4979 expression would not overflow or that overflow is undefined for the type
4980 in the language in question.
4982 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4983 the machine has a multiply-accumulate insn or that this is part of an
4984 addressing calculation.
4986 If we return a non-null expression, it is an equivalent form of the
4987 original computation, but need not be in the original type. */
4990 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4992 /* To avoid exponential search depth, refuse to allow recursion past
4993 three levels. Beyond that (1) it's highly unlikely that we'll find
4994 something interesting and (2) we've probably processed it before
4995 when we built the inner expression. */
/* Delegate the actual work; this wrapper only enforces the depth cap.  */
5004 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: see the commentary above extract_muldiv for
   the full contract.  T is the expression being scaled, C the nonzero
   constant, CODE the MULT/DIV/MOD operation, and WIDE_TYPE an optional
   wider type in which to perform the computation.  Returns an equivalent
   simplified expression or null if no simplification is found.  */
5011 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5013 tree type = TREE_TYPE (t);
5014 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
5015 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5016 > GET_MODE_SIZE (TYPE_MODE (type)))
5017 ? wide_type : type);
5019 int same_p = tcode == code;
5020 tree op0 = NULL_TREE, op1 = NULL_TREE;
5022 /* Don't deal with constants of zero here; they confuse the code below. */
5023 if (integer_zerop (c))
/* Fetch the operands of unary ('1') and binary ('2') expressions.  */
5026 if (TREE_CODE_CLASS (tcode) == '1')
5027 op0 = TREE_OPERAND (t, 0);
5029 if (TREE_CODE_CLASS (tcode) == '2')
5030 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5032 /* Note that we need not handle conditional operations here since fold
5033 already handles those cases. So just do arithmetic here. */
5037 /* For a constant, we can always simplify if we are a multiply
5038 or (for divide and modulus) if it is a multiple of our constant. */
5039 if (code == MULT_EXPR
5040 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5041 return const_binop (code, fold_convert (ctype, t),
5042 fold_convert (ctype, c), 0);
5045 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5046 /* If op0 is an expression ... */
5047 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5048 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5049 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5050 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5051 /* ... and is unsigned, and its type is smaller than ctype,
5052 then we cannot pass through as widening. */
5053 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5054 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5055 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5056 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5057 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5058 /* ... or this is a truncation (t is narrower than op0),
5059 then we cannot pass through this narrowing. */
5060 || (GET_MODE_SIZE (TYPE_MODE (type))
5061 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5062 /* ... or signedness changes for division or modulus,
5063 then we cannot pass through this conversion. */
5064 || (code != MULT_EXPR
5065 && (TYPE_UNSIGNED (ctype)
5066 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5069 /* Pass the constant down and see if we can make a simplification. If
5070 we can, replace this expression with the inner simplification for
5071 possible later conversion to our or some other type. */
5072 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5073 && TREE_CODE (t2) == INTEGER_CST
5074 && ! TREE_CONSTANT_OVERFLOW (t2)
5075 && (0 != (t1 = extract_muldiv (op0, t2, code,
5077 ? ctype : NULL_TREE))))
5081 case NEGATE_EXPR: case ABS_EXPR:
/* Distribute through the unary operation: op (X) * C -> op (X * C).  */
5082 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5083 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5086 case MIN_EXPR: case MAX_EXPR:
5087 /* If widening the type changes the signedness, then we can't perform
5088 this optimization as that changes the result. */
5089 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5092 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5093 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5094 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Scaling by a negative constant flips the ordering, so MIN
   becomes MAX and vice versa.  */
5096 if (tree_int_cst_sgn (c) < 0)
5097 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5099 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5100 fold_convert (ctype, t2)));
5104 case LSHIFT_EXPR: case RSHIFT_EXPR:
5105 /* If the second operand is constant, this is a multiplication
5106 or floor division, by a power of two, so we can treat it that
5107 way unless the multiplier or divisor overflows. Signed
5108 left-shift overflow is implementation-defined rather than
5109 undefined in C90, so do not convert signed left shift into
5111 if (TREE_CODE (op1) == INTEGER_CST
5112 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5113 /* const_binop may not detect overflow correctly,
5114 so check for it explicitly here. */
5115 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5116 && TREE_INT_CST_HIGH (op1) == 0
5117 && 0 != (t1 = fold_convert (ctype,
5118 const_binop (LSHIFT_EXPR,
5121 && ! TREE_OVERFLOW (t1))
/* Rewrite the shift as the equivalent MULT/FLOOR_DIV and retry.  */
5122 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5123 ? MULT_EXPR : FLOOR_DIV_EXPR,
5124 ctype, fold_convert (ctype, op0), t1),
5125 c, code, wide_type);
5128 case PLUS_EXPR: case MINUS_EXPR:
5129 /* See if we can eliminate the operation on both sides. If we can, we
5130 can return a new PLUS or MINUS. If we can't, the only remaining
5131 cases where we can do anything are if the second operand is a
5133 t1 = extract_muldiv (op0, c, code, wide_type);
5134 t2 = extract_muldiv (op1, c, code, wide_type);
5135 if (t1 != 0 && t2 != 0
5136 && (code == MULT_EXPR
5137 /* If not multiplication, we can only do this if both operands
5138 are divisible by c. */
5139 || (multiple_of_p (ctype, op0, c)
5140 && multiple_of_p (ctype, op1, c))))
5141 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5142 fold_convert (ctype, t2)));
5144 /* If this was a subtraction, negate OP1 and set it to be an addition.
5145 This simplifies the logic below. */
5146 if (tcode == MINUS_EXPR)
5147 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5149 if (TREE_CODE (op1) != INTEGER_CST)
5152 /* If either OP1 or C are negative, this optimization is not safe for
5153 some of the division and remainder types while for others we need
5154 to change the code. */
5155 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5157 if (code == CEIL_DIV_EXPR)
5158 code = FLOOR_DIV_EXPR;
5159 else if (code == FLOOR_DIV_EXPR)
5160 code = CEIL_DIV_EXPR;
5161 else if (code != MULT_EXPR
5162 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5166 /* If it's a multiply or a division/modulus operation of a multiple
5167 of our constant, do the operation and verify it doesn't overflow. */
5168 if (code == MULT_EXPR
5169 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5171 op1 = const_binop (code, fold_convert (ctype, op1),
5172 fold_convert (ctype, c), 0);
5173 /* We allow the constant to overflow with wrapping semantics. */
5175 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5181 /* If we have an unsigned type that is not a sizetype, we cannot widen
5182 the operation since it will change the result if the original
5183 computation overflowed. */
5184 if (TYPE_UNSIGNED (ctype)
5185 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5189 /* If we were able to eliminate our operation from the first side,
5190 apply our operation to the second side and reform the PLUS. */
5191 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5192 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5194 /* The last case is if we are a multiply. In that case, we can
5195 apply the distributive law to commute the multiply and addition
5196 if the multiplication of the constants doesn't overflow. */
5197 if (code == MULT_EXPR)
5198 return fold (build2 (tcode, ctype,
5199 fold (build2 (code, ctype,
5200 fold_convert (ctype, op0),
5201 fold_convert (ctype, c))),
5207 /* We have a special case here if we are doing something like
5208 (C * 8) % 4 since we know that's zero. */
5209 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5210 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5211 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5212 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5213 return omit_one_operand (type, integer_zero_node, op0);
5215 /* ... fall through ... */
5217 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5218 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5219 /* If we can extract our operation from the LHS, do so and return a
5220 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5221 do something only if the second operand is a constant. */
5223 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5224 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5225 fold_convert (ctype, op1)));
5226 else if (tcode == MULT_EXPR && code == MULT_EXPR
5227 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5228 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5229 fold_convert (ctype, t1)));
5230 else if (TREE_CODE (op1) != INTEGER_CST)
5233 /* If these are the same operation types, we can associate them
5234 assuming no overflow. */
5236 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5237 fold_convert (ctype, c), 0))
5238 && ! TREE_OVERFLOW (t1))
5239 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5241 /* If these operations "cancel" each other, we have the main
5242 optimizations of this pass, which occur when either constant is a
5243 multiple of the other, in which case we replace this with either an
5244 operation or CODE or TCODE.
5246 If we have an unsigned type that is not a sizetype, we cannot do
5247 this since it will change the result if the original computation
5249 if ((! TYPE_UNSIGNED (ctype)
5250 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5252 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5253 || (tcode == MULT_EXPR
5254 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5255 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* (X * OP1) / C when C divides OP1 exactly: keep TCODE, scale down.  */
5257 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5258 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5259 fold_convert (ctype,
5260 const_binop (TRUNC_DIV_EXPR,
/* Conversely, when OP1 divides C exactly: keep CODE, scale down.  */
5262 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5263 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5264 fold_convert (ctype,
5265 const_binop (TRUNC_DIV_EXPR,
5277 /* Return a node which has the indicated constant VALUE (either 0 or
5278 1), and is of the indicated TYPE. */
5281 constant_boolean_node (int value, tree type)
/* Reuse the shared nodes for the common integer and boolean types.  */
5283 if (type == integer_type_node)
5284 return value ? integer_one_node : integer_zero_node;
5285 else if (type == boolean_type_node)
5286 return value ? boolean_true_node : boolean_false_node;
5287 else if (TREE_CODE (type) == BOOLEAN_TYPE)
/* Some other boolean type: let the front end build the truth value.  */
5288 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5289 : integer_zero_node);
/* Any remaining type: build a fresh integer constant of that type.  */
5291 return build_int_cst (type, value);
5294 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5295 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5296 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5297 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5298 COND is the first argument to CODE; otherwise (as in the example
5299 given here), it is the second argument. TYPE is the type of the
5300 original expression. Return NULL_TREE if no simplification is
5304 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5305 tree cond, tree arg, int cond_first_p)
5307 tree test, true_value, false_value;
5308 tree lhs = NULL_TREE;
5309 tree rhs = NULL_TREE;
5311 /* This transformation is only worthwhile if we don't have to wrap
5312 arg in a SAVE_EXPR, and the operation can be simplified on at least
5313 one of the branches once it's pushed inside the COND_EXPR. */
5314 if (!TREE_CONSTANT (arg))
5317 if (TREE_CODE (cond) == COND_EXPR)
5319 test = TREE_OPERAND (cond, 0);
5320 true_value = TREE_OPERAND (cond, 1);
5321 false_value = TREE_OPERAND (cond, 2);
5322 /* If this operand throws an expression, then it does not make
5323 sense to try to perform a logical or arithmetic operation
5325 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5327 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a comparison or other truth value: treat it as selecting
   between constant true and false of its own type.  */
5332 tree testtype = TREE_TYPE (cond);
5334 true_value = constant_boolean_node (true, testtype);
5335 false_value = constant_boolean_node (false, testtype);
/* Apply CODE to each arm of the conditional, honoring operand order.  */
5339 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5340 : build2 (code, type, arg, true_value));
5342 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5343 : build2 (code, type, arg, false_value));
/* Rebuild the conditional with the operation pushed into both arms.  */
5345 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5346 return fold_convert (type, test);
5350 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5352 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5353 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5354 ADDEND is the same as X.
5356 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5357 and finite. The problematic cases are when X is zero, and its mode
5358 has signed zeros. In the case of rounding towards -infinity,
5359 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5360 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5363 fold_real_zero_addition_p (tree type, tree addend, int negate)
/* Only a literal zero addend can possibly qualify.  */
5365 if (!real_zerop (addend))
5368 /* Don't allow the fold with -fsignaling-nans. */
5369 if (HONOR_SNANS (TYPE_MODE (type)))
5372 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5373 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5376 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5377 if (TREE_CODE (addend) == REAL_CST
5378 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5381 /* The mode has signed zeros, and we have to honor their sign.
5382 In this situation, there is only one case we can return true for.
5383 X - 0 is the same as X unless rounding towards -infinity is
5385 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5388 /* Subroutine of fold() that checks comparisons of built-in math
5389 functions against real constants.
5391 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5392 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5393 is the type of the result and ARG0 and ARG1 are the operands of the
5394 comparison. ARG1 must be a TREE_REAL_CST.
5396 The function returns the constant folded tree if a simplification
5397 can be made, and NULL_TREE otherwise. */
5400 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5401 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled here.  */
5405 if (BUILTIN_SQRT_P (fcode))
5407 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5408 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5410 c = TREE_REAL_CST (arg1);
5411 if (REAL_VALUE_NEGATIVE (c))
/* sqrt(x) is never negative, so comparisons against a negative
   constant have a known result (modulo NaN behavior).  */
5413 /* sqrt(x) < y is always false, if y is negative. */
5414 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5415 return omit_one_operand (type, integer_zero_node, arg);
5417 /* sqrt(x) > y is always true, if y is negative and we
5418 don't care about NaNs, i.e. negative values of x. */
5419 if (code == NE_EXPR || !HONOR_NANS (mode))
5420 return omit_one_operand (type, integer_one_node, arg);
5422 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5423 return fold (build2 (GE_EXPR, type, arg,
5424 build_real (TREE_TYPE (arg), dconst0)));
5426 else if (code == GT_EXPR || code == GE_EXPR)
/* Compare the argument against c*c, squaring in the target mode.  */
5430 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5431 real_convert (&c2, mode, &c2);
5433 if (REAL_VALUE_ISINF (c2))
5435 /* sqrt(x) > y is x == +Inf, when y is very large. */
5436 if (HONOR_INFINITIES (mode))
5437 return fold (build2 (EQ_EXPR, type, arg,
5438 build_real (TREE_TYPE (arg), c2)));
5440 /* sqrt(x) > y is always false, when y is very large
5441 and we don't care about infinities. */
5442 return omit_one_operand (type, integer_zero_node, arg);
5445 /* sqrt(x) > c is the same as x > c*c. */
5446 return fold (build2 (code, type, arg,
5447 build_real (TREE_TYPE (arg), c2)));
5449 else if (code == LT_EXPR || code == LE_EXPR)
/* Same squaring trick for the less-than direction.  */
5453 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5454 real_convert (&c2, mode, &c2);
5456 if (REAL_VALUE_ISINF (c2))
5458 /* sqrt(x) < y is always true, when y is a very large
5459 value and we don't care about NaNs or Infinities. */
5460 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5461 return omit_one_operand (type, integer_one_node, arg);
5463 /* sqrt(x) < y is x != +Inf when y is very large and we
5464 don't care about NaNs. */
5465 if (! HONOR_NANS (mode))
5466 return fold (build2 (NE_EXPR, type, arg,
5467 build_real (TREE_TYPE (arg), c2)));
5469 /* sqrt(x) < y is x >= 0 when y is very large and we
5470 don't care about Infinities. */
5471 if (! HONOR_INFINITIES (mode))
5472 return fold (build2 (GE_EXPR, type, arg,
5473 build_real (TREE_TYPE (arg), dconst0)));
5475 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5476 if (lang_hooks.decls.global_bindings_p () != 0
5477 || CONTAINS_PLACEHOLDER_P (arg))
/* ARG is used twice below, so wrap it to evaluate only once.  */
5480 arg = save_expr (arg);
5481 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5482 fold (build2 (GE_EXPR, type, arg,
5483 build_real (TREE_TYPE (arg),
5485 fold (build2 (NE_EXPR, type, arg,
5486 build_real (TREE_TYPE (arg),
5490 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5491 if (! HONOR_NANS (mode))
5492 return fold (build2 (code, type, arg,
5493 build_real (TREE_TYPE (arg), c2)));
5495 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5496 if (lang_hooks.decls.global_bindings_p () == 0
5497 && ! CONTAINS_PLACEHOLDER_P (arg))
/* Again, ARG appears in both conjuncts; evaluate it only once.  */
5499 arg = save_expr (arg);
5500 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5501 fold (build2 (GE_EXPR, type, arg,
5502 build_real (TREE_TYPE (arg),
5504 fold (build2 (code, type, arg,
5505 build_real (TREE_TYPE (arg),
5514 /* Subroutine of fold() that optimizes comparisons against Infinities,
5515 either +Inf or -Inf.
5517 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5518 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5519 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5521 The function returns the constant folded tree if a simplification
5522 can be made, and NULL_TREE otherwise. */
5525 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5527 enum machine_mode mode;
5528 REAL_VALUE_TYPE max;
5532 mode = TYPE_MODE (TREE_TYPE (arg0));
5534 /* For negative infinity swap the sense of the comparison. */
5535 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5537 code = swap_tree_comparison (code);
5542 /* x > +Inf is always false, if we ignore sNaNs. */
5543 if (HONOR_SNANS (mode))
5545 return omit_one_operand (type, integer_zero_node, arg0);
5548 /* x <= +Inf is always true, if we don't care about NaNs. */
5549 if (! HONOR_NANS (mode))
5550 return omit_one_operand (type, integer_one_node, arg0);
5552 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5553 if (lang_hooks.decls.global_bindings_p () == 0
5554 && ! CONTAINS_PLACEHOLDER_P (arg0))
/* ARG0 is compared against itself, so evaluate it only once.  */
5556 arg0 = save_expr (arg0);
5557 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5563 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5564 real_maxval (&max, neg, mode);
5565 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5566 arg0, build_real (TREE_TYPE (arg0), max)));
5569 /* x < +Inf is always equal to x <= DBL_MAX. */
5570 real_maxval (&max, neg, mode);
5571 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5572 arg0, build_real (TREE_TYPE (arg0), max)));
5575 /* x != +Inf is always equal to !(x > DBL_MAX). */
5576 real_maxval (&max, neg, mode);
5577 if (! HONOR_NANS (mode))
5578 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5579 arg0, build_real (TREE_TYPE (arg0), max)));
5581 /* The transformation below creates non-gimple code and thus is
5582 not appropriate if we are in gimple form. */
5586 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5587 arg0, build_real (TREE_TYPE (arg0), max)));
5588 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5597 /* Subroutine of fold() that optimizes comparisons of a division by
5598 a nonzero integer constant against an integer constant, i.e.
5601 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5602 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5603 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5605 The function returns the constant folded tree if a simplification
5606 can be made, and NULL_TREE otherwise. */
5609 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5611 tree prod, tmp, hi, lo;
5612 tree arg00 = TREE_OPERAND (arg0, 0);
5613 tree arg01 = TREE_OPERAND (arg0, 1);
5614 unsigned HOST_WIDE_INT lpart;
5615 HOST_WIDE_INT hpart;
5618 /* We have to do this the hard way to detect unsigned overflow.
5619 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5620 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5621 TREE_INT_CST_HIGH (arg01),
5622 TREE_INT_CST_LOW (arg1),
5623 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5624 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5625 prod = force_fit_type (prod, -1, overflow, false);
/* Compute the [lo, hi] range of dividends X for which X / arg01 == arg1;
   the branches below differ on the signs of the divisor and quotient.  */
5627 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5629 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5632 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5633 overflow = add_double (TREE_INT_CST_LOW (prod),
5634 TREE_INT_CST_HIGH (prod),
5635 TREE_INT_CST_LOW (tmp),
5636 TREE_INT_CST_HIGH (tmp),
5638 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5639 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5640 TREE_CONSTANT_OVERFLOW (prod));
5642 else if (tree_int_cst_sgn (arg01) >= 0)
/* Signed division by a positive constant.  */
5644 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5645 switch (tree_int_cst_sgn (arg1))
5648 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5653 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5658 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Signed division by a negative constant.  */
5668 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5669 switch (tree_int_cst_sgn (arg1))
5672 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5677 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5682 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Emit a range check or a one-sided comparison, depending on which
   of the range bounds overflowed.  */
5694 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5695 return omit_one_operand (type, integer_zero_node, arg00);
5696 if (TREE_OVERFLOW (hi))
5697 return fold (build2 (GE_EXPR, type, arg00, lo));
5698 if (TREE_OVERFLOW (lo))
5699 return fold (build2 (LE_EXPR, type, arg00, hi));
5700 return build_range_check (type, arg00, 1, lo, hi);
5703 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5704 return omit_one_operand (type, integer_one_node, arg00);
5705 if (TREE_OVERFLOW (hi))
5706 return fold (build2 (LT_EXPR, type, arg00, lo));
5707 if (TREE_OVERFLOW (lo))
5708 return fold (build2 (GT_EXPR, type, arg00, hi));
5709 return build_range_check (type, arg00, 0, lo, hi);
5712 if (TREE_OVERFLOW (lo))
5713 return omit_one_operand (type, integer_zero_node, arg00);
5714 return fold (build2 (LT_EXPR, type, arg00, lo));
5717 if (TREE_OVERFLOW (hi))
5718 return omit_one_operand (type, integer_one_node, arg00);
5719 return fold (build2 (LE_EXPR, type, arg00, hi));
5722 if (TREE_OVERFLOW (hi))
5723 return omit_one_operand (type, integer_zero_node, arg00);
5724 return fold (build2 (GT_EXPR, type, arg00, hi));
5727 if (TREE_OVERFLOW (lo))
5728 return omit_one_operand (type, integer_one_node, arg00);
5729 return fold (build2 (GE_EXPR, type, arg00, lo));
5739 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5740 equality/inequality test, then return a simplified form of
5741 the test using shifts and logical operations. Otherwise return
5742 NULL. TYPE is the desired result type. */
5745 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5748 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5750 if (code == TRUTH_NOT_EXPR)
5752 code = TREE_CODE (arg0);
5753 if (code != NE_EXPR && code != EQ_EXPR)
5756 /* Extract the arguments of the EQ/NE. */
5757 arg1 = TREE_OPERAND (arg0, 1);
5758 arg0 = TREE_OPERAND (arg0, 0);
5760 /* This requires us to invert the code. */
5761 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5764 /* If this is testing a single bit, we can optimize the test. */
5765 if ((code == NE_EXPR || code == EQ_EXPR)
5766 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5767 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5769 tree inner = TREE_OPERAND (arg0, 0);
5770 tree type = TREE_TYPE (arg0);
5771 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5772 enum machine_mode operand_mode = TYPE_MODE (type);
5774 tree signed_type, unsigned_type, intermediate_type;
5777 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5778 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5779 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5780 if (arg00 != NULL_TREE
5781 /* This is only a win if casting to a signed type is cheap,
5782 i.e. when arg00's type is not a partial mode. */
5783 && TYPE_PRECISION (TREE_TYPE (arg00))
5784 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5786 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5787 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5788 result_type, fold_convert (stype, arg00),
5789 fold_convert (stype, integer_zero_node)));
5792 /* Otherwise we have (A & C) != 0 where C is a single bit,
5793 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5794 Similarly for (A & C) == 0. */
5796 /* If INNER is a right shift of a constant and it plus BITNUM does
5797 not overflow, adjust BITNUM and INNER. */
5798 if (TREE_CODE (inner) == RSHIFT_EXPR
5799 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5800 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5801 && bitnum < TYPE_PRECISION (type)
5802 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5803 bitnum - TYPE_PRECISION (type)))
/* Fold the existing shift into BITNUM and drop it from INNER.  */
5805 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5806 inner = TREE_OPERAND (inner, 0);
5809 /* If we are going to be able to omit the AND below, we must do our
5810 operations as unsigned. If we must use the AND, we have a choice.
5811 Normally unsigned is faster, but for some machines signed is. */
5812 #ifdef LOAD_EXTEND_OP
5813 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5818 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5819 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5820 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5821 inner = fold_convert (intermediate_type, inner);
/* Move the tested bit down to bit 0.  */
5824 inner = build2 (RSHIFT_EXPR, intermediate_type,
5825 inner, size_int (bitnum));
/* For ==, invert the bit so the final result is 1 when clear.  */
5827 if (code == EQ_EXPR)
5828 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5829 inner, integer_one_node));
5831 /* Put the AND last so it can combine with more things. */
5832 inner = build2 (BIT_AND_EXPR, intermediate_type,
5833 inner, integer_one_node);
5835 /* Make sure to return the proper type. */
5836 inner = fold_convert (result_type, inner);
5843 /* Check whether we are allowed to reorder operands arg0 and arg1,
5844 such that the evaluation of arg1 occurs before arg0. */
/* NOTE(review): elided extract -- the leading numbers are the original
   file's line numbers.  The return-type line (presumably `static bool')
   and the bodies of the two guard `if's below (original lines 5850 and
   5852, presumably early `return true;' statements) are missing.
   Confirm against the complete file.  */
5847 reorder_operands_p (tree arg0, tree arg1)
/* When -ffloat-store-style strict evaluation order is NOT requested,
   or either operand is constant, reordering is unconditionally fine
   (the elided guard bodies); otherwise both operands must be free of
   side effects.  Hedged: inferred from the visible conditions only.  */
5849 if (! flag_evaluation_order)
5851 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5853 return ! TREE_SIDE_EFFECTS (arg0)
5854 && ! TREE_SIDE_EFFECTS (arg1);
5857 /* Test whether it is preferable two swap two operands, ARG0 and
5858 ARG1, for example because ARG0 is an integer constant and ARG1
5859 isn't. If REORDER is true, only recommend swapping if we can
5860 evaluate the operands in reverse order. */
/* NOTE(review): elided extract -- the leading numbers are the original
   file's line numbers.  The return-type line and ALL of the `return'
   statements that belong to the `if' ladder below (original lines
   5869, 5871, 5874, 5876, 5879, 5881, 5884, 5886, 5893-5899,
   5902-5908, 5916 ...) are missing, so each `if' appears body-less
   here.  The pattern is: a constant ARG1 argues against swapping, a
   constant ARG0 argues for it, etc. -- but the actual truth values
   returned are in the elided lines; confirm against the full file.  */
5863 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through sign-preserving no-op conversions before classifying
   the operands.  */
5865 STRIP_SIGN_NOPS (arg0);
5866 STRIP_SIGN_NOPS (arg1);
/* Constant classification ladder: each pair checks ARG1 first, then
   ARG0, for INTEGER_CST, REAL_CST, COMPLEX_CST, then the generic
   TREE_CONSTANT predicate.  Bodies elided (see NOTE above).  */
5868 if (TREE_CODE (arg1) == INTEGER_CST)
5870 if (TREE_CODE (arg0) == INTEGER_CST)
5873 if (TREE_CODE (arg1) == REAL_CST)
5875 if (TREE_CODE (arg0) == REAL_CST)
5878 if (TREE_CODE (arg1) == COMPLEX_CST)
5880 if (TREE_CODE (arg0) == COMPLEX_CST)
5883 if (TREE_CONSTANT (arg1))
5885 if (TREE_CONSTANT (arg0))
/* Under -ffloat-store-style strict evaluation order, side effects
   forbid reordering; the two identical conditions below guarded
   different (elided) bodies in the original -- confirm.  */
5891 if (reorder && flag_evaluation_order
5892 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5900 if (reorder && flag_evaluation_order
5901 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5909 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5910 for commutative and comparison operators. Ensuring a canonical
5911 form allows the optimizers to find additional redundancies without
5912 having to explicitly check for both orderings. */
5913 if (TREE_CODE (arg0) == SSA_NAME
5914 && TREE_CODE (arg1) == SSA_NAME
5915 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5921 /* Perform constant folding and related simplification of EXPR.
5922 The related simplifications include x*1 => x, x*0 => 0, etc.,
5923 and application of the associative law.
5924 NOP_EXPR conversions may be removed freely (as long as we
5925 are careful not to change the type of the overall expression).
5926 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5927 but we can constant-fold them if they have constant operands. */
5929 #ifdef ENABLE_FOLD_CHECKING
5930 # define fold(x) fold_1 (x)
5931 static tree fold_1 (tree);
5937 const tree t = expr;
5938 const tree type = TREE_TYPE (expr);
5939 tree t1 = NULL_TREE;
5941 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5942 enum tree_code code = TREE_CODE (t);
5943 int kind = TREE_CODE_CLASS (code);
5945 /* WINS will be nonzero when the switch is done
5946 if all operands are constant. */
5949 /* Return right away if a constant. */
5953 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5957 /* Special case for conversion ops that can have fixed point args. */
5958 arg0 = TREE_OPERAND (t, 0);
5960 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5962 STRIP_SIGN_NOPS (arg0);
5964 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5965 subop = TREE_REALPART (arg0);
5969 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5970 && TREE_CODE (subop) != REAL_CST)
5971 /* Note that TREE_CONSTANT isn't enough:
5972 static var addresses are constant but we can't
5973 do arithmetic on them. */
5976 else if (IS_EXPR_CODE_CLASS (kind))
5978 int len = first_rtl_op (code);
5980 for (i = 0; i < len; i++)
5982 tree op = TREE_OPERAND (t, i);
5986 continue; /* Valid for CALL_EXPR, at least. */
5988 /* Strip any conversions that don't change the mode. This is
5989 safe for every expression, except for a comparison expression
5990 because its signedness is derived from its operands. So, in
5991 the latter case, only strip conversions that don't change the
5994 Note that this is done as an internal manipulation within the
5995 constant folder, in order to find the simplest representation
5996 of the arguments so that their form can be studied. In any
5997 cases, the appropriate type conversions should be put back in
5998 the tree that will get out of the constant folder. */
6000 STRIP_SIGN_NOPS (op);
6004 if (TREE_CODE (op) == COMPLEX_CST)
6005 subop = TREE_REALPART (op);
6009 if (TREE_CODE (subop) != INTEGER_CST
6010 && TREE_CODE (subop) != REAL_CST)
6011 /* Note that TREE_CONSTANT isn't enough:
6012 static var addresses are constant but we can't
6013 do arithmetic on them. */
6023 /* If this is a commutative operation, and ARG0 is a constant, move it
6024 to ARG1 to reduce the number of tests below. */
6025 if (commutative_tree_code (code)
6026 && tree_swap_operands_p (arg0, arg1, true))
6027 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6028 TREE_OPERAND (t, 0)));
6030 /* Now WINS is set as described above,
6031 ARG0 is the first operand of EXPR,
6032 and ARG1 is the second operand (if it has more than one operand).
6034 First check for cases where an arithmetic operation is applied to a
6035 compound, conditional, or comparison operation. Push the arithmetic
6036 operation inside the compound or conditional to see if any folding
6037 can then be done. Convert comparison to conditional for this purpose.
6038 The also optimizes non-constant cases that used to be done in
6041 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6042 one of the operands is a comparison and the other is a comparison, a
6043 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6044 code below would make the expression more complex. Change it to a
6045 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6046 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6048 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6049 || code == EQ_EXPR || code == NE_EXPR)
6050 && ((truth_value_p (TREE_CODE (arg0))
6051 && (truth_value_p (TREE_CODE (arg1))
6052 || (TREE_CODE (arg1) == BIT_AND_EXPR
6053 && integer_onep (TREE_OPERAND (arg1, 1)))))
6054 || (truth_value_p (TREE_CODE (arg1))
6055 && (truth_value_p (TREE_CODE (arg0))
6056 || (TREE_CODE (arg0) == BIT_AND_EXPR
6057 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6059 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6060 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6062 type, fold_convert (boolean_type_node, arg0),
6063 fold_convert (boolean_type_node, arg1)));
6065 if (code == EQ_EXPR)
6066 tem = invert_truthvalue (tem);
6071 if (TREE_CODE_CLASS (code) == '1')
6073 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6074 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6075 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6076 else if (TREE_CODE (arg0) == COND_EXPR)
6078 tree arg01 = TREE_OPERAND (arg0, 1);
6079 tree arg02 = TREE_OPERAND (arg0, 2);
6080 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6081 arg01 = fold (build1 (code, type, arg01));
6082 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6083 arg02 = fold (build1 (code, type, arg02));
6084 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6087 /* If this was a conversion, and all we did was to move into
6088 inside the COND_EXPR, bring it back out. But leave it if
6089 it is a conversion from integer to integer and the
6090 result precision is no wider than a word since such a
6091 conversion is cheap and may be optimized away by combine,
6092 while it couldn't if it were outside the COND_EXPR. Then return
6093 so we don't get into an infinite recursion loop taking the
6094 conversion out and then back in. */
6096 if ((code == NOP_EXPR || code == CONVERT_EXPR
6097 || code == NON_LVALUE_EXPR)
6098 && TREE_CODE (tem) == COND_EXPR
6099 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6100 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6101 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6102 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6103 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6104 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6105 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6107 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6108 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6109 tem = build1 (code, type,
6111 TREE_TYPE (TREE_OPERAND
6112 (TREE_OPERAND (tem, 1), 0)),
6113 TREE_OPERAND (tem, 0),
6114 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6115 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6118 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6120 if (TREE_CODE (type) == BOOLEAN_TYPE)
6122 arg0 = copy_node (arg0);
6123 TREE_TYPE (arg0) = type;
6126 else if (TREE_CODE (type) != INTEGER_TYPE)
6127 return fold (build3 (COND_EXPR, type, arg0,
6128 fold (build1 (code, type,
6130 fold (build1 (code, type,
6131 integer_zero_node))));
6134 else if (TREE_CODE_CLASS (code) == '<'
6135 && TREE_CODE (arg0) == COMPOUND_EXPR)
6136 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6137 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6138 else if (TREE_CODE_CLASS (code) == '<'
6139 && TREE_CODE (arg1) == COMPOUND_EXPR)
6140 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6141 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6142 else if (TREE_CODE_CLASS (code) == '2'
6143 || TREE_CODE_CLASS (code) == '<')
6145 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6146 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6147 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6149 if (TREE_CODE (arg1) == COMPOUND_EXPR
6150 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6151 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6152 fold (build2 (code, type,
6153 arg0, TREE_OPERAND (arg1, 1))));
6155 if (TREE_CODE (arg0) == COND_EXPR
6156 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6158 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6159 /*cond_first_p=*/1);
6160 if (tem != NULL_TREE)
6164 if (TREE_CODE (arg1) == COND_EXPR
6165 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6167 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6168 /*cond_first_p=*/0);
6169 if (tem != NULL_TREE)
6177 return fold (DECL_INITIAL (t));
6182 case FIX_TRUNC_EXPR:
6184 case FIX_FLOOR_EXPR:
6185 case FIX_ROUND_EXPR:
6186 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6187 return TREE_OPERAND (t, 0);
6189 /* Handle cases of two conversions in a row. */
6190 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6191 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6193 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6194 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6195 int inside_int = INTEGRAL_TYPE_P (inside_type);
6196 int inside_ptr = POINTER_TYPE_P (inside_type);
6197 int inside_float = FLOAT_TYPE_P (inside_type);
6198 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6199 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6200 int inter_int = INTEGRAL_TYPE_P (inter_type);
6201 int inter_ptr = POINTER_TYPE_P (inter_type);
6202 int inter_float = FLOAT_TYPE_P (inter_type);
6203 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6204 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6205 int final_int = INTEGRAL_TYPE_P (type);
6206 int final_ptr = POINTER_TYPE_P (type);
6207 int final_float = FLOAT_TYPE_P (type);
6208 unsigned int final_prec = TYPE_PRECISION (type);
6209 int final_unsignedp = TYPE_UNSIGNED (type);
6211 /* In addition to the cases of two conversions in a row
6212 handled below, if we are converting something to its own
6213 type via an object of identical or wider precision, neither
6214 conversion is needed. */
6215 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6216 && ((inter_int && final_int) || (inter_float && final_float))
6217 && inter_prec >= final_prec)
6218 return fold (build1 (code, type,
6219 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6221 /* Likewise, if the intermediate and final types are either both
6222 float or both integer, we don't need the middle conversion if
6223 it is wider than the final type and doesn't change the signedness
6224 (for integers). Avoid this if the final type is a pointer
6225 since then we sometimes need the inner conversion. Likewise if
6226 the outer has a precision not equal to the size of its mode. */
6227 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6228 || (inter_float && inside_float))
6229 && inter_prec >= inside_prec
6230 && (inter_float || inter_unsignedp == inside_unsignedp)
6231 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6232 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6234 return fold (build1 (code, type,
6235 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6237 /* If we have a sign-extension of a zero-extended value, we can
6238 replace that by a single zero-extension. */
6239 if (inside_int && inter_int && final_int
6240 && inside_prec < inter_prec && inter_prec < final_prec
6241 && inside_unsignedp && !inter_unsignedp)
6242 return fold (build1 (code, type,
6243 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6245 /* Two conversions in a row are not needed unless:
6246 - some conversion is floating-point (overstrict for now), or
6247 - the intermediate type is narrower than both initial and
6249 - the intermediate type and innermost type differ in signedness,
6250 and the outermost type is wider than the intermediate, or
6251 - the initial type is a pointer type and the precisions of the
6252 intermediate and final types differ, or
6253 - the final type is a pointer type and the precisions of the
6254 initial and intermediate types differ. */
6255 if (! inside_float && ! inter_float && ! final_float
6256 && (inter_prec > inside_prec || inter_prec > final_prec)
6257 && ! (inside_int && inter_int
6258 && inter_unsignedp != inside_unsignedp
6259 && inter_prec < final_prec)
6260 && ((inter_unsignedp && inter_prec > inside_prec)
6261 == (final_unsignedp && final_prec > inter_prec))
6262 && ! (inside_ptr && inter_prec != final_prec)
6263 && ! (final_ptr && inside_prec != inter_prec)
6264 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6265 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6267 return fold (build1 (code, type,
6268 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6271 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6272 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6273 /* Detect assigning a bitfield. */
6274 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6275 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6277 /* Don't leave an assignment inside a conversion
6278 unless assigning a bitfield. */
6279 tree prev = TREE_OPERAND (t, 0);
6280 tem = copy_node (t);
6281 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6282 /* First do the assignment, then return converted constant. */
6283 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6284 TREE_NO_WARNING (tem) = 1;
6285 TREE_USED (tem) = 1;
6289 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6290 constants (if x has signed type, the sign bit cannot be set
6291 in c). This folds extension into the BIT_AND_EXPR. */
6292 if (INTEGRAL_TYPE_P (type)
6293 && TREE_CODE (type) != BOOLEAN_TYPE
6294 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6295 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6297 tree and = TREE_OPERAND (t, 0);
6298 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6301 if (TYPE_UNSIGNED (TREE_TYPE (and))
6302 || (TYPE_PRECISION (type)
6303 <= TYPE_PRECISION (TREE_TYPE (and))))
6305 else if (TYPE_PRECISION (TREE_TYPE (and1))
6306 <= HOST_BITS_PER_WIDE_INT
6307 && host_integerp (and1, 1))
6309 unsigned HOST_WIDE_INT cst;
6311 cst = tree_low_cst (and1, 1);
6312 cst &= (HOST_WIDE_INT) -1
6313 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6314 change = (cst == 0);
6315 #ifdef LOAD_EXTEND_OP
6317 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6320 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6321 and0 = fold_convert (uns, and0);
6322 and1 = fold_convert (uns, and1);
6327 return fold (build2 (BIT_AND_EXPR, type,
6328 fold_convert (type, and0),
6329 fold_convert (type, and1)));
6332 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6333 T2 being pointers to types of the same size. */
6334 if (POINTER_TYPE_P (TREE_TYPE (t))
6335 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6336 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6337 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6339 tree arg00 = TREE_OPERAND (arg0, 0);
6340 tree t0 = TREE_TYPE (t);
6341 tree t1 = TREE_TYPE (arg00);
6342 tree tt0 = TREE_TYPE (t0);
6343 tree tt1 = TREE_TYPE (t1);
6344 tree s0 = TYPE_SIZE (tt0);
6345 tree s1 = TYPE_SIZE (tt1);
6347 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6348 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6349 TREE_OPERAND (arg0, 1));
6352 tem = fold_convert_const (code, type, arg0);
6353 return tem ? tem : t;
6355 case VIEW_CONVERT_EXPR:
6356 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6357 return build1 (VIEW_CONVERT_EXPR, type,
6358 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6362 if (TREE_CODE (arg0) == CONSTRUCTOR
6363 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6365 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6367 return TREE_VALUE (m);
6372 if (TREE_CONSTANT (t) != wins)
6374 tem = copy_node (t);
6375 TREE_CONSTANT (tem) = wins;
6376 TREE_INVARIANT (tem) = wins;
6382 if (negate_expr_p (arg0))
6383 return fold_convert (type, negate_expr (arg0));
6387 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6388 return fold_abs_const (arg0, type);
6389 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6390 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6391 /* Convert fabs((double)float) into (double)fabsf(float). */
6392 else if (TREE_CODE (arg0) == NOP_EXPR
6393 && TREE_CODE (type) == REAL_TYPE)
6395 tree targ0 = strip_float_extensions (arg0);
6397 return fold_convert (type, fold (build1 (ABS_EXPR,
6401 else if (tree_expr_nonnegative_p (arg0))
6406 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6407 return fold_convert (type, arg0);
6408 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6409 return build2 (COMPLEX_EXPR, type,
6410 TREE_OPERAND (arg0, 0),
6411 negate_expr (TREE_OPERAND (arg0, 1)));
6412 else if (TREE_CODE (arg0) == COMPLEX_CST)
6413 return build_complex (type, TREE_REALPART (arg0),
6414 negate_expr (TREE_IMAGPART (arg0)));
6415 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6416 return fold (build2 (TREE_CODE (arg0), type,
6417 fold (build1 (CONJ_EXPR, type,
6418 TREE_OPERAND (arg0, 0))),
6419 fold (build1 (CONJ_EXPR, type,
6420 TREE_OPERAND (arg0, 1)))));
6421 else if (TREE_CODE (arg0) == CONJ_EXPR)
6422 return TREE_OPERAND (arg0, 0);
6426 if (TREE_CODE (arg0) == INTEGER_CST)
6427 return fold_not_const (arg0, type);
6428 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6429 return TREE_OPERAND (arg0, 0);
6433 /* A + (-B) -> A - B */
6434 if (TREE_CODE (arg1) == NEGATE_EXPR)
6435 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6436 /* (-A) + B -> B - A */
6437 if (TREE_CODE (arg0) == NEGATE_EXPR
6438 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6439 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6440 if (! FLOAT_TYPE_P (type))
6442 if (integer_zerop (arg1))
6443 return non_lvalue (fold_convert (type, arg0));
6445 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6446 with a constant, and the two constants have no bits in common,
6447 we should treat this as a BIT_IOR_EXPR since this may produce more
6449 if (TREE_CODE (arg0) == BIT_AND_EXPR
6450 && TREE_CODE (arg1) == BIT_AND_EXPR
6451 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6452 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6453 && integer_zerop (const_binop (BIT_AND_EXPR,
6454 TREE_OPERAND (arg0, 1),
6455 TREE_OPERAND (arg1, 1), 0)))
6457 code = BIT_IOR_EXPR;
6461 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6462 (plus (plus (mult) (mult)) (foo)) so that we can
6463 take advantage of the factoring cases below. */
6464 if ((TREE_CODE (arg0) == PLUS_EXPR
6465 && TREE_CODE (arg1) == MULT_EXPR)
6466 || (TREE_CODE (arg1) == PLUS_EXPR
6467 && TREE_CODE (arg0) == MULT_EXPR))
6469 tree parg0, parg1, parg, marg;
6471 if (TREE_CODE (arg0) == PLUS_EXPR)
6472 parg = arg0, marg = arg1;
6474 parg = arg1, marg = arg0;
6475 parg0 = TREE_OPERAND (parg, 0);
6476 parg1 = TREE_OPERAND (parg, 1);
6480 if (TREE_CODE (parg0) == MULT_EXPR
6481 && TREE_CODE (parg1) != MULT_EXPR)
6482 return fold (build2 (PLUS_EXPR, type,
6483 fold (build2 (PLUS_EXPR, type,
6484 fold_convert (type, parg0),
6485 fold_convert (type, marg))),
6486 fold_convert (type, parg1)));
6487 if (TREE_CODE (parg0) != MULT_EXPR
6488 && TREE_CODE (parg1) == MULT_EXPR)
6489 return fold (build2 (PLUS_EXPR, type,
6490 fold (build2 (PLUS_EXPR, type,
6491 fold_convert (type, parg1),
6492 fold_convert (type, marg))),
6493 fold_convert (type, parg0)));
6496 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6498 tree arg00, arg01, arg10, arg11;
6499 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6501 /* (A * C) + (B * C) -> (A+B) * C.
6502 We are most concerned about the case where C is a constant,
6503 but other combinations show up during loop reduction. Since
6504 it is not difficult, try all four possibilities. */
6506 arg00 = TREE_OPERAND (arg0, 0);
6507 arg01 = TREE_OPERAND (arg0, 1);
6508 arg10 = TREE_OPERAND (arg1, 0);
6509 arg11 = TREE_OPERAND (arg1, 1);
6512 if (operand_equal_p (arg01, arg11, 0))
6513 same = arg01, alt0 = arg00, alt1 = arg10;
6514 else if (operand_equal_p (arg00, arg10, 0))
6515 same = arg00, alt0 = arg01, alt1 = arg11;
6516 else if (operand_equal_p (arg00, arg11, 0))
6517 same = arg00, alt0 = arg01, alt1 = arg10;
6518 else if (operand_equal_p (arg01, arg10, 0))
6519 same = arg01, alt0 = arg00, alt1 = arg11;
6521 /* No identical multiplicands; see if we can find a common
6522 power-of-two factor in non-power-of-two multiplies. This
6523 can help in multi-dimensional array access. */
6524 else if (TREE_CODE (arg01) == INTEGER_CST
6525 && TREE_CODE (arg11) == INTEGER_CST
6526 && TREE_INT_CST_HIGH (arg01) == 0
6527 && TREE_INT_CST_HIGH (arg11) == 0)
6529 HOST_WIDE_INT int01, int11, tmp;
6530 int01 = TREE_INT_CST_LOW (arg01);
6531 int11 = TREE_INT_CST_LOW (arg11);
6533 /* Move min of absolute values to int11. */
6534 if ((int01 >= 0 ? int01 : -int01)
6535 < (int11 >= 0 ? int11 : -int11))
6537 tmp = int01, int01 = int11, int11 = tmp;
6538 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6539 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6542 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6544 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6545 build_int_cst (NULL_TREE,
6553 return fold (build2 (MULT_EXPR, type,
6554 fold (build2 (PLUS_EXPR, type,
6561 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6562 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6563 return non_lvalue (fold_convert (type, arg0));
6565 /* Likewise if the operands are reversed. */
6566 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6567 return non_lvalue (fold_convert (type, arg1));
6569 /* Convert X + -C into X - C. */
6570 if (TREE_CODE (arg1) == REAL_CST
6571 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6573 tem = fold_negate_const (arg1, type);
6574 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6575 return fold (build2 (MINUS_EXPR, type,
6576 fold_convert (type, arg0),
6577 fold_convert (type, tem)));
6580 /* Convert x+x into x*2.0. */
6581 if (operand_equal_p (arg0, arg1, 0)
6582 && SCALAR_FLOAT_TYPE_P (type))
6583 return fold (build2 (MULT_EXPR, type, arg0,
6584 build_real (type, dconst2)));
6586 /* Convert x*c+x into x*(c+1). */
6587 if (flag_unsafe_math_optimizations
6588 && TREE_CODE (arg0) == MULT_EXPR
6589 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6590 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6591 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6595 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6596 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6597 return fold (build2 (MULT_EXPR, type, arg1,
6598 build_real (type, c)));
6601 /* Convert x+x*c into x*(c+1). */
6602 if (flag_unsafe_math_optimizations
6603 && TREE_CODE (arg1) == MULT_EXPR
6604 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6605 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6606 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6610 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6611 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6612 return fold (build2 (MULT_EXPR, type, arg0,
6613 build_real (type, c)));
6616 /* Convert x*c1+x*c2 into x*(c1+c2). */
6617 if (flag_unsafe_math_optimizations
6618 && TREE_CODE (arg0) == MULT_EXPR
6619 && TREE_CODE (arg1) == MULT_EXPR
6620 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6621 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6622 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6623 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6624 && operand_equal_p (TREE_OPERAND (arg0, 0),
6625 TREE_OPERAND (arg1, 0), 0))
6627 REAL_VALUE_TYPE c1, c2;
6629 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6630 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6631 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6632 return fold (build2 (MULT_EXPR, type,
6633 TREE_OPERAND (arg0, 0),
6634 build_real (type, c1)));
6636 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6637 if (flag_unsafe_math_optimizations
6638 && TREE_CODE (arg1) == PLUS_EXPR
6639 && TREE_CODE (arg0) != MULT_EXPR)
6641 tree tree10 = TREE_OPERAND (arg1, 0);
6642 tree tree11 = TREE_OPERAND (arg1, 1);
6643 if (TREE_CODE (tree11) == MULT_EXPR
6644 && TREE_CODE (tree10) == MULT_EXPR)
6647 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6648 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6651 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6652 if (flag_unsafe_math_optimizations
6653 && TREE_CODE (arg0) == PLUS_EXPR
6654 && TREE_CODE (arg1) != MULT_EXPR)
6656 tree tree00 = TREE_OPERAND (arg0, 0);
6657 tree tree01 = TREE_OPERAND (arg0, 1);
6658 if (TREE_CODE (tree01) == MULT_EXPR
6659 && TREE_CODE (tree00) == MULT_EXPR)
6662 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6663 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6669 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6670 is a rotate of A by C1 bits. */
6671 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6672 is a rotate of A by B bits. */
6674 enum tree_code code0, code1;
6675 code0 = TREE_CODE (arg0);
6676 code1 = TREE_CODE (arg1);
6677 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6678 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6679 && operand_equal_p (TREE_OPERAND (arg0, 0),
6680 TREE_OPERAND (arg1, 0), 0)
6681 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6683 tree tree01, tree11;
6684 enum tree_code code01, code11;
6686 tree01 = TREE_OPERAND (arg0, 1);
6687 tree11 = TREE_OPERAND (arg1, 1);
6688 STRIP_NOPS (tree01);
6689 STRIP_NOPS (tree11);
6690 code01 = TREE_CODE (tree01);
6691 code11 = TREE_CODE (tree11);
6692 if (code01 == INTEGER_CST
6693 && code11 == INTEGER_CST
6694 && TREE_INT_CST_HIGH (tree01) == 0
6695 && TREE_INT_CST_HIGH (tree11) == 0
6696 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6697 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6698 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6699 code0 == LSHIFT_EXPR ? tree01 : tree11);
6700 else if (code11 == MINUS_EXPR)
6702 tree tree110, tree111;
6703 tree110 = TREE_OPERAND (tree11, 0);
6704 tree111 = TREE_OPERAND (tree11, 1);
6705 STRIP_NOPS (tree110);
6706 STRIP_NOPS (tree111);
6707 if (TREE_CODE (tree110) == INTEGER_CST
6708 && 0 == compare_tree_int (tree110,
6710 (TREE_TYPE (TREE_OPERAND
6712 && operand_equal_p (tree01, tree111, 0))
6713 return build2 ((code0 == LSHIFT_EXPR
6716 type, TREE_OPERAND (arg0, 0), tree01);
6718 else if (code01 == MINUS_EXPR)
6720 tree tree010, tree011;
6721 tree010 = TREE_OPERAND (tree01, 0);
6722 tree011 = TREE_OPERAND (tree01, 1);
6723 STRIP_NOPS (tree010);
6724 STRIP_NOPS (tree011);
6725 if (TREE_CODE (tree010) == INTEGER_CST
6726 && 0 == compare_tree_int (tree010,
6728 (TREE_TYPE (TREE_OPERAND
6730 && operand_equal_p (tree11, tree011, 0))
6731 return build2 ((code0 != LSHIFT_EXPR
6734 type, TREE_OPERAND (arg0, 0), tree11);
6740 /* In most languages, can't associate operations on floats through
6741 parentheses. Rather than remember where the parentheses were, we
6742 don't associate floats at all, unless the user has specified
6743 -funsafe-math-optimizations. */
6746 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6748 tree var0, con0, lit0, minus_lit0;
6749 tree var1, con1, lit1, minus_lit1;
6751 /* Split both trees into variables, constants, and literals. Then
6752 associate each group together, the constants with literals,
6753 then the result with variables. This increases the chances of
6754 literals being recombined later and of generating relocatable
6755 expressions for the sum of a constant and literal. */
6756 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6757 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6758 code == MINUS_EXPR);
6760 /* Only do something if we found more than two objects. Otherwise,
6761 nothing has changed and we risk infinite recursion. */
6762 if (2 < ((var0 != 0) + (var1 != 0)
6763 + (con0 != 0) + (con1 != 0)
6764 + (lit0 != 0) + (lit1 != 0)
6765 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6767 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6768 if (code == MINUS_EXPR)
6771 var0 = associate_trees (var0, var1, code, type);
6772 con0 = associate_trees (con0, con1, code, type);
6773 lit0 = associate_trees (lit0, lit1, code, type);
6774 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6776 /* Preserve the MINUS_EXPR if the negative part of the literal is
6777 greater than the positive part. Otherwise, the multiplicative
6778 folding code (i.e extract_muldiv) may be fooled in case
6779 unsigned constants are subtracted, like in the following
6780 example: ((X*2 + 4) - 8U)/2. */
6781 if (minus_lit0 && lit0)
6783 if (TREE_CODE (lit0) == INTEGER_CST
6784 && TREE_CODE (minus_lit0) == INTEGER_CST
6785 && tree_int_cst_lt (lit0, minus_lit0))
6787 minus_lit0 = associate_trees (minus_lit0, lit0,
6793 lit0 = associate_trees (lit0, minus_lit0,
6801 return fold_convert (type,
6802 associate_trees (var0, minus_lit0,
6806 con0 = associate_trees (con0, minus_lit0,
6808 return fold_convert (type,
6809 associate_trees (var0, con0,
6814 con0 = associate_trees (con0, lit0, code, type);
6815 return fold_convert (type, associate_trees (var0, con0,
6822 t1 = const_binop (code, arg0, arg1, 0);
6823 if (t1 != NULL_TREE)
6825 /* The return value should always have
6826 the same type as the original expression. */
6827 if (TREE_TYPE (t1) != type)
6828 t1 = fold_convert (type, t1);
6835 /* A - (-B) -> A + B */
6836 if (TREE_CODE (arg1) == NEGATE_EXPR)
6837 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6838 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6839 if (TREE_CODE (arg0) == NEGATE_EXPR
6840 && (FLOAT_TYPE_P (type)
6841 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6842 && negate_expr_p (arg1)
6843 && reorder_operands_p (arg0, arg1))
6844 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6845 TREE_OPERAND (arg0, 0)));
6847 if (! FLOAT_TYPE_P (type))
6849 if (! wins && integer_zerop (arg0))
6850 return negate_expr (fold_convert (type, arg1));
6851 if (integer_zerop (arg1))
6852 return non_lvalue (fold_convert (type, arg0));
6854 /* Fold A - (A & B) into ~B & A. */
6855 if (!TREE_SIDE_EFFECTS (arg0)
6856 && TREE_CODE (arg1) == BIT_AND_EXPR)
6858 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6859 return fold (build2 (BIT_AND_EXPR, type,
6860 fold (build1 (BIT_NOT_EXPR, type,
6861 TREE_OPERAND (arg1, 0))),
6863 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6864 return fold (build2 (BIT_AND_EXPR, type,
6865 fold (build1 (BIT_NOT_EXPR, type,
6866 TREE_OPERAND (arg1, 1))),
6870 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6871 any power of 2 minus 1. */
6872 if (TREE_CODE (arg0) == BIT_AND_EXPR
6873 && TREE_CODE (arg1) == BIT_AND_EXPR
6874 && operand_equal_p (TREE_OPERAND (arg0, 0),
6875 TREE_OPERAND (arg1, 0), 0))
6877 tree mask0 = TREE_OPERAND (arg0, 1);
6878 tree mask1 = TREE_OPERAND (arg1, 1);
6879 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6881 if (operand_equal_p (tem, mask1, 0))
6883 tem = fold (build2 (BIT_XOR_EXPR, type,
6884 TREE_OPERAND (arg0, 0), mask1));
6885 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6890 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6891 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6892 return non_lvalue (fold_convert (type, arg0));
6894 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6895 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6896 (-ARG1 + ARG0) reduces to -ARG1. */
6897 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6898 return negate_expr (fold_convert (type, arg1));
6900 /* Fold &x - &x. This can happen from &x.foo - &x.
6901 This is unsafe for certain floats even in non-IEEE formats.
6902 In IEEE, it is unsafe because it does wrong for NaNs.
6903 Also note that operand_equal_p is always false if an operand
6906 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6907 && operand_equal_p (arg0, arg1, 0))
6908 return fold_convert (type, integer_zero_node);
6910 /* A - B -> A + (-B) if B is easily negatable. */
6911 if (!wins && negate_expr_p (arg1)
6912 && ((FLOAT_TYPE_P (type)
6913 /* Avoid this transformation if B is a positive REAL_CST. */
6914 && (TREE_CODE (arg1) != REAL_CST
6915 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6916 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6917 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6919 if (TREE_CODE (arg0) == MULT_EXPR
6920 && TREE_CODE (arg1) == MULT_EXPR
6921 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6923 /* (A * C) - (B * C) -> (A-B) * C. */
6924 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6925 TREE_OPERAND (arg1, 1), 0))
6926 return fold (build2 (MULT_EXPR, type,
6927 fold (build2 (MINUS_EXPR, type,
6928 TREE_OPERAND (arg0, 0),
6929 TREE_OPERAND (arg1, 0))),
6930 TREE_OPERAND (arg0, 1)));
6931 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6932 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6933 TREE_OPERAND (arg1, 0), 0))
6934 return fold (build2 (MULT_EXPR, type,
6935 TREE_OPERAND (arg0, 0),
6936 fold (build2 (MINUS_EXPR, type,
6937 TREE_OPERAND (arg0, 1),
6938 TREE_OPERAND (arg1, 1)))));
6944 /* (-A) * (-B) -> A * B */
6945 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6946 return fold (build2 (MULT_EXPR, type,
6947 TREE_OPERAND (arg0, 0),
6948 negate_expr (arg1)));
6949 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6950 return fold (build2 (MULT_EXPR, type,
6952 TREE_OPERAND (arg1, 0)));
6954 if (! FLOAT_TYPE_P (type))
6956 if (integer_zerop (arg1))
6957 return omit_one_operand (type, arg1, arg0);
6958 if (integer_onep (arg1))
6959 return non_lvalue (fold_convert (type, arg0));
6961 /* (a * (1 << b)) is (a << b) */
6962 if (TREE_CODE (arg1) == LSHIFT_EXPR
6963 && integer_onep (TREE_OPERAND (arg1, 0)))
6964 return fold (build2 (LSHIFT_EXPR, type, arg0,
6965 TREE_OPERAND (arg1, 1)));
6966 if (TREE_CODE (arg0) == LSHIFT_EXPR
6967 && integer_onep (TREE_OPERAND (arg0, 0)))
6968 return fold (build2 (LSHIFT_EXPR, type, arg1,
6969 TREE_OPERAND (arg0, 1)));
6971 if (TREE_CODE (arg1) == INTEGER_CST
6972 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6973 fold_convert (type, arg1),
6975 return fold_convert (type, tem);
6980 /* Maybe fold x * 0 to 0. The expressions aren't the same
6981 when x is NaN, since x * 0 is also NaN. Nor are they the
6982 same in modes with signed zeros, since multiplying a
6983 negative value by 0 gives -0, not +0. */
6984 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6985 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6986 && real_zerop (arg1))
6987 return omit_one_operand (type, arg1, arg0);
6988 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6989 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6990 && real_onep (arg1))
6991 return non_lvalue (fold_convert (type, arg0));
6993 /* Transform x * -1.0 into -x. */
6994 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6995 && real_minus_onep (arg1))
6996 return fold_convert (type, negate_expr (arg0));
6998 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6999 if (flag_unsafe_math_optimizations
7000 && TREE_CODE (arg0) == RDIV_EXPR
7001 && TREE_CODE (arg1) == REAL_CST
7002 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7004 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7007 return fold (build2 (RDIV_EXPR, type, tem,
7008 TREE_OPERAND (arg0, 1)));
7011 if (flag_unsafe_math_optimizations)
7013 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7014 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7016 /* Optimizations of root(...)*root(...). */
7017 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7019 tree rootfn, arg, arglist;
7020 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7021 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7023 /* Optimize sqrt(x)*sqrt(x) as x. */
7024 if (BUILTIN_SQRT_P (fcode0)
7025 && operand_equal_p (arg00, arg10, 0)
7026 && ! HONOR_SNANS (TYPE_MODE (type)))
7029 /* Optimize root(x)*root(y) as root(x*y). */
7030 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7031 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7032 arglist = build_tree_list (NULL_TREE, arg);
7033 return build_function_call_expr (rootfn, arglist);
7036 /* Optimize expN(x)*expN(y) as expN(x+y). */
7037 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7039 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7040 tree arg = build2 (PLUS_EXPR, type,
7041 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7042 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7043 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7044 return build_function_call_expr (expfn, arglist);
7047 /* Optimizations of pow(...)*pow(...). */
7048 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7049 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7050 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7052 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7053 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7055 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7056 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7059 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7060 if (operand_equal_p (arg01, arg11, 0))
7062 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7063 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7064 tree arglist = tree_cons (NULL_TREE, fold (arg),
7065 build_tree_list (NULL_TREE,
7067 return build_function_call_expr (powfn, arglist);
7070 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7071 if (operand_equal_p (arg00, arg10, 0))
7073 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7074 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7075 tree arglist = tree_cons (NULL_TREE, arg00,
7076 build_tree_list (NULL_TREE,
7078 return build_function_call_expr (powfn, arglist);
7082 /* Optimize tan(x)*cos(x) as sin(x). */
7083 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7084 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7085 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7086 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7087 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7088 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7089 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7090 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7092 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7094 if (sinfn != NULL_TREE)
7095 return build_function_call_expr (sinfn,
7096 TREE_OPERAND (arg0, 1));
7099 /* Optimize x*pow(x,c) as pow(x,c+1). */
7100 if (fcode1 == BUILT_IN_POW
7101 || fcode1 == BUILT_IN_POWF
7102 || fcode1 == BUILT_IN_POWL)
7104 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7105 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7107 if (TREE_CODE (arg11) == REAL_CST
7108 && ! TREE_CONSTANT_OVERFLOW (arg11)
7109 && operand_equal_p (arg0, arg10, 0))
7111 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7115 c = TREE_REAL_CST (arg11);
7116 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7117 arg = build_real (type, c);
7118 arglist = build_tree_list (NULL_TREE, arg);
7119 arglist = tree_cons (NULL_TREE, arg0, arglist);
7120 return build_function_call_expr (powfn, arglist);
7124 /* Optimize pow(x,c)*x as pow(x,c+1). */
7125 if (fcode0 == BUILT_IN_POW
7126 || fcode0 == BUILT_IN_POWF
7127 || fcode0 == BUILT_IN_POWL)
7129 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7130 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7132 if (TREE_CODE (arg01) == REAL_CST
7133 && ! TREE_CONSTANT_OVERFLOW (arg01)
7134 && operand_equal_p (arg1, arg00, 0))
7136 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7140 c = TREE_REAL_CST (arg01);
7141 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7142 arg = build_real (type, c);
7143 arglist = build_tree_list (NULL_TREE, arg);
7144 arglist = tree_cons (NULL_TREE, arg1, arglist);
7145 return build_function_call_expr (powfn, arglist);
7149 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7151 && operand_equal_p (arg0, arg1, 0))
7153 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7157 tree arg = build_real (type, dconst2);
7158 tree arglist = build_tree_list (NULL_TREE, arg);
7159 arglist = tree_cons (NULL_TREE, arg0, arglist);
7160 return build_function_call_expr (powfn, arglist);
7169 if (integer_all_onesp (arg1))
7170 return omit_one_operand (type, arg1, arg0);
7171 if (integer_zerop (arg1))
7172 return non_lvalue (fold_convert (type, arg0));
7173 if (operand_equal_p (arg0, arg1, 0))
7174 return non_lvalue (fold_convert (type, arg0));
7177 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7178 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7180 t1 = build_int_cst (type, -1);
7181 t1 = force_fit_type (t1, 0, false, false);
7182 return omit_one_operand (type, t1, arg1);
7186 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7187 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7189 t1 = build_int_cst (type, -1);
7190 t1 = force_fit_type (t1, 0, false, false);
7191 return omit_one_operand (type, t1, arg0);
7194 t1 = distribute_bit_expr (code, type, arg0, arg1);
7195 if (t1 != NULL_TREE)
7198 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7200 This results in more efficient code for machines without a NAND
7201 instruction. Combine will canonicalize to the first form
7202 which will allow use of NAND instructions provided by the
7203 backend if they exist. */
7204 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7205 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7207 return fold (build1 (BIT_NOT_EXPR, type,
7208 build2 (BIT_AND_EXPR, type,
7209 TREE_OPERAND (arg0, 0),
7210 TREE_OPERAND (arg1, 0))));
7213 /* See if this can be simplified into a rotate first. If that
7214 is unsuccessful continue in the association code. */
7218 if (integer_zerop (arg1))
7219 return non_lvalue (fold_convert (type, arg0));
7220 if (integer_all_onesp (arg1))
7221 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7222 if (operand_equal_p (arg0, arg1, 0))
7223 return omit_one_operand (type, integer_zero_node, arg0);
7226 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7227 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7229 t1 = build_int_cst (type, -1);
7230 t1 = force_fit_type (t1, 0, false, false);
7231 return omit_one_operand (type, t1, arg1);
7235 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7236 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7238 t1 = build_int_cst (type, -1);
7239 t1 = force_fit_type (t1, 0, false, false);
7240 return omit_one_operand (type, t1, arg0);
7243 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7244 with a constant, and the two constants have no bits in common,
7245 we should treat this as a BIT_IOR_EXPR since this may produce more
7247 if (TREE_CODE (arg0) == BIT_AND_EXPR
7248 && TREE_CODE (arg1) == BIT_AND_EXPR
7249 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7250 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7251 && integer_zerop (const_binop (BIT_AND_EXPR,
7252 TREE_OPERAND (arg0, 1),
7253 TREE_OPERAND (arg1, 1), 0)))
7255 code = BIT_IOR_EXPR;
7259 /* See if this can be simplified into a rotate first. If that
7260 is unsuccessful continue in the association code. */
7264 if (integer_all_onesp (arg1))
7265 return non_lvalue (fold_convert (type, arg0));
7266 if (integer_zerop (arg1))
7267 return omit_one_operand (type, arg1, arg0);
7268 if (operand_equal_p (arg0, arg1, 0))
7269 return non_lvalue (fold_convert (type, arg0));
7271 /* ~X & X is always zero. */
7272 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7273 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7274 return omit_one_operand (type, integer_zero_node, arg1);
7276 /* X & ~X is always zero. */
7277 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7278 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7279 return omit_one_operand (type, integer_zero_node, arg0);
7281 t1 = distribute_bit_expr (code, type, arg0, arg1);
7282 if (t1 != NULL_TREE)
7284 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7285 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7286 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7289 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7291 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7292 && (~TREE_INT_CST_LOW (arg1)
7293 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7294 return fold_convert (type, TREE_OPERAND (arg0, 0));
7297 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7299 This results in more efficient code for machines without a NOR
7300 instruction. Combine will canonicalize to the first form
7301 which will allow use of NOR instructions provided by the
7302 backend if they exist. */
7303 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7304 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7306 return fold (build1 (BIT_NOT_EXPR, type,
7307 build2 (BIT_IOR_EXPR, type,
7308 TREE_OPERAND (arg0, 0),
7309 TREE_OPERAND (arg1, 0))));
7315 /* Don't touch a floating-point divide by zero unless the mode
7316 of the constant can represent infinity. */
7317 if (TREE_CODE (arg1) == REAL_CST
7318 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7319 && real_zerop (arg1))
7322 /* (-A) / (-B) -> A / B */
7323 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7324 return fold (build2 (RDIV_EXPR, type,
7325 TREE_OPERAND (arg0, 0),
7326 negate_expr (arg1)));
7327 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7328 return fold (build2 (RDIV_EXPR, type,
7330 TREE_OPERAND (arg1, 0)));
7332 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7333 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7334 && real_onep (arg1))
7335 return non_lvalue (fold_convert (type, arg0));
7337 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7338 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7339 && real_minus_onep (arg1))
7340 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7342 /* If ARG1 is a constant, we can convert this to a multiply by the
7343 reciprocal. This does not have the same rounding properties,
7344 so only do this if -funsafe-math-optimizations. We can actually
7345 always safely do it if ARG1 is a power of two, but it's hard to
7346 tell if it is or not in a portable manner. */
7347 if (TREE_CODE (arg1) == REAL_CST)
7349 if (flag_unsafe_math_optimizations
7350 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7352 return fold (build2 (MULT_EXPR, type, arg0, tem));
7353 /* Find the reciprocal if optimizing and the result is exact. */
7357 r = TREE_REAL_CST (arg1);
7358 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7360 tem = build_real (type, r);
7361 return fold (build2 (MULT_EXPR, type, arg0, tem));
7365 /* Convert A/B/C to A/(B*C). */
7366 if (flag_unsafe_math_optimizations
7367 && TREE_CODE (arg0) == RDIV_EXPR)
7368 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7369 fold (build2 (MULT_EXPR, type,
7370 TREE_OPERAND (arg0, 1), arg1))));
7372 /* Convert A/(B/C) to (A/B)*C. */
7373 if (flag_unsafe_math_optimizations
7374 && TREE_CODE (arg1) == RDIV_EXPR)
7375 return fold (build2 (MULT_EXPR, type,
7376 fold (build2 (RDIV_EXPR, type, arg0,
7377 TREE_OPERAND (arg1, 0))),
7378 TREE_OPERAND (arg1, 1)));
7380 /* Convert C1/(X*C2) into (C1/C2)/X. */
7381 if (flag_unsafe_math_optimizations
7382 && TREE_CODE (arg1) == MULT_EXPR
7383 && TREE_CODE (arg0) == REAL_CST
7384 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7386 tree tem = const_binop (RDIV_EXPR, arg0,
7387 TREE_OPERAND (arg1, 1), 0);
7389 return fold (build2 (RDIV_EXPR, type, tem,
7390 TREE_OPERAND (arg1, 0)));
7393 if (flag_unsafe_math_optimizations)
7395 enum built_in_function fcode = builtin_mathfn_code (arg1);
7396 /* Optimize x/expN(y) into x*expN(-y). */
7397 if (BUILTIN_EXPONENT_P (fcode))
7399 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7400 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7401 tree arglist = build_tree_list (NULL_TREE,
7402 fold_convert (type, arg));
7403 arg1 = build_function_call_expr (expfn, arglist);
7404 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7407 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7408 if (fcode == BUILT_IN_POW
7409 || fcode == BUILT_IN_POWF
7410 || fcode == BUILT_IN_POWL)
7412 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7413 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7414 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7415 tree neg11 = fold_convert (type, negate_expr (arg11));
7416 tree arglist = tree_cons(NULL_TREE, arg10,
7417 build_tree_list (NULL_TREE, neg11));
7418 arg1 = build_function_call_expr (powfn, arglist);
7419 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7423 if (flag_unsafe_math_optimizations)
7425 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7426 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7428 /* Optimize sin(x)/cos(x) as tan(x). */
7429 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7430 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7431 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7432 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7433 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7435 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7437 if (tanfn != NULL_TREE)
7438 return build_function_call_expr (tanfn,
7439 TREE_OPERAND (arg0, 1));
7442 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7443 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7444 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7445 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7446 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7447 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7449 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7451 if (tanfn != NULL_TREE)
7453 tree tmp = TREE_OPERAND (arg0, 1);
7454 tmp = build_function_call_expr (tanfn, tmp);
7455 return fold (build2 (RDIV_EXPR, type,
7456 build_real (type, dconst1), tmp));
7460 /* Optimize pow(x,c)/x as pow(x,c-1). */
7461 if (fcode0 == BUILT_IN_POW
7462 || fcode0 == BUILT_IN_POWF
7463 || fcode0 == BUILT_IN_POWL)
7465 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7466 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7467 if (TREE_CODE (arg01) == REAL_CST
7468 && ! TREE_CONSTANT_OVERFLOW (arg01)
7469 && operand_equal_p (arg1, arg00, 0))
7471 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7475 c = TREE_REAL_CST (arg01);
7476 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7477 arg = build_real (type, c);
7478 arglist = build_tree_list (NULL_TREE, arg);
7479 arglist = tree_cons (NULL_TREE, arg1, arglist);
7480 return build_function_call_expr (powfn, arglist);
7486 case TRUNC_DIV_EXPR:
7487 case ROUND_DIV_EXPR:
7488 case FLOOR_DIV_EXPR:
7490 case EXACT_DIV_EXPR:
7491 if (integer_onep (arg1))
7492 return non_lvalue (fold_convert (type, arg0));
7493 if (integer_zerop (arg1))
7496 if (!TYPE_UNSIGNED (type)
7497 && TREE_CODE (arg1) == INTEGER_CST
7498 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7499 && TREE_INT_CST_HIGH (arg1) == -1)
7500 return fold_convert (type, negate_expr (arg0));
7502 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7503 operation, EXACT_DIV_EXPR.
7505 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7506 At one time others generated faster code, it's not clear if they do
7507 after the last round to changes to the DIV code in expmed.c. */
7508 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7509 && multiple_of_p (type, arg0, arg1))
7510 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7512 if (TREE_CODE (arg1) == INTEGER_CST
7513 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7515 return fold_convert (type, tem);
7520 case FLOOR_MOD_EXPR:
7521 case ROUND_MOD_EXPR:
7522 case TRUNC_MOD_EXPR:
7523 if (integer_onep (arg1))
7524 return omit_one_operand (type, integer_zero_node, arg0);
7525 if (integer_zerop (arg1))
7528 /* X % -1 is zero. */
7529 if (!TYPE_UNSIGNED (type)
7530 && TREE_CODE (arg1) == INTEGER_CST
7531 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7532 && TREE_INT_CST_HIGH (arg1) == -1)
7533 return omit_one_operand (type, integer_zero_node, arg0);
7535 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7536 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7537 if (code == TRUNC_MOD_EXPR
7538 && TYPE_UNSIGNED (type)
7539 && integer_pow2p (arg1))
7541 unsigned HOST_WIDE_INT high, low;
7545 l = tree_log2 (arg1);
7546 if (l >= HOST_BITS_PER_WIDE_INT)
7548 high = ((unsigned HOST_WIDE_INT) 1
7549 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7555 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7558 mask = build_int_cst_wide (type, low, high);
7559 return fold (build2 (BIT_AND_EXPR, type,
7560 fold_convert (type, arg0), mask));
7563 /* X % -C is the same as X % C. */
7564 if (code == TRUNC_MOD_EXPR
7565 && !TYPE_UNSIGNED (type)
7566 && TREE_CODE (arg1) == INTEGER_CST
7567 && TREE_INT_CST_HIGH (arg1) < 0
7569 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7570 && !sign_bit_p (arg1, arg1))
7571 return fold (build2 (code, type, fold_convert (type, arg0),
7572 fold_convert (type, negate_expr (arg1))));
7574 /* X % -Y is the same as X % Y. */
7575 if (code == TRUNC_MOD_EXPR
7576 && !TYPE_UNSIGNED (type)
7577 && TREE_CODE (arg1) == NEGATE_EXPR
7579 return fold (build2 (code, type, fold_convert (type, arg0),
7580 fold_convert (type, TREE_OPERAND (arg1, 0))));
7582 if (TREE_CODE (arg1) == INTEGER_CST
7583 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7585 return fold_convert (type, tem);
7591 if (integer_all_onesp (arg0))
7592 return omit_one_operand (type, arg0, arg1);
7596 /* Optimize -1 >> x for arithmetic right shifts. */
7597 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7598 return omit_one_operand (type, arg0, arg1);
7599 /* ... fall through ... */
7603 if (integer_zerop (arg1))
7604 return non_lvalue (fold_convert (type, arg0));
7605 if (integer_zerop (arg0))
7606 return omit_one_operand (type, arg0, arg1);
7608 /* Since negative shift count is not well-defined,
7609 don't try to compute it in the compiler. */
7610 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7612 /* Rewrite an LROTATE_EXPR by a constant into an
7613 RROTATE_EXPR by a new constant. */
7614 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7616 tree tem = build_int_cst (NULL_TREE,
7617 GET_MODE_BITSIZE (TYPE_MODE (type)));
7618 tem = fold_convert (TREE_TYPE (arg1), tem);
7619 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7620 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7623 /* If we have a rotate of a bit operation with the rotate count and
7624 the second operand of the bit operation both constant,
7625 permute the two operations. */
7626 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7627 && (TREE_CODE (arg0) == BIT_AND_EXPR
7628 || TREE_CODE (arg0) == BIT_IOR_EXPR
7629 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7631 return fold (build2 (TREE_CODE (arg0), type,
7632 fold (build2 (code, type,
7633 TREE_OPERAND (arg0, 0), arg1)),
7634 fold (build2 (code, type,
7635 TREE_OPERAND (arg0, 1), arg1))));
7637 /* Two consecutive rotates adding up to the width of the mode can
7639 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7640 && TREE_CODE (arg0) == RROTATE_EXPR
7641 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7642 && TREE_INT_CST_HIGH (arg1) == 0
7643 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7644 && ((TREE_INT_CST_LOW (arg1)
7645 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7646 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7647 return TREE_OPERAND (arg0, 0);
7652 if (operand_equal_p (arg0, arg1, 0))
7653 return omit_one_operand (type, arg0, arg1);
7654 if (INTEGRAL_TYPE_P (type)
7655 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7656 return omit_one_operand (type, arg1, arg0);
7660 if (operand_equal_p (arg0, arg1, 0))
7661 return omit_one_operand (type, arg0, arg1);
7662 if (INTEGRAL_TYPE_P (type)
7663 && TYPE_MAX_VALUE (type)
7664 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7665 return omit_one_operand (type, arg1, arg0);
7668 case TRUTH_NOT_EXPR:
7669 /* The argument to invert_truthvalue must have Boolean type. */
7670 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7671 arg0 = fold_convert (boolean_type_node, arg0);
7673 /* Note that the operand of this must be an int
7674 and its values must be 0 or 1.
7675 ("true" is a fixed value perhaps depending on the language,
7676 but we don't handle values other than 1 correctly yet.) */
7677 tem = invert_truthvalue (arg0);
7678 /* Avoid infinite recursion. */
7679 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7681 tem = fold_single_bit_test (code, arg0, arg1, type);
7686 return fold_convert (type, tem);
7688 case TRUTH_ANDIF_EXPR:
7689 /* Note that the operands of this must be ints
7690 and their values must be 0 or 1.
7691 ("true" is a fixed value perhaps depending on the language.) */
7692 /* If first arg is constant zero, return it. */
7693 if (integer_zerop (arg0))
7694 return fold_convert (type, arg0);
7695 case TRUTH_AND_EXPR:
7696 /* If either arg is constant true, drop it. */
7697 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7698 return non_lvalue (fold_convert (type, arg1));
7699 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7700 /* Preserve sequence points. */
7701 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7702 return non_lvalue (fold_convert (type, arg0));
7703 /* If second arg is constant zero, result is zero, but first arg
7704 must be evaluated. */
7705 if (integer_zerop (arg1))
7706 return omit_one_operand (type, arg1, arg0);
7707 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7708 case will be handled here. */
7709 if (integer_zerop (arg0))
7710 return omit_one_operand (type, arg0, arg1);
7712 /* !X && X is always false. */
7713 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7714 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7715 return omit_one_operand (type, integer_zero_node, arg1);
7716 /* X && !X is always false. */
7717 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7718 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7719 return omit_one_operand (type, integer_zero_node, arg0);
7722 /* We only do these simplifications if we are optimizing. */
7726 /* Check for things like (A || B) && (A || C). We can convert this
7727 to A || (B && C). Note that either operator can be any of the four
7728 truth and/or operations and the transformation will still be
7729 valid. Also note that we only care about order for the
7730 ANDIF and ORIF operators. If B contains side effects, this
7731 might change the truth-value of A. */
7732 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7733 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7734 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7735 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7736 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7737 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7739 tree a00 = TREE_OPERAND (arg0, 0);
7740 tree a01 = TREE_OPERAND (arg0, 1);
7741 tree a10 = TREE_OPERAND (arg1, 0);
7742 tree a11 = TREE_OPERAND (arg1, 1);
7743 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7744 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7745 && (code == TRUTH_AND_EXPR
7746 || code == TRUTH_OR_EXPR));
7748 if (operand_equal_p (a00, a10, 0))
7749 return fold (build2 (TREE_CODE (arg0), type, a00,
7750 fold (build2 (code, type, a01, a11))));
7751 else if (commutative && operand_equal_p (a00, a11, 0))
7752 return fold (build2 (TREE_CODE (arg0), type, a00,
7753 fold (build2 (code, type, a01, a10))));
7754 else if (commutative && operand_equal_p (a01, a10, 0))
7755 return fold (build2 (TREE_CODE (arg0), type, a01,
7756 fold (build2 (code, type, a00, a11))));
7758 /* This case is tricky because we must either have commutative
7759 operators or else A10 must not have side-effects. */
7761 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7762 && operand_equal_p (a01, a11, 0))
7763 return fold (build2 (TREE_CODE (arg0), type,
7764 fold (build2 (code, type, a00, a10)),
7768 /* See if we can build a range comparison. */
7769 if (0 != (tem = fold_range_test (t)))
7772 /* Check for the possibility of merging component references. If our
7773 lhs is another similar operation, try to merge its rhs with our
7774 rhs. Then try to merge our lhs and rhs. */
7775 if (TREE_CODE (arg0) == code
7776 && 0 != (tem = fold_truthop (code, type,
7777 TREE_OPERAND (arg0, 1), arg1)))
7778 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7780 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7785 case TRUTH_ORIF_EXPR:
7786 /* Note that the operands of this must be ints
7787 and their values must be 0 or true.
7788 ("true" is a fixed value perhaps depending on the language.) */
7789 /* If first arg is constant true, return it. */
7790 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7791 return fold_convert (type, arg0);
7793 /* If either arg is constant zero, drop it. */
7794 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7795 return non_lvalue (fold_convert (type, arg1));
7796 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7797 /* Preserve sequence points. */
7798 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7799 return non_lvalue (fold_convert (type, arg0));
7800 /* If second arg is constant true, result is true, but we must
7801 evaluate first arg. */
7802 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7803 return omit_one_operand (type, arg1, arg0);
7804 /* Likewise for first arg, but note this only occurs here for
7806 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7807 return omit_one_operand (type, arg0, arg1);
7809 /* !X || X is always true. */
7810 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7811 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7812 return omit_one_operand (type, integer_one_node, arg1);
7813 /* X || !X is always true. */
7814 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7815 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7816 return omit_one_operand (type, integer_one_node, arg0);
7820 case TRUTH_XOR_EXPR:
7821 /* If the second arg is constant zero, drop it. */
7822 if (integer_zerop (arg1))
7823 return non_lvalue (fold_convert (type, arg0));
7824 /* If the second arg is constant true, this is a logical inversion. */
7825 if (integer_onep (arg1))
7826 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7827 /* Identical arguments cancel to zero. */
7828 if (operand_equal_p (arg0, arg1, 0))
7829 return omit_one_operand (type, integer_zero_node, arg0);
7831 /* !X ^ X is always true. */
7832 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7833 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7834 return omit_one_operand (type, integer_one_node, arg1);
7836 /* X ^ !X is always true. */
7837 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7839 return omit_one_operand (type, integer_one_node, arg0);
7849 /* If one arg is a real or integer constant, put it last. */
7850 if (tree_swap_operands_p (arg0, arg1, true))
7851 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7853 /* If this is an equality comparison of the address of a non-weak
7854 object against zero, then we know the result. */
7855 if ((code == EQ_EXPR || code == NE_EXPR)
7856 && TREE_CODE (arg0) == ADDR_EXPR
7857 && DECL_P (TREE_OPERAND (arg0, 0))
7858 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7859 && integer_zerop (arg1))
7860 return constant_boolean_node (code != EQ_EXPR, type);
7862 /* If this is an equality comparison of the address of two non-weak,
7863 unaliased symbols neither of which are extern (since we do not
7864 have access to attributes for externs), then we know the result. */
7865 if ((code == EQ_EXPR || code == NE_EXPR)
7866 && TREE_CODE (arg0) == ADDR_EXPR
7867 && DECL_P (TREE_OPERAND (arg0, 0))
7868 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7869 && ! lookup_attribute ("alias",
7870 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7871 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7872 && TREE_CODE (arg1) == ADDR_EXPR
7873 && DECL_P (TREE_OPERAND (arg1, 0))
7874 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7875 && ! lookup_attribute ("alias",
7876 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7877 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7878 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7879 ? code == EQ_EXPR : code != EQ_EXPR,
7882 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7884 tree targ0 = strip_float_extensions (arg0);
7885 tree targ1 = strip_float_extensions (arg1);
7886 tree newtype = TREE_TYPE (targ0);
7888 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7889 newtype = TREE_TYPE (targ1);
7891 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7892 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7893 return fold (build2 (code, type, fold_convert (newtype, targ0),
7894 fold_convert (newtype, targ1)));
7896 /* (-a) CMP (-b) -> b CMP a */
7897 if (TREE_CODE (arg0) == NEGATE_EXPR
7898 && TREE_CODE (arg1) == NEGATE_EXPR)
7899 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7900 TREE_OPERAND (arg0, 0)));
7902 if (TREE_CODE (arg1) == REAL_CST)
7904 REAL_VALUE_TYPE cst;
7905 cst = TREE_REAL_CST (arg1);
7907 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7908 if (TREE_CODE (arg0) == NEGATE_EXPR)
7910 fold (build2 (swap_tree_comparison (code), type,
7911 TREE_OPERAND (arg0, 0),
7912 build_real (TREE_TYPE (arg1),
7913 REAL_VALUE_NEGATE (cst))));
7915 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7916 /* a CMP (-0) -> a CMP 0 */
7917 if (REAL_VALUE_MINUS_ZERO (cst))
7918 return fold (build2 (code, type, arg0,
7919 build_real (TREE_TYPE (arg1), dconst0)));
7921 /* x != NaN is always true, other ops are always false. */
7922 if (REAL_VALUE_ISNAN (cst)
7923 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7925 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7926 return omit_one_operand (type, tem, arg0);
7929 /* Fold comparisons against infinity. */
7930 if (REAL_VALUE_ISINF (cst))
7932 tem = fold_inf_compare (code, type, arg0, arg1);
7933 if (tem != NULL_TREE)
7938 /* If this is a comparison of a real constant with a PLUS_EXPR
7939 or a MINUS_EXPR of a real constant, we can convert it into a
7940 comparison with a revised real constant as long as no overflow
7941 occurs when unsafe_math_optimizations are enabled. */
7942 if (flag_unsafe_math_optimizations
7943 && TREE_CODE (arg1) == REAL_CST
7944 && (TREE_CODE (arg0) == PLUS_EXPR
7945 || TREE_CODE (arg0) == MINUS_EXPR)
7946 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7947 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7948 ? MINUS_EXPR : PLUS_EXPR,
7949 arg1, TREE_OPERAND (arg0, 1), 0))
7950 && ! TREE_CONSTANT_OVERFLOW (tem))
7951 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7953 /* Likewise, we can simplify a comparison of a real constant with
7954 a MINUS_EXPR whose first operand is also a real constant, i.e.
7955 (c1 - x) < c2 becomes x > c1-c2. */
7956 if (flag_unsafe_math_optimizations
7957 && TREE_CODE (arg1) == REAL_CST
7958 && TREE_CODE (arg0) == MINUS_EXPR
7959 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7960 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7962 && ! TREE_CONSTANT_OVERFLOW (tem))
7963 return fold (build2 (swap_tree_comparison (code), type,
7964 TREE_OPERAND (arg0, 1), tem));
7966 /* Fold comparisons against built-in math functions. */
7967 if (TREE_CODE (arg1) == REAL_CST
7968 && flag_unsafe_math_optimizations
7969 && ! flag_errno_math)
7971 enum built_in_function fcode = builtin_mathfn_code (arg0);
7973 if (fcode != END_BUILTINS)
7975 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7976 if (tem != NULL_TREE)
7982 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7983 if (TREE_CONSTANT (arg1)
7984 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7985 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7986 /* This optimization is invalid for ordered comparisons
7987 if CONST+INCR overflows or if foo+incr might overflow.
7988 This optimization is invalid for floating point due to rounding.
7989 For pointer types we assume overflow doesn't happen. */
7990 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7991 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7992 && (code == EQ_EXPR || code == NE_EXPR))))
7994 tree varop, newconst;
7996 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7998 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7999 arg1, TREE_OPERAND (arg0, 1)));
8000 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8001 TREE_OPERAND (arg0, 0),
8002 TREE_OPERAND (arg0, 1));
8006 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8007 arg1, TREE_OPERAND (arg0, 1)));
8008 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8009 TREE_OPERAND (arg0, 0),
8010 TREE_OPERAND (arg0, 1));
8014 /* If VAROP is a reference to a bitfield, we must mask
8015 the constant by the width of the field. */
8016 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8017 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8018 && host_integerp (DECL_SIZE (TREE_OPERAND
8019 (TREE_OPERAND (varop, 0), 1)), 1))
8021 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8022 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8023 tree folded_compare, shift;
8025 /* First check whether the comparison would come out
8026 always the same. If we don't do that we would
8027 change the meaning with the masking. */
8028 folded_compare = fold (build2 (code, type,
8029 TREE_OPERAND (varop, 0), arg1));
8030 if (integer_zerop (folded_compare)
8031 || integer_onep (folded_compare))
8032 return omit_one_operand (type, folded_compare, varop);
8034 shift = build_int_cst (NULL_TREE,
8035 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8036 shift = fold_convert (TREE_TYPE (varop), shift);
8037 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8039 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8043 return fold (build2 (code, type, varop, newconst));
8046 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8047 This transformation affects the cases which are handled in later
8048 optimizations involving comparisons with non-negative constants. */
8049 if (TREE_CODE (arg1) == INTEGER_CST
8050 && TREE_CODE (arg0) != INTEGER_CST
8051 && tree_int_cst_sgn (arg1) > 0)
8056 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8057 return fold (build2 (GT_EXPR, type, arg0, arg1));
8060 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8061 return fold (build2 (LE_EXPR, type, arg0, arg1));
8068 /* Comparisons with the highest or lowest possible integer of
8069 the specified size will have known values.
8071 This is quite similar to fold_relational_hi_lo; however, my
8072 attempts to share the code have been nothing but trouble.
8073 I give up for now. */
8075 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8077 if (TREE_CODE (arg1) == INTEGER_CST
8078 && ! TREE_CONSTANT_OVERFLOW (arg1)
8079 && width <= HOST_BITS_PER_WIDE_INT
8080 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8081 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8083 unsigned HOST_WIDE_INT signed_max;
8084 unsigned HOST_WIDE_INT max, min;
8086 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8088 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8090 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8096 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8099 if (TREE_INT_CST_HIGH (arg1) == 0
8100 && TREE_INT_CST_LOW (arg1) == max)
8104 return omit_one_operand (type, integer_zero_node, arg0);
8107 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8110 return omit_one_operand (type, integer_one_node, arg0);
8113 return fold (build2 (NE_EXPR, type, arg0, arg1));
8115 /* The GE_EXPR and LT_EXPR cases above are not normally
8116 reached because of previous transformations. */
8121 else if (TREE_INT_CST_HIGH (arg1) == 0
8122 && TREE_INT_CST_LOW (arg1) == max - 1)
8126 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8127 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8129 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8130 return fold (build2 (NE_EXPR, type, arg0, arg1));
8134 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8135 && TREE_INT_CST_LOW (arg1) == min)
8139 return omit_one_operand (type, integer_zero_node, arg0);
8142 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8145 return omit_one_operand (type, integer_one_node, arg0);
8148 return fold (build2 (NE_EXPR, type, arg0, arg1));
8153 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8154 && TREE_INT_CST_LOW (arg1) == min + 1)
8158 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8159 return fold (build2 (NE_EXPR, type, arg0, arg1));
8161 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8162 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8167 else if (!in_gimple_form
8168 && TREE_INT_CST_HIGH (arg1) == 0
8169 && TREE_INT_CST_LOW (arg1) == signed_max
8170 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8171 /* signed_type does not work on pointer types. */
8172 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8174 /* The following case also applies to X < signed_max+1
8175 and X >= signed_max+1 because previous transformations. */
8176 if (code == LE_EXPR || code == GT_EXPR)
8179 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8180 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8182 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8183 type, fold_convert (st0, arg0),
8184 fold_convert (st1, integer_zero_node)));
8190 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8191 a MINUS_EXPR of a constant, we can convert it into a comparison with
8192 a revised constant as long as no overflow occurs. */
8193 if ((code == EQ_EXPR || code == NE_EXPR)
8194 && TREE_CODE (arg1) == INTEGER_CST
8195 && (TREE_CODE (arg0) == PLUS_EXPR
8196 || TREE_CODE (arg0) == MINUS_EXPR)
8197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8198 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8199 ? MINUS_EXPR : PLUS_EXPR,
8200 arg1, TREE_OPERAND (arg0, 1), 0))
8201 && ! TREE_CONSTANT_OVERFLOW (tem))
8202 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8204 /* Similarly for a NEGATE_EXPR. */
8205 else if ((code == EQ_EXPR || code == NE_EXPR)
8206 && TREE_CODE (arg0) == NEGATE_EXPR
8207 && TREE_CODE (arg1) == INTEGER_CST
8208 && 0 != (tem = negate_expr (arg1))
8209 && TREE_CODE (tem) == INTEGER_CST
8210 && ! TREE_CONSTANT_OVERFLOW (tem))
8211 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8213 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8214 for !=. Don't do this for ordered comparisons due to overflow. */
8215 else if ((code == NE_EXPR || code == EQ_EXPR)
8216 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8217 return fold (build2 (code, type,
8218 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8220 /* If we are widening one operand of an integer comparison,
8221 see if the other operand is similarly being widened. Perhaps we
8222 can do the comparison in the narrower type. */
8223 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8224 && TREE_CODE (arg0) == NOP_EXPR
8225 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8226 && (code == EQ_EXPR || code == NE_EXPR
8227 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8228 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8229 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8230 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8231 || (TREE_CODE (t1) == INTEGER_CST
8232 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8233 return fold (build2 (code, type, tem,
8234 fold_convert (TREE_TYPE (tem), t1)));
8236 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8237 constant, we can simplify it. */
8238 else if (TREE_CODE (arg1) == INTEGER_CST
8239 && (TREE_CODE (arg0) == MIN_EXPR
8240 || TREE_CODE (arg0) == MAX_EXPR)
8241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8242 return optimize_minmax_comparison (t);
8244 /* If we are comparing an ABS_EXPR with a constant, we can
8245 convert all the cases into explicit comparisons, but they may
8246 well not be faster than doing the ABS and one comparison.
8247 But ABS (X) <= C is a range comparison, which becomes a subtraction
8248 and a comparison, and is probably faster. */
8249 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8250 && TREE_CODE (arg0) == ABS_EXPR
8251 && ! TREE_SIDE_EFFECTS (arg0)
8252 && (0 != (tem = negate_expr (arg1)))
8253 && TREE_CODE (tem) == INTEGER_CST
8254 && ! TREE_CONSTANT_OVERFLOW (tem))
8255 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8256 build2 (GE_EXPR, type,
8257 TREE_OPERAND (arg0, 0), tem),
8258 build2 (LE_EXPR, type,
8259 TREE_OPERAND (arg0, 0), arg1)));
8261 /* If this is an EQ or NE comparison with zero and ARG0 is
8262 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8263 two operations, but the latter can be done in one less insn
8264 on machines that have only two-operand insns or on which a
8265 constant cannot be the first operand. */
8266 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8267 && TREE_CODE (arg0) == BIT_AND_EXPR)
8269 tree arg00 = TREE_OPERAND (arg0, 0);
8270 tree arg01 = TREE_OPERAND (arg0, 1);
8271 if (TREE_CODE (arg00) == LSHIFT_EXPR
8272 && integer_onep (TREE_OPERAND (arg00, 0)))
8274 fold (build2 (code, type,
8275 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8276 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8277 arg01, TREE_OPERAND (arg00, 1)),
8278 fold_convert (TREE_TYPE (arg0),
8281 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8282 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8284 fold (build2 (code, type,
8285 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8286 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8287 arg00, TREE_OPERAND (arg01, 1)),
8288 fold_convert (TREE_TYPE (arg0),
8293 /* If this is an NE or EQ comparison of zero against the result of a
8294 signed MOD operation whose second operand is a power of 2, make
8295 the MOD operation unsigned since it is simpler and equivalent. */
8296 if ((code == NE_EXPR || code == EQ_EXPR)
8297 && integer_zerop (arg1)
8298 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8299 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8300 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8301 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8302 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8303 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8305 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8306 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8307 fold_convert (newtype,
8308 TREE_OPERAND (arg0, 0)),
8309 fold_convert (newtype,
8310 TREE_OPERAND (arg0, 1))));
8312 return fold (build2 (code, type, newmod,
8313 fold_convert (newtype, arg1)));
8316 /* If this is an NE comparison of zero with an AND of one, remove the
8317 comparison since the AND will give the correct value. */
8318 if (code == NE_EXPR && integer_zerop (arg1)
8319 && TREE_CODE (arg0) == BIT_AND_EXPR
8320 && integer_onep (TREE_OPERAND (arg0, 1)))
8321 return fold_convert (type, arg0);
8323 /* If we have (A & C) == C where C is a power of 2, convert this into
8324 (A & C) != 0. Similarly for NE_EXPR. */
8325 if ((code == EQ_EXPR || code == NE_EXPR)
8326 && TREE_CODE (arg0) == BIT_AND_EXPR
8327 && integer_pow2p (TREE_OPERAND (arg0, 1))
8328 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8329 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8330 arg0, fold_convert (TREE_TYPE (arg0),
8331 integer_zero_node)));
8333 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8334 2, then fold the expression into shifts and logical operations. */
8335 tem = fold_single_bit_test (code, arg0, arg1, type);
8339 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8340 Similarly for NE_EXPR. */
8341 if ((code == EQ_EXPR || code == NE_EXPR)
8342 && TREE_CODE (arg0) == BIT_AND_EXPR
8343 && TREE_CODE (arg1) == INTEGER_CST
8344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8347 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8348 arg1, build1 (BIT_NOT_EXPR,
8349 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8350 TREE_OPERAND (arg0, 1))));
8351 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8352 if (integer_nonzerop (dandnotc))
8353 return omit_one_operand (type, rslt, arg0);
8356 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8357 Similarly for NE_EXPR. */
8358 if ((code == EQ_EXPR || code == NE_EXPR)
8359 && TREE_CODE (arg0) == BIT_IOR_EXPR
8360 && TREE_CODE (arg1) == INTEGER_CST
8361 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8364 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8365 TREE_OPERAND (arg0, 1),
8366 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8367 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8368 if (integer_nonzerop (candnotd))
8369 return omit_one_operand (type, rslt, arg0);
8372 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8373 and similarly for >= into !=. */
8374 if ((code == LT_EXPR || code == GE_EXPR)
8375 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8376 && TREE_CODE (arg1) == LSHIFT_EXPR
8377 && integer_onep (TREE_OPERAND (arg1, 0)))
8378 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8379 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8380 TREE_OPERAND (arg1, 1)),
8381 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8383 else if ((code == LT_EXPR || code == GE_EXPR)
8384 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8385 && (TREE_CODE (arg1) == NOP_EXPR
8386 || TREE_CODE (arg1) == CONVERT_EXPR)
8387 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8388 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8390 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8391 fold_convert (TREE_TYPE (arg0),
8392 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8393 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8395 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8397 /* Simplify comparison of something with itself. (For IEEE
8398 floating-point, we can only do some of these simplifications.) */
8399 if (operand_equal_p (arg0, arg1, 0))
8404 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8405 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8406 return constant_boolean_node (1, type);
8411 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8412 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8413 return constant_boolean_node (1, type);
8414 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8417 /* For NE, we can only do this simplification if integer
8418 or we don't honor IEEE floating point NaNs. */
8419 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8420 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8422 /* ... fall through ... */
8425 return constant_boolean_node (0, type);
8431 /* If we are comparing an expression that just has comparisons
8432 of two integer values, arithmetic expressions of those comparisons,
8433 and constants, we can simplify it. There are only three cases
8434 to check: the two values can either be equal, the first can be
8435 greater, or the second can be greater. Fold the expression for
8436 those three values. Since each value must be 0 or 1, we have
8437 eight possibilities, each of which corresponds to the constant 0
8438 or 1 or one of the six possible comparisons.
8440 This handles common cases like (a > b) == 0 but also handles
8441 expressions like ((x > y) - (y > x)) > 0, which supposedly
8442 occur in macroized code. */
8444 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8446 tree cval1 = 0, cval2 = 0;
8449 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8450 /* Don't handle degenerate cases here; they should already
8451 have been handled anyway. */
8452 && cval1 != 0 && cval2 != 0
8453 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8454 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8455 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8456 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8457 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8458 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8459 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8461 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8462 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8464 /* We can't just pass T to eval_subst in case cval1 or cval2
8465 was the same as ARG1. */
8468 = fold (build2 (code, type,
8469 eval_subst (arg0, cval1, maxval,
8473 = fold (build2 (code, type,
8474 eval_subst (arg0, cval1, maxval,
8478 = fold (build2 (code, type,
8479 eval_subst (arg0, cval1, minval,
8483 /* All three of these results should be 0 or 1. Confirm they
8484 are. Then use those values to select the proper code
8487 if ((integer_zerop (high_result)
8488 || integer_onep (high_result))
8489 && (integer_zerop (equal_result)
8490 || integer_onep (equal_result))
8491 && (integer_zerop (low_result)
8492 || integer_onep (low_result)))
8494 /* Make a 3-bit mask with the high-order bit being the
8495 value for `>', the next for '=', and the low for '<'. */
8496 switch ((integer_onep (high_result) * 4)
8497 + (integer_onep (equal_result) * 2)
8498 + integer_onep (low_result))
8502 return omit_one_operand (type, integer_zero_node, arg0);
8523 return omit_one_operand (type, integer_one_node, arg0);
8526 tem = build2 (code, type, cval1, cval2);
8528 return save_expr (tem);
8535 /* If this is a comparison of a field, we may be able to simplify it. */
8536 if (((TREE_CODE (arg0) == COMPONENT_REF
8537 && lang_hooks.can_use_bit_fields_p ())
8538 || TREE_CODE (arg0) == BIT_FIELD_REF)
8539 && (code == EQ_EXPR || code == NE_EXPR)
8540 /* Handle the constant case even without -O
8541 to make sure the warnings are given. */
8542 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8544 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8549 /* If this is a comparison of complex values and either or both sides
8550 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8551 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8552 This may prevent needless evaluations. */
8553 if ((code == EQ_EXPR || code == NE_EXPR)
8554 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8555 && (TREE_CODE (arg0) == COMPLEX_EXPR
8556 || TREE_CODE (arg1) == COMPLEX_EXPR
8557 || TREE_CODE (arg0) == COMPLEX_CST
8558 || TREE_CODE (arg1) == COMPLEX_CST))
8560 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8561 tree real0, imag0, real1, imag1;
8563 arg0 = save_expr (arg0);
8564 arg1 = save_expr (arg1);
8565 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8566 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8567 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8568 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8570 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8573 fold (build2 (code, type, real0, real1)),
8574 fold (build2 (code, type, imag0, imag1))));
8577 /* Optimize comparisons of strlen vs zero to a compare of the
8578 first character of the string vs zero. To wit,
8579 strlen(ptr) == 0 => *ptr == 0
8580 strlen(ptr) != 0 => *ptr != 0
8581 Other cases should reduce to one of these two (or a constant)
8582 due to the return value of strlen being unsigned. */
8583 if ((code == EQ_EXPR || code == NE_EXPR)
8584 && integer_zerop (arg1)
8585 && TREE_CODE (arg0) == CALL_EXPR)
8587 tree fndecl = get_callee_fndecl (arg0);
8591 && DECL_BUILT_IN (fndecl)
8592 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8593 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8594 && (arglist = TREE_OPERAND (arg0, 1))
8595 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8596 && ! TREE_CHAIN (arglist))
8597 return fold (build2 (code, type,
8598 build1 (INDIRECT_REF, char_type_node,
8599 TREE_VALUE (arglist)),
8600 fold_convert (char_type_node,
8601 integer_zero_node)));
8604 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8605 into a single range test. */
8606 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8607 && TREE_CODE (arg1) == INTEGER_CST
8608 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8609 && !integer_zerop (TREE_OPERAND (arg0, 1))
8610 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8611 && !TREE_OVERFLOW (arg1))
8613 t1 = fold_div_compare (code, type, arg0, arg1);
8614 if (t1 != NULL_TREE)
8618 if ((code == EQ_EXPR || code == NE_EXPR)
8619 && !TREE_SIDE_EFFECTS (arg0)
8620 && integer_zerop (arg1)
8621 && tree_expr_nonzero_p (arg0))
8622 return constant_boolean_node (code==NE_EXPR, type);
8624 t1 = fold_relational_const (code, type, arg0, arg1);
8625 return t1 == NULL_TREE ? t : t1;
8627 case UNORDERED_EXPR:
8635 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8637 t1 = fold_relational_const (code, type, arg0, arg1);
8638 if (t1 != NULL_TREE)
8642 /* If the first operand is NaN, the result is constant. */
8643 if (TREE_CODE (arg0) == REAL_CST
8644 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8645 && (code != LTGT_EXPR || ! flag_trapping_math))
8647 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8650 return omit_one_operand (type, t1, arg1);
8653 /* If the second operand is NaN, the result is constant. */
8654 if (TREE_CODE (arg1) == REAL_CST
8655 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8656 && (code != LTGT_EXPR || ! flag_trapping_math))
8658 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8661 return omit_one_operand (type, t1, arg0);
8664 /* Simplify unordered comparison of something with itself. */
8665 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8666 && operand_equal_p (arg0, arg1, 0))
8667 return constant_boolean_node (1, type);
8669 if (code == LTGT_EXPR
8670 && !flag_trapping_math
8671 && operand_equal_p (arg0, arg1, 0))
8672 return constant_boolean_node (0, type);
8674 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8676 tree targ0 = strip_float_extensions (arg0);
8677 tree targ1 = strip_float_extensions (arg1);
8678 tree newtype = TREE_TYPE (targ0);
8680 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8681 newtype = TREE_TYPE (targ1);
8683 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8684 return fold (build2 (code, type, fold_convert (newtype, targ0),
8685 fold_convert (newtype, targ1)));
8691 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8692 so all simple results must be passed through pedantic_non_lvalue. */
8693 if (TREE_CODE (arg0) == INTEGER_CST)
8695 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8696 /* Only optimize constant conditions when the selected branch
8697 has the same type as the COND_EXPR. This avoids optimizing
8698 away "c ? x : throw", where the throw has a void type. */
8699 if (! VOID_TYPE_P (TREE_TYPE (tem))
8700 || VOID_TYPE_P (type))
8701 return pedantic_non_lvalue (tem);
8704 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8705 return pedantic_omit_one_operand (type, arg1, arg0);
8707 /* If we have A op B ? A : C, we may be able to convert this to a
8708 simpler expression, depending on the operation and the values
8709 of B and C. Signed zeros prevent all of these transformations,
8710 for reasons given above each one.
8712 Also try swapping the arguments and inverting the conditional. */
8713 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8714 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8715 arg1, TREE_OPERAND (arg0, 1))
8716 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8718 tem = fold_cond_expr_with_comparison (type, arg0,
8719 TREE_OPERAND (t, 1),
8720 TREE_OPERAND (t, 2));
8725 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8726 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8727 TREE_OPERAND (t, 2),
8728 TREE_OPERAND (arg0, 1))
8729 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8731 tem = invert_truthvalue (arg0);
8732 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8734 tem = fold_cond_expr_with_comparison (type, tem,
8735 TREE_OPERAND (t, 2),
8736 TREE_OPERAND (t, 1));
8742 /* If the second operand is simpler than the third, swap them
8743 since that produces better jump optimization results. */
8744 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8745 TREE_OPERAND (t, 2), false))
8747 /* See if this can be inverted. If it can't, possibly because
8748 it was a floating-point inequality comparison, don't do
8750 tem = invert_truthvalue (arg0);
8752 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8753 return fold (build3 (code, type, tem,
8754 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8757 /* Convert A ? 1 : 0 to simply A. */
8758 if (integer_onep (TREE_OPERAND (t, 1))
8759 && integer_zerop (TREE_OPERAND (t, 2))
8760 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8761 call to fold will try to move the conversion inside
8762 a COND, which will recurse. In that case, the COND_EXPR
8763 is probably the best choice, so leave it alone. */
8764 && type == TREE_TYPE (arg0))
8765 return pedantic_non_lvalue (arg0);
8767 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8768 over COND_EXPR in cases such as floating point comparisons. */
8769 if (integer_zerop (TREE_OPERAND (t, 1))
8770 && integer_onep (TREE_OPERAND (t, 2))
8771 && truth_value_p (TREE_CODE (arg0)))
8772 return pedantic_non_lvalue (fold_convert (type,
8773 invert_truthvalue (arg0)));
8775 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8776 if (TREE_CODE (arg0) == LT_EXPR
8777 && integer_zerop (TREE_OPERAND (arg0, 1))
8778 && integer_zerop (TREE_OPERAND (t, 2))
8779 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8780 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8781 TREE_TYPE (tem), tem, arg1)));
8783 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8784 already handled above. */
8785 if (TREE_CODE (arg0) == BIT_AND_EXPR
8786 && integer_onep (TREE_OPERAND (arg0, 1))
8787 && integer_zerop (TREE_OPERAND (t, 2))
8788 && integer_pow2p (arg1))
8790 tree tem = TREE_OPERAND (arg0, 0);
8792 if (TREE_CODE (tem) == RSHIFT_EXPR
8793 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8794 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8795 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8796 return fold (build2 (BIT_AND_EXPR, type,
8797 TREE_OPERAND (tem, 0), arg1));
8800 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8801 is probably obsolete because the first operand should be a
8802 truth value (that's why we have the two cases above), but let's
8803 leave it in until we can confirm this for all front-ends. */
8804 if (integer_zerop (TREE_OPERAND (t, 2))
8805 && TREE_CODE (arg0) == NE_EXPR
8806 && integer_zerop (TREE_OPERAND (arg0, 1))
8807 && integer_pow2p (arg1)
8808 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8809 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8810 arg1, OEP_ONLY_CONST))
8811 return pedantic_non_lvalue (fold_convert (type,
8812 TREE_OPERAND (arg0, 0)));
8814 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8815 if (integer_zerop (TREE_OPERAND (t, 2))
8816 && truth_value_p (TREE_CODE (arg0))
8817 && truth_value_p (TREE_CODE (arg1)))
8818 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8820 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8821 if (integer_onep (TREE_OPERAND (t, 2))
8822 && truth_value_p (TREE_CODE (arg0))
8823 && truth_value_p (TREE_CODE (arg1)))
8825 /* Only perform transformation if ARG0 is easily inverted. */
8826 tem = invert_truthvalue (arg0);
8827 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8828 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8831 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8832 if (integer_zerop (arg1)
8833 && truth_value_p (TREE_CODE (arg0))
8834 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8836 /* Only perform transformation if ARG0 is easily inverted. */
8837 tem = invert_truthvalue (arg0);
8838 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8839 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8840 TREE_OPERAND (t, 2)));
8843 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8844 if (integer_onep (arg1)
8845 && truth_value_p (TREE_CODE (arg0))
8846 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8847 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8848 TREE_OPERAND (t, 2)));
8853 /* When pedantic, a compound expression can be neither an lvalue
8854 nor an integer constant expression. */
8855 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8857 /* Don't let (0, 0) be null pointer constant. */
8858 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8859 : fold_convert (type, arg1);
8860 return pedantic_non_lvalue (tem);
8864 return build_complex (type, arg0, arg1);
8868 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8870 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8871 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8872 TREE_OPERAND (arg0, 1));
8873 else if (TREE_CODE (arg0) == COMPLEX_CST)
8874 return TREE_REALPART (arg0);
8875 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8876 return fold (build2 (TREE_CODE (arg0), type,
8877 fold (build1 (REALPART_EXPR, type,
8878 TREE_OPERAND (arg0, 0))),
8879 fold (build1 (REALPART_EXPR, type,
8880 TREE_OPERAND (arg0, 1)))));
8884 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8885 return fold_convert (type, integer_zero_node);
8886 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8887 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8888 TREE_OPERAND (arg0, 0));
8889 else if (TREE_CODE (arg0) == COMPLEX_CST)
8890 return TREE_IMAGPART (arg0);
8891 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8892 return fold (build2 (TREE_CODE (arg0), type,
8893 fold (build1 (IMAGPART_EXPR, type,
8894 TREE_OPERAND (arg0, 0))),
8895 fold (build1 (IMAGPART_EXPR, type,
8896 TREE_OPERAND (arg0, 1)))));
8899 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8901 case CLEANUP_POINT_EXPR:
8902 if (! has_cleanups (arg0))
8903 return TREE_OPERAND (t, 0);
8906 enum tree_code code0 = TREE_CODE (arg0);
8907 int kind0 = TREE_CODE_CLASS (code0);
8908 tree arg00 = TREE_OPERAND (arg0, 0);
8911 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8912 return fold (build1 (code0, type,
8913 fold (build1 (CLEANUP_POINT_EXPR,
8914 TREE_TYPE (arg00), arg00))));
8916 if (kind0 == '<' || kind0 == '2'
8917 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8918 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8919 || code0 == TRUTH_XOR_EXPR)
8921 arg01 = TREE_OPERAND (arg0, 1);
8923 if (TREE_CONSTANT (arg00)
8924 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8925 && ! has_cleanups (arg00)))
8926 return fold (build2 (code0, type, arg00,
8927 fold (build1 (CLEANUP_POINT_EXPR,
8928 TREE_TYPE (arg01), arg01))));
8930 if (TREE_CONSTANT (arg01))
8931 return fold (build2 (code0, type,
8932 fold (build1 (CLEANUP_POINT_EXPR,
8933 TREE_TYPE (arg00), arg00)),
8941 /* Check for a built-in function. */
8942 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8943 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8945 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8947 tree tmp = fold_builtin (t, false);
8955 } /* switch (code) */
8958 #ifdef ENABLE_FOLD_CHECKING
8961 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8962 static void fold_check_failed (tree, tree);
8963 void print_fold_checksum (tree);
8965 /* When --enable-checking=fold, compute a digest of expr before
8966 and after actual fold call to see if fold did not accidentally
8967 change original expr. */
/* NOTE(review): this listing is elided -- the embedded original line
   numbers jump (8974 -> 8977, 8980 -> 8983, ...), so the `tree fold
   (tree expr)' signature, local declarations of `ret', `ctx' and `ht',
   and the trailing htab_delete/return are not visible here.  Restore
   the missing lines from upstream GCC fold-const.c before building.
   Visible logic: checksum EXPR before and after calling fold_1, and
   report an internal error if fold_1 mutated EXPR in place.  */
8974 unsigned char checksum_before[16], checksum_after[16];
/* Hash table of already-visited nodes so shared subtrees are hashed once.  */
8977 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
/* Digest of EXPR before folding.  */
8978 md5_init_ctx (&ctx);
8979 fold_checksum_tree (expr, &ctx, ht);
8980 md5_finish_ctx (&ctx, checksum_before);
/* Actual folding work is done by fold_1; this wrapper only checks it.  */
8983 ret = fold_1 (expr);
/* Digest of EXPR after folding; it must be unchanged.  */
8985 md5_init_ctx (&ctx);
8986 fold_checksum_tree (expr, &ctx, ht);
8987 md5_finish_ctx (&ctx, checksum_after);
8990 if (memcmp (checksum_before, checksum_after, 16))
8991 fold_check_failed (expr, ret);
/* Debugging aid: print the md5 checksum of EXPR to stderr as 16 hex
   byte pairs followed by a newline.
   NOTE(review): elided listing -- the return-type line, braces, and the
   `struct md5_ctx ctx' / `htab_t ht' declarations (original lines
   8996, 8998, 9001-9002) are missing; restore from upstream.  */
8997 print_fold_checksum (tree expr)
9000 unsigned char checksum[16], cnt;
9003 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9004 md5_init_ctx (&ctx);
9005 fold_checksum_tree (expr, &ctx, ht);
9006 md5_finish_ctx (&ctx, checksum);
9008 for (cnt = 0; cnt < 16; ++cnt)
9009 fprintf (stderr, "%02x", checksum[cnt]);
9010 putc ('\n', stderr);
/* Called when the before/after checksums differ: fold modified its
   input tree in place, which is a compiler bug.  internal_error does
   not return.  EXPR and RET are unused (kept for debugger inspection).
   NOTE(review): the `static void' line, braces (original lines
   9013/9015/9017) are elided from this listing.  */
9014 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9016 internal_error ("fold check: original tree changed by fold");
/* Recursively fold EXPR's bytes and those of the nodes it references
   into the md5 context CTX.  HT records already-visited nodes so each
   shared node is hashed only once.  Fields that fold is allowed to
   mutate (DECL_ASSEMBLER_NAME, TYPE_POINTER_TO, TYPE_REFERENCE_TO) are
   temporarily cleared in a local copy before hashing.
   NOTE(review): heavily elided listing -- the `static void' line,
   braces, `void **slot' / `int i, len' declarations, the sanity abort,
   the early return for visited/NULL nodes, and every `case' label of
   the switch below are missing (gaps in the embedded numbering, e.g.
   9029 -> 9033, 9057 -> 9058 region).  Restore from upstream before use.  */
9020 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9023 enum tree_code code;
9024 char buf[sizeof (struct tree_decl)];
/* Compile-time sanity check that BUF is large enough to hold a copy of
   any node kind handled below (an abort on the elided lines follows).  */
9027 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9028 > sizeof (struct tree_decl)
9029 || sizeof (struct tree_type) > sizeof (struct tree_decl))
9033 slot = htab_find_slot (ht, expr, INSERT);
9037 code = TREE_CODE (expr);
9038 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9040 /* Allow DECL_ASSEMBLER_NAME to be modified.  */
9041 memcpy (buf, expr, tree_size (expr));
9043 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9045 else if (TREE_CODE_CLASS (code) == 't'
9046 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9048 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
9049 memcpy (buf, expr, tree_size (expr));
9051 TYPE_POINTER_TO (expr) = NULL;
9052 TYPE_REFERENCE_TO (expr) = NULL;
/* Hash the node's raw bytes, then recurse into referenced trees.  */
9054 md5_process_bytes (expr, tree_size (expr), ctx);
9055 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9056 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9057 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Per-class recursion; the `case' labels between the statements below
   were dropped by the elision -- do not re-indent or reflow without
   first restoring them.  */
9058 switch (TREE_CODE_CLASS (code))
9064 md5_process_bytes (TREE_STRING_POINTER (expr),
9065 TREE_STRING_LENGTH (expr), ctx);
9068 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9069 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9072 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9082 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9083 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9086 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9087 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9099 len = first_rtl_op (code);
9100 for (i = 0; i < len; ++i)
9101 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declaration nodes: hash every tree-valued field.  */
9104 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9105 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9106 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9107 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9108 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9109 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9110 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9111 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9112 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9113 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9114 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Type nodes.  */
9117 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9118 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9119 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9120 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9121 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9122 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9123 if (INTEGRAL_TYPE_P (expr)
9124 || SCALAR_FLOAT_TYPE_P (expr))
9126 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9127 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9129 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9130 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9131 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9140 /* Perform constant folding and related simplification of initializer
9141 expression EXPR. This behaves identically to "fold" but ignores
9142 potential run-time traps and exceptions that fold must preserve. */
/* NOTE(review): elided listing -- the `tree' return-type line, braces,
   the `tree result;' declaration, the clearing of flag_trapv, and the
   `return result;' (original lines 9144/9146/9150/9154/9162) are not
   visible here; presumably flag_trapv is also zeroed between lines
   9153 and 9156 -- verify against upstream fold-const.c.  */
9145 fold_initializer (tree expr)
/* Save the trap-related flags, fold with them suppressed, then restore.  */
9147 int saved_signaling_nans = flag_signaling_nans;
9148 int saved_trapping_math = flag_trapping_math;
9149 int saved_trapv = flag_trapv;
9152 flag_signaling_nans = 0;
9153 flag_trapping_math = 0;
9156 result = fold (expr);
9158 flag_signaling_nans = saved_signaling_nans;
9159 flag_trapping_math = saved_trapping_math;
9160 flag_trapv = saved_trapv;
9165 /* Determine if first argument is a multiple of second argument. Return 0 if
9166 it is not, or we cannot easily determine it to be.
9168 An example of the sort of thing we care about (at this point; this routine
9169 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9170 fold cases do now) is discovering that
9172 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9178 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9180 This code also handles discovering that
9182 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9184 is a multiple of 8 so we don't have to worry about dealing with a
9187 Note that we *look* inside a SAVE_EXPR only to determine how it was
9188 calculated; it is not safe for fold to do much of anything else with the
9189 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9190 at run time. For example, the latter example above *cannot* be implemented
9191 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9192 evaluation time of the original SAVE_EXPR is not necessarily the same at
9193 the time the new expression is evaluated. The only optimization of this
9194 sort that would be valid is changing
9196 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9200 SAVE_EXPR (I) * SAVE_EXPR (J)
9202 (where the same SAVE_EXPR (J) is used in the original and the
9203 transformed version). */
/* NOTE(review): elided listing -- the `static int' line, braces, the
   `case' labels of the switch (BIT_IOR_EXPR/PLUS_EXPR/MULT_EXPR/
   LSHIFT_EXPR/NOP_EXPR/SAVE_EXPR/other -- TODO confirm against
   upstream), several `return 0;' lines and the final closing of the
   TRUNC_MOD_EXPR call are missing (numbering gaps, e.g. 9218 -> 9222,
   9263 -> end).  Restore from upstream before building.  */
9206 multiple_of_p (tree type, tree top, tree bottom)
/* Trivial case: TOP and BOTTOM are literally the same expression.  */
9208 if (operand_equal_p (top, bottom, 0))
9211 if (TREE_CODE (type) != INTEGER_TYPE)
9214 switch (TREE_CODE (top))
/* Either operand being a multiple suffices for this code class ...  */
9217 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9218 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* ... while here both operands must be multiples.  */
9222 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9223 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Constant left shift: rewrite as a multiplication by 1<<N when the
   shift count is small enough, then recurse.  */
9226 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9230 op1 = TREE_OPERAND (top, 1);
9231 /* const_binop may not detect overflow correctly,
9232 so check for it explicitly here. */
9233 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9234 > TREE_INT_CST_LOW (op1)
9235 && TREE_INT_CST_HIGH (op1) == 0
9236 && 0 != (t1 = fold_convert (type,
9237 const_binop (LSHIFT_EXPR,
9240 && ! TREE_OVERFLOW (t1))
9241 return multiple_of_p (type, t1, bottom);
9246 /* Can't handle conversions from non-integral or wider integral type. */
9247 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9248 || (TYPE_PRECISION (type)
9249 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9252 /* .. fall through ... */
9255 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Fallback: constant BOTTOM -- check TOP % BOTTOM == 0, but refuse
   negative constants in unsigned types.  */
9258 if (TREE_CODE (bottom) != INTEGER_CST
9259 || (TYPE_UNSIGNED (type)
9260 && (tree_int_cst_sgn (top) < 0
9261 || tree_int_cst_sgn (bottom) < 0)))
9263 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9271 /* Return true if `t' is known to be non-negative. */
/* NOTE(review): elided listing -- the `int' return-type line, braces,
   and every `case' label of both switches (the tree-code switch and
   the built-in-function switch) are missing (numbering gaps such as
   9276 -> 9282, 9331 -> 9335, 9447 -> 9449).  The statements below are
   therefore shown without the labels that select them; restore the
   missing lines from upstream GCC fold-const.c before building or
   re-indenting.  The function is conservative: when in doubt it falls
   through to the final `return false' (elided, after line 9534).  */
9274 tree_expr_nonnegative_p (tree t)
9276 switch (TREE_CODE (t))
/* Integer constant: sign of the constant itself.  */
9282 return tree_int_cst_sgn (t) >= 0;
/* Real constant: test the sign bit of the REAL_VALUE.  */
9285 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* (Addition case.)  For floats, nonneg + nonneg stays nonneg.  */
9288 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9289 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9290 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9292 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9293 both unsigned and at least 2 bits shorter than the result. */
9294 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9295 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9296 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9298 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9299 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9300 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9301 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9303 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9304 TYPE_PRECISION (inner2)) + 1;
9305 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* (Multiplication case.)  */
9311 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9313 /* x * x for floating point x is always non-negative. */
9314 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9316 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9317 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9320 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9321 both unsigned and their total bits is shorter than the result. */
9322 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9323 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9324 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9326 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9327 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9328 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9329 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9330 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9331 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonneg / nonneg is nonneg.  */
9335 case TRUNC_DIV_EXPR:
9337 case FLOOR_DIV_EXPR:
9338 case ROUND_DIV_EXPR:
9339 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9340 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: the result has the sign of the first operand.  */
9342 case TRUNC_MOD_EXPR:
9344 case FLOOR_MOD_EXPR:
9345 case ROUND_MOD_EXPR:
9346 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9349 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9350 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9353 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9354 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9357 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9358 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* (Conversion case.)  Nonnegativity is preserved across conversions
   between real/integer types under the conditions below.  */
9362 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9363 tree outer_type = TREE_TYPE (t);
9365 if (TREE_CODE (outer_type) == REAL_TYPE)
9367 if (TREE_CODE (inner_type) == REAL_TYPE)
9368 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9369 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9371 if (TYPE_UNSIGNED (inner_type))
9373 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9376 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9378 if (TREE_CODE (inner_type) == REAL_TYPE)
9379 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9380 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9381 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9382 && TYPE_UNSIGNED (inner_type);
/* Conditional: both arms must be nonnegative.  */
9388 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9389 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9391 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9393 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9394 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9396 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9397 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9399 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9401 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9403 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9404 case NON_LVALUE_EXPR:
9405 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9407 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* TARGET_EXPR: look through the initializer to find what is finally
   stored in the target slot.  */
9411 tree temp = TARGET_EXPR_SLOT (t);
9412 t = TARGET_EXPR_INITIAL (t);
9414 /* If the initializer is non-void, then it's a normal expression
9415 that will be assigned to the slot. */
9416 if (!VOID_TYPE_P (t))
9417 return tree_expr_nonnegative_p (t);
9419 /* Otherwise, the initializer sets the slot in some way. One common
9420 way is an assignment statement at the end of the initializer. */
9423 if (TREE_CODE (t) == BIND_EXPR)
9424 t = expr_last (BIND_EXPR_BODY (t));
9425 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9426 || TREE_CODE (t) == TRY_CATCH_EXPR)
9427 t = expr_last (TREE_OPERAND (t, 0));
9428 else if (TREE_CODE (t) == STATEMENT_LIST)
9433 if (TREE_CODE (t) == MODIFY_EXPR
9434 && TREE_OPERAND (t, 0) == temp)
9435 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: special-case known math built-ins by result sign.  */
9442 tree fndecl = get_callee_fndecl (t);
9443 tree arglist = TREE_OPERAND (t, 1);
9445 && DECL_BUILT_IN (fndecl)
9446 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9447 switch (DECL_FUNCTION_CODE (fndecl))
9449 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9450 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9451 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9452 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* Built-ins whose result is always nonnegative.  */
9454 CASE_BUILTIN_F (BUILT_IN_ACOS)
9455 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9456 CASE_BUILTIN_F (BUILT_IN_CABS)
9457 CASE_BUILTIN_F (BUILT_IN_COSH)
9458 CASE_BUILTIN_F (BUILT_IN_ERFC)
9459 CASE_BUILTIN_F (BUILT_IN_EXP)
9460 CASE_BUILTIN_F (BUILT_IN_EXP10)
9461 CASE_BUILTIN_F (BUILT_IN_EXP2)
9462 CASE_BUILTIN_F (BUILT_IN_FABS)
9463 CASE_BUILTIN_F (BUILT_IN_FDIM)
9464 CASE_BUILTIN_F (BUILT_IN_FREXP)
9465 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9466 CASE_BUILTIN_F (BUILT_IN_POW10)
9467 CASE_BUILTIN_I (BUILT_IN_FFS)
9468 CASE_BUILTIN_I (BUILT_IN_PARITY)
9469 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9473 CASE_BUILTIN_F (BUILT_IN_SQRT)
9474 /* sqrt(-0.0) is -0.0. */
9475 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9477 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
/* Built-ins whose result sign follows the first argument.  */
9479 CASE_BUILTIN_F (BUILT_IN_ASINH)
9480 CASE_BUILTIN_F (BUILT_IN_ATAN)
9481 CASE_BUILTIN_F (BUILT_IN_ATANH)
9482 CASE_BUILTIN_F (BUILT_IN_CBRT)
9483 CASE_BUILTIN_F (BUILT_IN_CEIL)
9484 CASE_BUILTIN_F (BUILT_IN_ERF)
9485 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9486 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9487 CASE_BUILTIN_F (BUILT_IN_FMOD)
9488 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9489 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9490 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9491 CASE_BUILTIN_F (BUILT_IN_LRINT)
9492 CASE_BUILTIN_F (BUILT_IN_LROUND)
9493 CASE_BUILTIN_F (BUILT_IN_MODF)
9494 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9495 CASE_BUILTIN_F (BUILT_IN_POW)
9496 CASE_BUILTIN_F (BUILT_IN_RINT)
9497 CASE_BUILTIN_F (BUILT_IN_ROUND)
9498 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9499 CASE_BUILTIN_F (BUILT_IN_SINH)
9500 CASE_BUILTIN_F (BUILT_IN_TANH)
9501 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9502 /* True if the 1st argument is nonnegative. */
9503 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9505 CASE_BUILTIN_F (BUILT_IN_FMAX)
9506 /* True if the 1st OR 2nd arguments are nonnegative. */
9507 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9508 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9510 CASE_BUILTIN_F (BUILT_IN_FMIN)
9511 /* True if the 1st AND 2nd arguments are nonnegative. */
9512 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9513 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9515 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9516 /* True if the 2nd argument is nonnegative. */
9517 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9521 #undef CASE_BUILTIN_F
9522 #undef CASE_BUILTIN_I
9526 /* ... fall through ... */
9529 if (truth_value_p (TREE_CODE (t)))
9530 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9534 /* We don't know sign of `t', so be conservative and return false. */
9538 /* Return true when T is an address and is known to be nonzero.
9539 For floating point we further ensure that T is not denormal.
9540 Similar logic is present in nonzero_address in rtlanal.h */
/* NOTE(review): elided listing -- the `static bool' line, braces, and
   the `case' labels of the switch (ABS_EXPR/INTEGER_CST/PLUS_EXPR/
   MULT_EXPR/NOP_EXPR/ADDR_EXPR/COND_EXPR/MIN_EXPR/MAX_EXPR/... --
   TODO confirm against upstream) are missing, as is the final
   conservative `return false'.  Restore from upstream before use.  */
9543 tree_expr_nonzero_p (tree t)
9545 tree type = TREE_TYPE (t);
9547 /* Doing something useful for floating point would need more work. */
9548 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9551 switch (TREE_CODE (t))
/* flag_wrapv guards: signed-overflow reasoning is only valid when
   signed arithmetic does not wrap.  */
9554 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9555 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9558 /* We used to test for !integer_zerop here. This does not work correctly
9559 if TREE_CONSTANT_OVERFLOW (t). */
9560 return (TREE_INT_CST_LOW (t) != 0
9561 || TREE_INT_CST_HIGH (t) != 0);
9564 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9566 /* With the presence of negative values it is hard
9567 to say something. */
9568 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9569 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9571 /* One of operands must be positive and the other non-negative. */
9572 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9573 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9578 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9580 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9581 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversion: nonzero survives a widening (or equal-width) cast.  */
9587 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9588 tree outer_type = TREE_TYPE (t);
9590 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9591 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Address of an object: nonzero unless the base may be a weak symbol
   that links to NULL.  */
9597 tree base = get_base_address (TREE_OPERAND (t, 0));
9602 /* Weak declarations may link to NULL. */
9604 return !DECL_WEAK (base);
9606 /* Constants are never weak. */
9607 if (TREE_CODE_CLASS (TREE_CODE (base)) == 'c')
/* Conditional: both arms nonzero.  */
9614 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9615 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9618 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9619 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9622 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9624 /* When both operands are nonzero, then MAX must be too. */
9625 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9628 /* MAX where operand 0 is positive is positive. */
9629 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9631 /* MAX where operand 1 is positive is positive. */
9632 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9633 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9640 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9643 case NON_LVALUE_EXPR:
9644 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9647 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9648 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9656 /* See if we are applying CODE, a relational to the highest or lowest
9657 possible integer of TYPE. If so, then the result is a compile
/* NOTE(review): elided listing -- the rest of this header comment, the
   `static tree' line, the second parameter line (`tree *op1_p)' --
   TODO confirm name against upstream), braces, the `case' labels of
   the per-comparison switches (GT/GE/LE/LT -- visible only by their
   effects below), the `in_gimple_form' test near line 9795, and the
   final `return NULL_TREE;' are all missing.  Restore from upstream
   GCC fold-const.c; do not reflow until then.  */
9661 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9666 enum tree_code code = *code_p;
9667 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only handle non-overflowed integer constants narrow enough to fit in
   a single HOST_WIDE_INT.  */
9669 if (TREE_CODE (op1) == INTEGER_CST
9670 && ! TREE_CONSTANT_OVERFLOW (op1)
9671 && width <= HOST_BITS_PER_WIDE_INT
9672 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9673 || POINTER_TYPE_P (TREE_TYPE (op1))))
9675 unsigned HOST_WIDE_INT signed_max;
9676 unsigned HOST_WIDE_INT max, min;
/* Compute the extreme representable values for OP1's width/signedness.  */
9678 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9680 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9682 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9688 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 == MAX: comparisons against the maximum fold to a constant.  */
9691 if (TREE_INT_CST_HIGH (op1) == 0
9692 && TREE_INT_CST_LOW (op1) == max)
9696 return omit_one_operand (type, integer_zero_node, op0);
9702 return omit_one_operand (type, integer_one_node, op0);
9708 /* The GE_EXPR and LT_EXPR cases above are not normally
9709 reached because of previous transformations. */
/* OP1 == MAX-1: convert to an equality against MAX by adding one.  */
9714 else if (TREE_INT_CST_HIGH (op1) == 0
9715 && TREE_INT_CST_LOW (op1) == max - 1)
9720 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9724 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* OP1 == MIN: comparisons against the minimum fold to a constant.  */
9729 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9730 && TREE_INT_CST_LOW (op1) == min)
9734 return omit_one_operand (type, integer_zero_node, op0);
9741 return omit_one_operand (type, integer_one_node, op0);
/* OP1 == MIN+1: convert to an equality against MIN by subtracting one.  */
9750 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9751 && TREE_INT_CST_LOW (op1) == min + 1)
9756 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9760 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned OP1 == signed MAX: x <= signed_max / x > signed_max become
   sign tests on the signed equivalent type.  */
9766 else if (TREE_INT_CST_HIGH (op1) == 0
9767 && TREE_INT_CST_LOW (op1) == signed_max
9768 && TYPE_UNSIGNED (TREE_TYPE (op1))
9769 /* signed_type does not work on pointer types. */
9770 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9772 /* The following case also applies to X < signed_max+1
9773 and X >= signed_max+1 because previous transformations. */
9774 if (code == LE_EXPR || code == GT_EXPR)
9776 tree st0, st1, exp, retval;
9777 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9778 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9780 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9782 fold_convert (st0, op0),
9783 fold_convert (st1, integer_zero_node));
9786 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9788 TREE_OPERAND (exp, 0),
9789 TREE_OPERAND (exp, 1));
9791 /* If we are in gimple form, then returning EXP would create
9792 non-gimple expressions. Clearing it is safe and insures
9793 we do not allow a non-gimple expression to escape. */
9797 return (retval ? retval : exp);
9806 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9807 attempt to fold the expression to a constant without modifying TYPE,
9810 If the expression could be simplified to a constant, then return
9811 the constant. If the expression would not be simplified to a
9812 constant, then return NULL_TREE.
9814 Note this is primarily designed to be called after gimplification
9815 of the tree structures and when at least one operand is a constant.
9816 As a result of those simplifying assumptions this routine is far
9817 simpler than the generic fold routine. */
9820 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9828 /* If this is a commutative operation, and ARG0 is a constant, move it
9829 to ARG1 to reduce the number of tests below. */
9830 if (commutative_tree_code (code)
9831 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9838 /* If either operand is a complex type, extract its real component. */
9839 if (TREE_CODE (op0) == COMPLEX_CST)
9840 subop0 = TREE_REALPART (op0);
9844 if (TREE_CODE (op1) == COMPLEX_CST)
9845 subop1 = TREE_REALPART (op1);
9849 /* Note if either argument is not a real or integer constant.
9850 With a few exceptions, simplification is limited to cases
9851 where both arguments are constants. */
9852 if ((TREE_CODE (subop0) != INTEGER_CST
9853 && TREE_CODE (subop0) != REAL_CST)
9854 || (TREE_CODE (subop1) != INTEGER_CST
9855 && TREE_CODE (subop1) != REAL_CST))
9861 /* (plus (address) (const_int)) is a constant. */
9862 if (TREE_CODE (op0) == PLUS_EXPR
9863 && TREE_CODE (op1) == INTEGER_CST
9864 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9865 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9866 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9868 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9870 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9871 const_binop (PLUS_EXPR, op1,
9872 TREE_OPERAND (op0, 1), 0));
9880 /* Both arguments are constants. Simplify. */
9881 tem = const_binop (code, op0, op1, 0);
9882 if (tem != NULL_TREE)
9884 /* The return value should always have the same type as
9885 the original expression. */
9886 if (TREE_TYPE (tem) != type)
9887 tem = fold_convert (type, tem);
9894 /* Fold &x - &x. This can happen from &x.foo - &x.
9895 This is unsafe for certain floats even in non-IEEE formats.
9896 In IEEE, it is unsafe because it does wrong for NaNs.
9897 Also note that operand_equal_p is always false if an
9898 operand is volatile. */
9899 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9900 return fold_convert (type, integer_zero_node);
9906 /* Special case multiplication or bitwise AND where one argument
9908 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9909 return omit_one_operand (type, op1, op0);
9911 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9912 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9913 && real_zerop (op1))
9914 return omit_one_operand (type, op1, op0);
9919 /* Special case when we know the result will be all ones. */
9920 if (integer_all_onesp (op1))
9921 return omit_one_operand (type, op1, op0);
9925 case TRUNC_DIV_EXPR:
9926 case ROUND_DIV_EXPR:
9927 case FLOOR_DIV_EXPR:
9929 case EXACT_DIV_EXPR:
9930 case TRUNC_MOD_EXPR:
9931 case ROUND_MOD_EXPR:
9932 case FLOOR_MOD_EXPR:
9935 /* Division by zero is undefined. */
9936 if (integer_zerop (op1))
9939 if (TREE_CODE (op1) == REAL_CST
9940 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9941 && real_zerop (op1))
9947 if (INTEGRAL_TYPE_P (type)
9948 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9949 return omit_one_operand (type, op1, op0);
9954 if (INTEGRAL_TYPE_P (type)
9955 && TYPE_MAX_VALUE (type)
9956 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9957 return omit_one_operand (type, op1, op0);
9962 /* Optimize -1 >> x for arithmetic right shifts. */
9963 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9964 return omit_one_operand (type, op0, op1);
9965 /* ... fall through ... */
9968 if (integer_zerop (op0))
9969 return omit_one_operand (type, op0, op1);
9971 /* Since negative shift count is not well-defined, don't
9972 try to compute it in the compiler. */
9973 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9980 /* -1 rotated either direction by any amount is still -1. */
9981 if (integer_all_onesp (op0))
9982 return omit_one_operand (type, op0, op1);
9984 /* 0 rotated either direction by any amount is still zero. */
9985 if (integer_zerop (op0))
9986 return omit_one_operand (type, op0, op1);
9992 return build_complex (type, op0, op1);
10001 /* If one arg is a real or integer constant, put it last. */
10002 if ((TREE_CODE (op0) == INTEGER_CST
10003 && TREE_CODE (op1) != INTEGER_CST)
10004 || (TREE_CODE (op0) == REAL_CST
10005 && TREE_CODE (op0) != REAL_CST))
10012 code = swap_tree_comparison (code);
10015 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10016 This transformation affects the cases which are handled in later
10017 optimizations involving comparisons with non-negative constants. */
10018 if (TREE_CODE (op1) == INTEGER_CST
10019 && TREE_CODE (op0) != INTEGER_CST
10020 && tree_int_cst_sgn (op1) > 0)
10026 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10031 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10039 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10043 /* Fall through. */
10046 case UNORDERED_EXPR:
10056 return fold_relational_const (code, type, op0, op1);
10059 /* This could probably be handled. */
10062 case TRUTH_AND_EXPR:
10063 /* If second arg is constant zero, result is zero, but first arg
10064 must be evaluated. */
10065 if (integer_zerop (op1))
10066 return omit_one_operand (type, op1, op0);
10067 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10068 case will be handled here. */
10069 if (integer_zerop (op0))
10070 return omit_one_operand (type, op0, op1);
10071 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10072 return constant_boolean_node (true, type);
10075 case TRUTH_OR_EXPR:
10076 /* If second arg is constant true, result is true, but we must
10077 evaluate first arg. */
10078 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10079 return omit_one_operand (type, op1, op0);
10080 /* Likewise for first arg, but note this only occurs here for
10082 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10083 return omit_one_operand (type, op0, op1);
10084 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10085 return constant_boolean_node (false, type);
10088 case TRUTH_XOR_EXPR:
10089 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10091 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10092 return constant_boolean_node (x, type);
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.

   NOTE(review): this numbered listing has gaps (the left-hand
   original line numbers jump), so some statements of the function
   body are not visible here; comments below describe only the
   visible code.  */
10115 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10118 /* Make sure we have a suitable constant argument. */
10119 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
/* For conversions of a COMPLEX_CST, the constant-ness check below is
   applied to the real part.  */
10123 if (TREE_CODE (op0) == COMPLEX_CST)
10124 subop = TREE_REALPART (op0);
10128 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Conversions between integer and floating constants are delegated
   to fold_convert_const.  */
10137 case FIX_TRUNC_EXPR:
10138 case FIX_FLOOR_EXPR:
10139 case FIX_CEIL_EXPR:
10140 return fold_convert_const (code, type, op0);
/* Negation of an integer or real constant.  */
10143 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10144 return fold_negate_const (op0, type);
/* Absolute value of an integer or real constant.  */
10149 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10150 return fold_abs_const (op0, type);
/* Bitwise complement of an integer constant.  */
10155 if (TREE_CODE (op0) == INTEGER_CST)
10156 return fold_not_const (op0, type);
/* Real and imaginary parts of a complex constant fold directly.  */
10160 case REALPART_EXPR:
10161 if (TREE_CODE (op0) == COMPLEX_CST)
10162 return TREE_REALPART (op0);
10166 case IMAGPART_EXPR:
10167 if (TREE_CODE (op0) == COMPLEX_CST)
10168 return TREE_IMAGPART (op0);
/* Complex conjugate: keep the real part, negate the imaginary part.  */
10173 if (TREE_CODE (op0) == COMPLEX_CST
10174 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10175 return build_complex (type, TREE_REALPART (op0),
10176 negate_expr (TREE_IMAGPART (op0)));
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.

   NOTE(review): the left-hand line numbers in this listing jump, so
   parts of this function (including the declaration of `string' and
   `index') are not visible here.  */
10189 fold_read_from_constant_string (tree exp)
10191 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10193 tree exp1 = TREE_OPERAND (exp, 0);
/* For *ptr, string_constant extracts the string and the byte offset.  */
10197 if (TREE_CODE (exp) == INDIRECT_REF)
10198 string = string_constant (exp1, &index);
/* For array[i], rebase the index against the array's lower bound.  */
10201 tree low_bound = array_ref_low_bound (exp);
10202 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10204 /* Optimize the special-case of a zero lower bound.
10206 We convert the low_bound to sizetype to avoid some problems
10207 with constant folding.  (E.g. suppose the lower bound is 1,
10208 and its mode is QI.  Without the conversion, (ARRAY
10209 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10210 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
10211 if (! integer_zerop (low_bound))
10212 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Only fold when the index is a known constant within the string's
   length and the element type is a one-byte integer-mode type that
   matches EXP's type.  */
10218 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10219 && TREE_CODE (string) == STRING_CST
10220 && TREE_CODE (index) == INTEGER_CST
10221 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10222 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10224 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10225 return fold_convert (TREE_TYPE (exp),
10226 build_int_cst (NULL_TREE,
10227 (TREE_STRING_POINTER (string)
10228 [TREE_INT_CST_LOW (index)])));
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */
10239 fold_negate_const (tree arg0, tree type)
10241 tree t = NULL_TREE;
10243 if (TREE_CODE (arg0) == INTEGER_CST)
/* Negate the double-word integer with neg_double, then force the
   result to fit TYPE.  Overflow is only recorded for signed types
   (unsigned negation wraps by definition).  */
10245 unsigned HOST_WIDE_INT low;
10246 HOST_WIDE_INT high;
10247 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10248 TREE_INT_CST_HIGH (arg0),
10250 t = build_int_cst_wide (type, low, high);
10251 t = force_fit_type (t, 1,
10252 (overflow | TREE_OVERFLOW (arg0))
10253 && !TYPE_UNSIGNED (type),
10254 TREE_CONSTANT_OVERFLOW (arg0));
/* Real constants negate exactly; no overflow tracking needed.  */
10256 else if (TREE_CODE (arg0) == REAL_CST)
10257 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10258 #ifdef ENABLE_CHECKING
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */
10272 fold_abs_const (tree arg0, tree type)
10274 tree t = NULL_TREE;
10276 if (TREE_CODE (arg0) == INTEGER_CST)
10278 /* If the value is unsigned, then the absolute value is
10279 the same as the ordinary value.  */
10280 if (TYPE_UNSIGNED (type))
10282 /* Similarly, if the value is non-negative.  */
10283 else if (INT_CST_LT (integer_minus_one_node, arg0))
10285 /* If the value is negative, then the absolute value is
its negation: compute it with neg_double and refit to TYPE.  */
10289 unsigned HOST_WIDE_INT low;
10290 HOST_WIDE_INT high;
10291 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10292 TREE_INT_CST_HIGH (arg0),
10294 t = build_int_cst_wide (type, low, high);
10295 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10296 TREE_CONSTANT_OVERFLOW (arg0));
/* For reals, only a negative value needs work: negate it.  */
10300 else if (TREE_CODE (arg0) == REAL_CST)
10302 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10303 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10307 #ifdef ENABLE_CHECKING
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */
10319 fold_not_const (tree arg0, tree type)
10321 tree t = NULL_TREE;
10323 if (TREE_CODE (arg0) == INTEGER_CST)
/* Complement both halves of the double-word constant, then force the
   result to fit TYPE, propagating the operand's overflow flags.  */
10325 t = build_int_cst_wide (type,
10326 ~ TREE_INT_CST_LOW (arg0),
10327 ~ TREE_INT_CST_HIGH (arg0));
10328 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10329 TREE_CONSTANT_OVERFLOW (arg0));
10331 #ifdef ENABLE_CHECKING
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.

   NOTE(review): the left-hand line numbers in this listing jump, so
   parts of this function (including the NaN-handling switch body)
   are not visible here.  */
10345 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10347 int result, invert;
10349 /* From here on, the only cases we handle are when the result is
10350 known to be a constant.  */
10352 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10354 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10355 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10357 /* Handle the cases where either operand is a NaN.  */
10358 if (real_isnan (c0) || real_isnan (c1))
10368 case UNORDERED_EXPR:
/* NOTE(review): with -ftrapping-math, folding a comparison of NaNs
   would lose a trap; the branch guarded here presumably bails out —
   confirm against the full source, the body is not visible.  */
10382 if (flag_trapping_math)
10391 return constant_boolean_node (result, type);
/* Neither operand is a NaN: compare the two reals directly.  */
10394 return constant_boolean_node (real_compare (code, c0, c1), type);
10397 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10399 To compute GT, swap the arguments and do LT.
10400 To compute GE, do LT and invert the result.
10401 To compute LE, swap the arguments, do LT and invert the result.
10402 To compute NE, do EQ and invert the result.
10404 Therefore, the code below must handle only EQ and LT.  */
10406 if (code == LE_EXPR || code == GT_EXPR)
10411 code = swap_tree_comparison (code);
10414 /* Note that it is safe to invert for real values here because we
10415 have already handled the one case that it matters.  */
10418 if (code == NE_EXPR || code == GE_EXPR)
10421 code = invert_tree_comparison (code, false);
10424 /* Compute a result for LT or EQ if args permit;
10425 Otherwise return T.  */
10426 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10428 if (code == EQ_EXPR)
10429 result = tree_int_cst_equal (op0, op1);
/* Ordering of INTEGER_CSTs must respect the operand's signedness.  */
10430 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10431 result = INT_CST_LT_UNSIGNED (op0, op1);
10433 result = INT_CST_LT (op0, op1);
10440 return constant_boolean_node (result, type);
/* Build an expression for the address of T, giving the result the
   pointer type PTRTYPE.  Folds away INDIRECT_REF to avoid confusing
   the gimplify process.  */
10447 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10449 /* The size of the object is not relevant when talking about its address.  */
10450 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10451 t = TREE_OPERAND (t, 0);
/* &*p folds to p; add a NOP_EXPR only when the pointer type differs.  */
10453 if (TREE_CODE (t) == INDIRECT_REF)
10455 t = TREE_OPERAND (t, 0);
10456 if (TREE_TYPE (t) != ptrtype)
10457 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise strip component references down to the innermost base
   object, mark it addressable, and take the address of T itself.  */
10463 while (handled_component_p (base)
10464 || TREE_CODE (base) == REALPART_EXPR
10465 || TREE_CODE (base) == IMAGPART_EXPR)
10466 base = TREE_OPERAND (base, 0);
10468 TREE_ADDRESSABLE (base) = 1;
10470 t = build1 (ADDR_EXPR, ptrtype, t);
/* Build an expression for the address of T, using a pointer type
   derived from T's own type.  Convenience wrapper around
   build_fold_addr_expr_with_type.  */
10477 build_fold_addr_expr (tree t)
10479 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
/* Builds an expression for an indirection through T, simplifying some
   cases.  */
10486 build_fold_indirect_ref (tree t)
10488 tree type = TREE_TYPE (TREE_TYPE (t));
/* Simplify dereferences of an ADDR_EXPR.  */
10493 if (TREE_CODE (sub) == ADDR_EXPR)
10495 tree op = TREE_OPERAND (sub, 0);
10496 tree optype = TREE_TYPE (op);
/* *(foo *)&foo => foo, when the types are compatible.  */
10498 if (lang_hooks.types_compatible_p (type, optype))
10500 /* *(foo *)&fooarray => fooarray[0] */
10501 else if (TREE_CODE (optype) == ARRAY_TYPE
10502 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10503 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10506 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10507 subtype = TREE_TYPE (sub);
10508 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10509 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10511 sub = build_fold_indirect_ref (sub);
10512 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
/* No simplification applies: build a plain INDIRECT_REF.  */
10515 return build1 (INDIRECT_REF, type, t);
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.

   NOTE(review): the listing omits some lines of the loop body (the
   left-hand line numbers jump); comments describe only visible code.  */
10523 fold_ignored_result (tree t)
/* An expression with no side effects can be dropped entirely.  */
10525 if (!TREE_SIDE_EFFECTS (t))
10526 return integer_zero_node;
10529 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary nodes: continue with the operand.  */
10532 t = TREE_OPERAND (t, 0);
/* Binary nodes: descend into whichever operand carries the side
   effects, if the other one is side-effect free.  */
10537 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10538 t = TREE_OPERAND (t, 0);
10539 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10540 t = TREE_OPERAND (t, 1);
10546 switch (TREE_CODE (t))
10548 case COMPOUND_EXPR:
10549 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10551 t = TREE_OPERAND (t, 0);
/* Conditional: only strippable when both arms are side-effect free.  */
10555 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10556 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10558 t = TREE_OPERAND (t, 0);
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */
10575 round_up (tree value, int divisor)
10577 tree div = NULL_TREE;
/* See if VALUE is already a multiple of DIVISOR.  If so, we don't
   have to do anything.  Only do this when we are not given a
   constant, because in that case this check is more expensive than
   simply performing the computation below.  */
10588 if (TREE_CODE (value) != INTEGER_CST)
10590 div = build_int_cst (TREE_TYPE (value), divisor);
10592 if (multiple_of_p (TREE_TYPE (value), value, div))
10596 /* If divisor is a power of two, simplify this to bit manipulation.  */
10597 if (divisor == (divisor & -divisor))
/* Round up as (VALUE + (DIVISOR - 1)) & -DIVISOR.  */
10601 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10602 value = size_binop (PLUS_EXPR, value, t);
10603 t = build_int_cst (TREE_TYPE (value), -divisor);
10604 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceiling-divide, then multiply back by the divisor.  */
10609 div = build_int_cst (TREE_TYPE (value), divisor);
10610 value = size_binop (CEIL_DIV_EXPR, value, div);
10611 value = size_binop (MULT_EXPR, value, div);
10617 /* Likewise, but round down. */
10620 round_down (tree value, int divisor)
10622 tree div = NULL_TREE;
10629 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10630 have to do anything. Only do this when we are not given a const,
10631 because in that case, this check is more expensive than just
10633 if (TREE_CODE (value) != INTEGER_CST)
10635 div = build_int_cst (TREE_TYPE (value), divisor);
10637 if (multiple_of_p (TREE_TYPE (value), value, div))
10641 /* If divisor is a power of two, simplify this to bit manipulation. */
10642 if (divisor == (divisor & -divisor))
10646 t = build_int_cst (TREE_TYPE (value), -divisor);
10647 value = size_binop (BIT_AND_EXPR, value, t);
10652 div = build_int_cst (TREE_TYPE (value), divisor);
10653 value = size_binop (FLOOR_DIV_EXPR, value, div);
10654 value = size_binop (MULT_EXPR, value, div);