1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static tree fold_convert_const (enum tree_code, tree, tree);
93 static enum tree_code invert_tree_comparison (enum tree_code, bool);
94 static enum comparison_code comparison_to_compcode (enum tree_code);
95 static enum tree_code compcode_to_comparison (enum comparison_code);
96 static tree combine_comparisons (enum tree_code, enum tree_code,
97 enum tree_code, tree, tree, tree);
98 static int truth_value_p (enum tree_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand (tree, tree, tree);
103 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (tree, tree, int, int, int);
105 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
106 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
107 enum machine_mode *, int *, int *,
109 static int all_ones_mask_p (tree, int);
110 static tree sign_bit_p (tree, tree);
111 static int simple_operand_p (tree);
112 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 static tree fold_range_test (tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
127 static bool fold_real_zero_addition_p (tree, tree, int);
128 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
131 static tree fold_div_compare (enum tree_code, tree, tree, tree);
132 static bool reorder_operands_p (tree, tree);
133 static tree fold_negate_const (tree, tree);
134 static tree fold_not_const (tree, tree);
135 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 static tree fold_relational_hi_lo (enum tree_code *, const tree,
138 static bool tree_expr_nonzero_p (tree);
140 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
141 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
142 and SUM1. Then this yields nonzero if overflow occurred during the
145 Overflow occurs if A and B have the same sign, but A and SUM differ in
146 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
/* Nonzero iff the signed addition A + B (whose wrapped result is SUM)
   overflowed: A and B have the same sign but SUM's sign differs.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
150 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
151 We do that by representing the two-word integer in 4 words, with only
152 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
153 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* Extract the low HOST_BITS_PER_WIDE_INT/2 bits of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* Extract the high HOST_BITS_PER_WIDE_INT/2 bits of X.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the half-word digits: word value == LOWPART + HIGHPART * BASE.  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs. */
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
186 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
187 in overflow of the value, when >0 we are only interested in signed
188 overflow, for <0 we are interested in any overflow. OVERFLOWED
189 indicates whether overflow has already occurred. CONST_OVERFLOWED
190 indicates whether constant overflow has already occurred. We force
191 T's value to be within range of T's type (by setting to 0 or 1 all
192 the bits outside the type's range). We set TREE_OVERFLOWED if,
193 OVERFLOWED is non-zero,
194 or OVERFLOWABLE is >0 and signed overflow occurs
195 or OVERFLOWABLE is <0 and any overflow occurs
196 We set TREE_CONSTANT_OVERFLOWED if,
197 CONST_OVERFLOWED is non-zero
198 or we set TREE_OVERFLOWED.
199 We return either the original T, or a copy. */
202 force_fit_type (tree t, int overflowable,
203 bool overflowed, bool overflowed_const)
205 unsigned HOST_WIDE_INT low;
208 int sign_extended_type;
210 gcc_assert (TREE_CODE (t) == INTEGER_CST);
212 low = TREE_INT_CST_LOW (t);
213 high = TREE_INT_CST_HIGH (t);
215 if (POINTER_TYPE_P (TREE_TYPE (t))
216 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
219 prec = TYPE_PRECISION (TREE_TYPE (t));
220 /* Size types *are* sign extended. */
221 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
222 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
223 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
225 /* First clear all bits that are beyond the type's precision. */
227 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
229 else if (prec > HOST_BITS_PER_WIDE_INT)
230 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
234 if (prec < HOST_BITS_PER_WIDE_INT)
235 low &= ~((HOST_WIDE_INT) (-1) << prec);
238 if (!sign_extended_type)
239 /* No sign extension */;
240 else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
241 /* Correct width already. */;
242 else if (prec > HOST_BITS_PER_WIDE_INT)
244 /* Sign extend top half? */
245 if (high & ((unsigned HOST_WIDE_INT)1
246 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
247 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
249 else if (prec == HOST_BITS_PER_WIDE_INT)
251 if ((HOST_WIDE_INT)low < 0)
256 /* Sign extend bottom half? */
257 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
260 low |= (HOST_WIDE_INT)(-1) << prec;
264 /* If the value changed, return a new node. */
265 if (overflowed || overflowed_const
266 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
268 t = build_int_cst_wide (TREE_TYPE (t), low, high);
272 || (overflowable > 0 && sign_extended_type))
275 TREE_OVERFLOW (t) = 1;
276 TREE_CONSTANT_OVERFLOW (t) = 1;
278 else if (overflowed_const)
281 TREE_CONSTANT_OVERFLOW (t) = 1;
288 /* Add two doubleword integers with doubleword result.
289 Each argument is given as two `HOST_WIDE_INT' pieces.
290 One argument is L1 and H1; the other, L2 and H2.
291 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
294 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
295 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
296 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
298 unsigned HOST_WIDE_INT l;
302 h = h1 + h2 + (l < l1);
306 return OVERFLOW_SUM_SIGN (h1, h2, h);
309 /* Negate a doubleword integer with doubleword result.
310 Return nonzero if the operation overflows, assuming it's signed.
311 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
312 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
315 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
316 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
322 return (*hv & h1) < 0;
332 /* Multiply two doubleword integers with doubleword result.
333 Return nonzero if the operation overflows, assuming it's signed.
334 Each argument is given as two `HOST_WIDE_INT' pieces.
335 One argument is L1 and H1; the other, L2 and H2.
336 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
339 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
340 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
341 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
343 HOST_WIDE_INT arg1[4];
344 HOST_WIDE_INT arg2[4];
345 HOST_WIDE_INT prod[4 * 2];
346 unsigned HOST_WIDE_INT carry;
348 unsigned HOST_WIDE_INT toplow, neglow;
349 HOST_WIDE_INT tophigh, neghigh;
351 encode (arg1, l1, h1);
352 encode (arg2, l2, h2);
354 memset (prod, 0, sizeof prod);
356 for (i = 0; i < 4; i++)
359 for (j = 0; j < 4; j++)
362 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
363 carry += arg1[i] * arg2[j];
364 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
366 prod[k] = LOWPART (carry);
367 carry = HIGHPART (carry);
372 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
374 /* Check for overflow by calculating the top half of the answer in full;
375 it should agree with the low half's sign bit. */
376 decode (prod + 4, &toplow, &tophigh);
379 neg_double (l2, h2, &neglow, &neghigh);
380 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
384 neg_double (l1, h1, &neglow, &neghigh);
385 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
387 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
390 /* Shift the doubleword integer in L1, H1 left by COUNT places
391 keeping only PREC bits of result.
392 Shift right if COUNT is negative.
393 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
394 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
397 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
398 HOST_WIDE_INT count, unsigned int prec,
399 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
401 unsigned HOST_WIDE_INT signmask;
405 rshift_double (l1, h1, -count, prec, lv, hv, arith);
409 if (SHIFT_COUNT_TRUNCATED)
412 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
414 /* Shifting by the host word size is undefined according to the
415 ANSI standard, so we must handle this as a special case. */
419 else if (count >= HOST_BITS_PER_WIDE_INT)
421 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
426 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
427 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
431 /* Sign extend all bits that are beyond the precision. */
433 signmask = -((prec > HOST_BITS_PER_WIDE_INT
434 ? ((unsigned HOST_WIDE_INT) *hv
435 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
436 : (*lv >> (prec - 1))) & 1);
438 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
440 else if (prec >= HOST_BITS_PER_WIDE_INT)
442 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
443 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
448 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
449 *lv |= signmask << prec;
453 /* Shift the doubleword integer in L1, H1 right by COUNT places
454 keeping only PREC bits of result. COUNT must be positive.
455 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
456 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
459 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
460 HOST_WIDE_INT count, unsigned int prec,
461 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
464 unsigned HOST_WIDE_INT signmask;
467 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
470 if (SHIFT_COUNT_TRUNCATED)
473 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
475 /* Shifting by the host word size is undefined according to the
476 ANSI standard, so we must handle this as a special case. */
480 else if (count >= HOST_BITS_PER_WIDE_INT)
483 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
487 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
489 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
492 /* Zero / sign extend all bits that are beyond the precision. */
494 if (count >= (HOST_WIDE_INT)prec)
499 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
501 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
503 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
504 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
509 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
510 *lv |= signmask << (prec - count);
514 /* Rotate the doubleword integer in L1, H1 left by COUNT places
515 keeping only PREC bits of result.
516 Rotate right if COUNT is negative.
517 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
521 HOST_WIDE_INT count, unsigned int prec,
522 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
524 unsigned HOST_WIDE_INT s1l, s2l;
525 HOST_WIDE_INT s1h, s2h;
531 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
532 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
537 /* Rotate the doubleword integer in L1, H1 left by COUNT places
538 keeping only PREC bits of result. COUNT must be positive.
539 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
542 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
543 HOST_WIDE_INT count, unsigned int prec,
544 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
546 unsigned HOST_WIDE_INT s1l, s2l;
547 HOST_WIDE_INT s1h, s2h;
553 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
554 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
559 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
560 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
561 CODE is a tree code for a kind of division, one of
562 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
564 It controls how the quotient is rounded to an integer.
565 Return nonzero if the operation overflows.
566 UNS nonzero says do unsigned division. */
569 div_and_round_double (enum tree_code code, int uns,
570 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
571 HOST_WIDE_INT hnum_orig,
572 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
573 HOST_WIDE_INT hden_orig,
574 unsigned HOST_WIDE_INT *lquo,
575 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
579 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
580 HOST_WIDE_INT den[4], quo[4];
582 unsigned HOST_WIDE_INT work;
583 unsigned HOST_WIDE_INT carry = 0;
584 unsigned HOST_WIDE_INT lnum = lnum_orig;
585 HOST_WIDE_INT hnum = hnum_orig;
586 unsigned HOST_WIDE_INT lden = lden_orig;
587 HOST_WIDE_INT hden = hden_orig;
590 if (hden == 0 && lden == 0)
591 overflow = 1, lden = 1;
593 /* Calculate quotient sign and convert operands to unsigned. */
599 /* (minimum integer) / (-1) is the only overflow case. */
600 if (neg_double (lnum, hnum, &lnum, &hnum)
601 && ((HOST_WIDE_INT) lden & hden) == -1)
607 neg_double (lden, hden, &lden, &hden);
611 if (hnum == 0 && hden == 0)
612 { /* single precision */
614 /* This unsigned division rounds toward zero. */
620 { /* trivial case: dividend < divisor */
621 /* hden != 0 already checked. */
628 memset (quo, 0, sizeof quo);
630 memset (num, 0, sizeof num); /* to zero 9th element */
631 memset (den, 0, sizeof den);
633 encode (num, lnum, hnum);
634 encode (den, lden, hden);
636 /* Special code for when the divisor < BASE. */
637 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
639 /* hnum != 0 already checked. */
640 for (i = 4 - 1; i >= 0; i--)
642 work = num[i] + carry * BASE;
643 quo[i] = work / lden;
649 /* Full double precision division,
650 with thanks to Don Knuth's "Seminumerical Algorithms". */
651 int num_hi_sig, den_hi_sig;
652 unsigned HOST_WIDE_INT quo_est, scale;
654 /* Find the highest nonzero divisor digit. */
655 for (i = 4 - 1;; i--)
662 /* Insure that the first digit of the divisor is at least BASE/2.
663 This is required by the quotient digit estimation algorithm. */
665 scale = BASE / (den[den_hi_sig] + 1);
667 { /* scale divisor and dividend */
669 for (i = 0; i <= 4 - 1; i++)
671 work = (num[i] * scale) + carry;
672 num[i] = LOWPART (work);
673 carry = HIGHPART (work);
678 for (i = 0; i <= 4 - 1; i++)
680 work = (den[i] * scale) + carry;
681 den[i] = LOWPART (work);
682 carry = HIGHPART (work);
683 if (den[i] != 0) den_hi_sig = i;
690 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
692 /* Guess the next quotient digit, quo_est, by dividing the first
693 two remaining dividend digits by the high order quotient digit.
694 quo_est is never low and is at most 2 high. */
695 unsigned HOST_WIDE_INT tmp;
697 num_hi_sig = i + den_hi_sig + 1;
698 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
699 if (num[num_hi_sig] != den[den_hi_sig])
700 quo_est = work / den[den_hi_sig];
704 /* Refine quo_est so it's usually correct, and at most one high. */
705 tmp = work - quo_est * den[den_hi_sig];
707 && (den[den_hi_sig - 1] * quo_est
708 > (tmp * BASE + num[num_hi_sig - 2])))
711 /* Try QUO_EST as the quotient digit, by multiplying the
712 divisor by QUO_EST and subtracting from the remaining dividend.
713 Keep in mind that QUO_EST is the I - 1st digit. */
716 for (j = 0; j <= den_hi_sig; j++)
718 work = quo_est * den[j] + carry;
719 carry = HIGHPART (work);
720 work = num[i + j] - LOWPART (work);
721 num[i + j] = LOWPART (work);
722 carry += HIGHPART (work) != 0;
725 /* If quo_est was high by one, then num[i] went negative and
726 we need to correct things. */
727 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
730 carry = 0; /* add divisor back in */
731 for (j = 0; j <= den_hi_sig; j++)
733 work = num[i + j] + den[j] + carry;
734 carry = HIGHPART (work);
735 num[i + j] = LOWPART (work);
738 num [num_hi_sig] += carry;
741 /* Store the quotient digit. */
746 decode (quo, lquo, hquo);
749 /* If result is negative, make it so. */
751 neg_double (*lquo, *hquo, lquo, hquo);
753 /* Compute trial remainder: rem = num - (quo * den) */
754 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
755 neg_double (*lrem, *hrem, lrem, hrem);
756 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
761 case TRUNC_MOD_EXPR: /* round toward zero */
762 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
766 case FLOOR_MOD_EXPR: /* round toward negative infinity */
767 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
770 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
778 case CEIL_MOD_EXPR: /* round toward positive infinity */
779 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
781 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
789 case ROUND_MOD_EXPR: /* round to closest integer */
791 unsigned HOST_WIDE_INT labs_rem = *lrem;
792 HOST_WIDE_INT habs_rem = *hrem;
793 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
794 HOST_WIDE_INT habs_den = hden, htwice;
796 /* Get absolute values. */
798 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
800 neg_double (lden, hden, &labs_den, &habs_den);
802 /* If (2 * abs (lrem) >= abs (lden)) */
803 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
804 labs_rem, habs_rem, <wice, &htwice);
806 if (((unsigned HOST_WIDE_INT) habs_den
807 < (unsigned HOST_WIDE_INT) htwice)
808 || (((unsigned HOST_WIDE_INT) habs_den
809 == (unsigned HOST_WIDE_INT) htwice)
810 && (labs_den < ltwice)))
814 add_double (*lquo, *hquo,
815 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
830 /* Compute true remainder: rem = num - (quo * den) */
831 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
832 neg_double (*lrem, *hrem, lrem, hrem);
833 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
837 /* Return true if built-in mathematical function specified by CODE
838 preserves the sign of it argument, i.e. -f(x) == f(-x). */
841 negate_mathfn_p (enum built_in_function code)
865 /* Check whether we may negate an integer constant T without causing
869 may_negate_without_overflow_p (tree t)
871 unsigned HOST_WIDE_INT val;
875 gcc_assert (TREE_CODE (t) == INTEGER_CST);
877 type = TREE_TYPE (t);
878 if (TYPE_UNSIGNED (type))
881 prec = TYPE_PRECISION (type);
882 if (prec > HOST_BITS_PER_WIDE_INT)
884 if (TREE_INT_CST_LOW (t) != 0)
886 prec -= HOST_BITS_PER_WIDE_INT;
887 val = TREE_INT_CST_HIGH (t);
890 val = TREE_INT_CST_LOW (t);
891 if (prec < HOST_BITS_PER_WIDE_INT)
892 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
893 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
896 /* Determine whether an expression T can be cheaply negated using
897 the function negate_expr. */
900 negate_expr_p (tree t)
907 type = TREE_TYPE (t);
910 switch (TREE_CODE (t))
913 if (TYPE_UNSIGNED (type) || ! flag_trapv)
916 /* Check that -CST will not overflow type. */
917 return may_negate_without_overflow_p (t);
924 return negate_expr_p (TREE_REALPART (t))
925 && negate_expr_p (TREE_IMAGPART (t));
928 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
930 /* -(A + B) -> (-B) - A. */
931 if (negate_expr_p (TREE_OPERAND (t, 1))
932 && reorder_operands_p (TREE_OPERAND (t, 0),
933 TREE_OPERAND (t, 1)))
935 /* -(A + B) -> (-A) - B. */
936 return negate_expr_p (TREE_OPERAND (t, 0));
939 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
940 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
941 && reorder_operands_p (TREE_OPERAND (t, 0),
942 TREE_OPERAND (t, 1));
945 if (TYPE_UNSIGNED (TREE_TYPE (t)))
951 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
952 return negate_expr_p (TREE_OPERAND (t, 1))
953 || negate_expr_p (TREE_OPERAND (t, 0));
957 /* Negate -((double)float) as (double)(-float). */
958 if (TREE_CODE (type) == REAL_TYPE)
960 tree tem = strip_float_extensions (t);
962 return negate_expr_p (tem);
967 /* Negate -f(x) as f(-x). */
968 if (negate_mathfn_p (builtin_mathfn_code (t)))
969 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
973 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
974 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
976 tree op1 = TREE_OPERAND (t, 1);
977 if (TREE_INT_CST_HIGH (op1) == 0
978 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
979 == TREE_INT_CST_LOW (op1))
990 /* Given T, an expression, return the negation of T. Allow for T to be
991 null, in which case return null. */
1002 type = TREE_TYPE (t);
1003 STRIP_SIGN_NOPS (t);
1005 switch (TREE_CODE (t))
1008 tem = fold_negate_const (t, type);
1009 if (! TREE_OVERFLOW (tem)
1010 || TYPE_UNSIGNED (type)
1016 tem = fold_negate_const (t, type);
1017 /* Two's complement FP formats, such as c4x, may overflow. */
1018 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1019 return fold_convert (type, tem);
1024 tree rpart = negate_expr (TREE_REALPART (t));
1025 tree ipart = negate_expr (TREE_IMAGPART (t));
1027 if ((TREE_CODE (rpart) == REAL_CST
1028 && TREE_CODE (ipart) == REAL_CST)
1029 || (TREE_CODE (rpart) == INTEGER_CST
1030 && TREE_CODE (ipart) == INTEGER_CST))
1031 return build_complex (type, rpart, ipart);
1036 return fold_convert (type, TREE_OPERAND (t, 0));
1039 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1041 /* -(A + B) -> (-B) - A. */
1042 if (negate_expr_p (TREE_OPERAND (t, 1))
1043 && reorder_operands_p (TREE_OPERAND (t, 0),
1044 TREE_OPERAND (t, 1)))
1046 tem = negate_expr (TREE_OPERAND (t, 1));
1047 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1048 tem, TREE_OPERAND (t, 0)));
1049 return fold_convert (type, tem);
1052 /* -(A + B) -> (-A) - B. */
1053 if (negate_expr_p (TREE_OPERAND (t, 0)))
1055 tem = negate_expr (TREE_OPERAND (t, 0));
1056 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1057 tem, TREE_OPERAND (t, 1)));
1058 return fold_convert (type, tem);
1064 /* - (A - B) -> B - A */
1065 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1066 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1067 return fold_convert (type,
1068 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1069 TREE_OPERAND (t, 1),
1070 TREE_OPERAND (t, 0))));
1074 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1080 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1082 tem = TREE_OPERAND (t, 1);
1083 if (negate_expr_p (tem))
1084 return fold_convert (type,
1085 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1086 TREE_OPERAND (t, 0),
1087 negate_expr (tem))));
1088 tem = TREE_OPERAND (t, 0);
1089 if (negate_expr_p (tem))
1090 return fold_convert (type,
1091 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1093 TREE_OPERAND (t, 1))));
1098 /* Convert -((double)float) into (double)(-float). */
1099 if (TREE_CODE (type) == REAL_TYPE)
1101 tem = strip_float_extensions (t);
1102 if (tem != t && negate_expr_p (tem))
1103 return fold_convert (type, negate_expr (tem));
1108 /* Negate -f(x) as f(-x). */
1109 if (negate_mathfn_p (builtin_mathfn_code (t))
1110 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1112 tree fndecl, arg, arglist;
1114 fndecl = get_callee_fndecl (t);
1115 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1116 arglist = build_tree_list (NULL_TREE, arg);
1117 return build_function_call_expr (fndecl, arglist);
1122 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1123 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1125 tree op1 = TREE_OPERAND (t, 1);
1126 if (TREE_INT_CST_HIGH (op1) == 0
1127 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1128 == TREE_INT_CST_LOW (op1))
1130 tree ntype = TYPE_UNSIGNED (type)
1131 ? lang_hooks.types.signed_type (type)
1132 : lang_hooks.types.unsigned_type (type);
1133 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1134 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1135 return fold_convert (type, temp);
1144 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1145 return fold_convert (type, tem);
1148 /* Split a tree IN into a constant, literal and variable parts that could be
1149 combined with CODE to make IN. "constant" means an expression with
1150 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1151 commutative arithmetic operation. Store the constant part into *CONP,
1152 the literal in *LITP and return the variable part. If a part isn't
1153 present, set it to null. If the tree does not decompose in this way,
1154 return the entire tree as the variable part and the other parts as null.
1156 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1157 case, we negate an operand that was subtracted. Except if it is a
1158 literal for which we use *MINUS_LITP instead.
1160 If NEGATE_P is true, we are negating all of IN, again except a literal
1161 for which we use *MINUS_LITP instead.
1163 If IN is itself a literal or constant, return it as appropriate.
1165 Note that we do not guarantee that any of the three values will be the
1166 same type as IN, but they will have the same signedness and mode. */
1169 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1170 tree *minus_litp, int negate_p)
1178 /* Strip any conversions that don't change the machine mode or signedness. */
1179 STRIP_SIGN_NOPS (in);
1181 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1183 else if (TREE_CODE (in) == code
1184 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1185 /* We can associate addition and subtraction together (even
1186 though the C standard doesn't say so) for integers because
1187 the value is not affected. For reals, the value might be
1188 affected, so we can't. */
1189 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1190 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1192 tree op0 = TREE_OPERAND (in, 0);
1193 tree op1 = TREE_OPERAND (in, 1);
1194 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1195 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1197 /* First see if either of the operands is a literal, then a constant. */
1198 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1199 *litp = op0, op0 = 0;
1200 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1201 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1203 if (op0 != 0 && TREE_CONSTANT (op0))
1204 *conp = op0, op0 = 0;
1205 else if (op1 != 0 && TREE_CONSTANT (op1))
1206 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1208 /* If we haven't dealt with either operand, this is not a case we can
1209 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1210 if (op0 != 0 && op1 != 0)
1215 var = op1, neg_var_p = neg1_p;
1217 /* Now do any needed negations. */
1219 *minus_litp = *litp, *litp = 0;
1221 *conp = negate_expr (*conp);
1223 var = negate_expr (var);
1225 else if (TREE_CONSTANT (in))
1233 *minus_litp = *litp, *litp = 0;
1234 else if (*minus_litp)
1235 *litp = *minus_litp, *minus_litp = 0;
1236 *conp = negate_expr (*conp);
1237 var = negate_expr (var);
1243 /* Re-associate trees split by the above function.  T1 and T2 are either
1244 expressions to associate or null.  Return the new expression, if any.  If
1245 we build an operation, do it in TYPE and with CODE.  */
/* NOTE(review): this excerpt is elided -- the return type line, braces and
   the null-operand early returns are not visible here; verify any edit
   against the complete source.  */
1248 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1255 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1256 try to fold this since we will have infinite recursion.  But do
1257 deal with any NEGATE_EXPRs.  */
1258 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1259 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1261 if (code == PLUS_EXPR)
/* A + (-B) is built directly as A - B (unfolded), which sidesteps the
   recursion noted above.  */
1263 if (TREE_CODE (t1) == NEGATE_EXPR)
1264 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1265 fold_convert (type, TREE_OPERAND (t1, 0)));
1266 else if (TREE_CODE (t2) == NEGATE_EXPR)
1267 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1268 fold_convert (type, TREE_OPERAND (t2, 0)));
/* Neither operand is a NEGATE_EXPR: build the operation without folding.  */
1270 return build2 (code, type, fold_convert (type, t1),
1271 fold_convert (type, t2));
/* Safe to fold: neither operand is CODE, PLUS_EXPR or MINUS_EXPR.  */
1274 return fold (build2 (code, type, fold_convert (type, t1),
1275 fold_convert (type, t2)));
1278 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1279 to produce a new constant.
1281 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): elided excerpt -- the return type, braces, the switch
   header, several case labels and break statements are not visible here;
   verify any edit against the complete source.  */
1284 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1286 unsigned HOST_WIDE_INT int1l, int2l;
1287 HOST_WIDE_INT int1h, int2h;
1288 unsigned HOST_WIDE_INT low;
1290 unsigned HOST_WIDE_INT garbagel;
1291 HOST_WIDE_INT garbageh;
1293 tree type = TREE_TYPE (arg1);
1294 int uns = TYPE_UNSIGNED (type);
/* Continuation of an elided declaration (presumably "int is_sizetype",
   used again at line 1434 below) -- TODO confirm against the full file.  */
1296 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1298 int no_overflow = 0;
/* Split each double-word integer constant into low/high halves.  */
1300 int1l = TREE_INT_CST_LOW (arg1);
1301 int1h = TREE_INT_CST_HIGH (arg1);
1302 int2l = TREE_INT_CST_LOW (arg2);
1303 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise cases (labels elided): computed independently per half.  */
1308 low = int1l | int2l, hi = int1h | int2h;
1312 low = int1l ^ int2l, hi = int1h ^ int2h;
1316 low = int1l & int2l, hi = int1h & int2h;
1322 /* It's unclear from the C standard whether shifts can overflow.
1323 The following code ignores overflow; perhaps a C standard
1324 interpretation ruling is needed. */
1325 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1333 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1338 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* MINUS is implemented as addition of the negated second operand;
   OVERFLOW_SUM_SIGN tests the sign bits accordingly.  */
1342 neg_double (int2l, int2h, &low, &hi);
1343 add_double (int1l, int1h, low, hi, &low, &hi);
1344 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1348 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1351 case TRUNC_DIV_EXPR:
1352 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1353 case EXACT_DIV_EXPR:
1354 /* This is a shortcut for a common special case. */
1355 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1356 && ! TREE_CONSTANT_OVERFLOW (arg1)
1357 && ! TREE_CONSTANT_OVERFLOW (arg2)
1358 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1360 if (code == CEIL_DIV_EXPR)
/* Both operands fit in one non-negative word: plain host division.  */
1363 low = int1l / int2l, hi = 0;
1367 /* ... fall through ... */
1369 case ROUND_DIV_EXPR:
/* Division by one, and X / X for X != 0, short-circuit (result lines
   partly elided).  */
1370 if (int2h == 0 && int2l == 1)
1372 low = int1l, hi = int1h;
1375 if (int1l == int2l && int1h == int2h
1376 && ! (int1l == 0 && int1h == 0))
/* General case: double-word division; the remainder is discarded into
   the garbage slots.  */
1381 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1382 &low, &hi, &garbagel, &garbageh);
1385 case TRUNC_MOD_EXPR:
1386 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1387 /* This is a shortcut for a common special case. */
1388 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1389 && ! TREE_CONSTANT_OVERFLOW (arg1)
1390 && ! TREE_CONSTANT_OVERFLOW (arg2)
1391 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1393 if (code == CEIL_MOD_EXPR)
1395 low = int1l % int2l, hi = 0;
1399 /* ... fall through ... */
1401 case ROUND_MOD_EXPR:
/* For MOD the quotient goes to the garbage slots; remainder to low/hi.  */
1402 overflow = div_and_round_double (code, uns,
1403 int1l, int1h, int2l, int2h,
1404 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX (labels elided): a double-word unsigned or signed "less than"
   is computed into LOW, then the matching operand is selected.  */
1410 low = (((unsigned HOST_WIDE_INT) int1h
1411 < (unsigned HOST_WIDE_INT) int2h)
1412 || (((unsigned HOST_WIDE_INT) int1h
1413 == (unsigned HOST_WIDE_INT) int2h)
1416 low = (int1h < int2h
1417 || (int1h == int2h && int1l < int2l));
1419 if (low == (code == MIN_EXPR))
1420 low = int1l, hi = int1h;
1422 low = int2l, hi = int2h;
1429 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
/* With NOTRUNC the overflow flags are propagated by hand ...  */
1433 /* Propagate overflow flags ourselves. */
1434 if (((!uns || is_sizetype) && overflow)
1435 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1438 TREE_OVERFLOW (t) = 1;
1439 TREE_CONSTANT_OVERFLOW (t) = 1;
1441 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1444 TREE_CONSTANT_OVERFLOW (t) = 1;
/* ... otherwise force_fit_type truncates to TYPE and sets the flags.  */
1448 t = force_fit_type (t, 1,
1449 ((!uns || is_sizetype) && overflow)
1450 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1451 TREE_CONSTANT_OVERFLOW (arg1)
1452 | TREE_CONSTANT_OVERFLOW (arg2));
1457 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1458 constant. We assume ARG1 and ARG2 have the same data type, or at least
1459 are the same kind of constant and the same machine mode.
1461 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): elided excerpt -- return type, braces, several returns,
   local declarations (d1, d2, t, magsquared) and some operand lines of
   the complex products are not visible here.  */
1464 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants are delegated to int_const_binop above.  */
1469 if (TREE_CODE (arg1) == INTEGER_CST)
1470 return int_const_binop (code, arg1, arg2, notrunc);
1472 if (TREE_CODE (arg1) == REAL_CST)
1474 enum machine_mode mode;
1477 REAL_VALUE_TYPE value;
1480 d1 = TREE_REAL_CST (arg1);
1481 d2 = TREE_REAL_CST (arg2);
1483 type = TREE_TYPE (arg1);
1484 mode = TYPE_MODE (type);
1486 /* Don't perform operation if we honor signaling NaNs and
1487 either operand is a NaN. */
1488 if (HONOR_SNANS (mode)
1489 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1492 /* Don't perform operation if it would raise a division
1493 by zero exception. */
1494 if (code == RDIV_EXPR
1495 && REAL_VALUES_EQUAL (d2, dconst0)
1496 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1499 /* If either operand is a NaN, just return it. Otherwise, set up
1500 for floating-point trap; we return an overflow. */
1501 if (REAL_VALUE_ISNAN (d1))
1503 else if (REAL_VALUE_ISNAN (d2))
/* Compute with the emulated FP arithmetic, then round to MODE.  */
1506 REAL_ARITHMETIC (value, code, d1, d2);
1508 t = build_real (type, real_value_truncate (mode, value));
1510 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1511 TREE_CONSTANT_OVERFLOW (t)
1513 | TREE_CONSTANT_OVERFLOW (arg1)
1514 | TREE_CONSTANT_OVERFLOW (arg2);
1517 if (TREE_CODE (arg1) == COMPLEX_CST)
1519 tree type = TREE_TYPE (arg1);
1520 tree r1 = TREE_REALPART (arg1);
1521 tree i1 = TREE_IMAGPART (arg1);
1522 tree r2 = TREE_REALPART (arg2);
1523 tree i2 = TREE_IMAGPART (arg2);
/* Complex +/- are done componentwise via recursive const_binop calls.  */
1529 t = build_complex (type,
1530 const_binop (PLUS_EXPR, r1, r2, notrunc),
1531 const_binop (PLUS_EXPR, i1, i2, notrunc));
1535 t = build_complex (type,
1536 const_binop (MINUS_EXPR, r1, r2, notrunc),
1537 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1r2 - i1i2) + (r1i2 + i1r2)i; the inner operand
   lines are partly elided here.  */
1541 t = build_complex (type,
1542 const_binop (MINUS_EXPR,
1543 const_binop (MULT_EXPR,
1545 const_binop (MULT_EXPR,
1548 const_binop (PLUS_EXPR,
1549 const_binop (MULT_EXPR,
1551 const_binop (MULT_EXPR,
/* Complex divide: magsquared = r2*r2 + i2*i2; each result component is
   a quotient over it, TRUNC_DIV for integral parts, RDIV for real.  */
1559 = const_binop (PLUS_EXPR,
1560 const_binop (MULT_EXPR, r2, r2, notrunc),
1561 const_binop (MULT_EXPR, i2, i2, notrunc),
1564 t = build_complex (type,
1566 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1567 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1568 const_binop (PLUS_EXPR,
1569 const_binop (MULT_EXPR, r1, r2,
1571 const_binop (MULT_EXPR, i1, i2,
1574 magsquared, notrunc),
1576 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1577 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1578 const_binop (MINUS_EXPR,
1579 const_binop (MULT_EXPR, i1, r2,
1581 const_binop (MULT_EXPR, r1, i2,
1584 magsquared, notrunc));
1596 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1597 indicates which particular sizetype to create. */
/* NOTE(review): return type line and braces are elided in this excerpt.  */
1600 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes the global sizetype_tab to pick the flavour of sizetype.  */
1602 return build_int_cst (sizetype_tab[(int) kind], number);
1605 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1606 is a tree code. The type of the result is taken from the operands.
1607 Both must be the same type integer type and it must be a size type.
1608 If the operands are constant, so is the result. */
/* NOTE(review): elided excerpt -- return type, braces and the return
   statements that follow each fast-path condition are not visible.  */
1611 size_binop (enum tree_code code, tree arg0, tree arg1)
1613 tree type = TREE_TYPE (arg0);
/* Both operands must share the same sizetype flavour.  */
1615 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1616 && type == TREE_TYPE (arg1));
1618 /* Handle the special case of two integer constants faster. */
1619 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1621 /* And some specific cases even faster than that. */
1622 if (code == PLUS_EXPR && integer_zerop (arg0))
1624 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1625 && integer_zerop (arg1))
1627 else if (code == MULT_EXPR && integer_onep (arg0))
1630 /* Handle general case of two integer constants. */
1631 return int_const_binop (code, arg0, arg1, 0);
1634 if (arg0 == error_mark_node || arg1 == error_mark_node)
1635 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1637 return fold (build2 (code, type, arg0, arg1));
1640 /* Given two values, either both of sizetype or both of bitsizetype,
1641 compute the difference between the two values. Return the value
1642 in signed type corresponding to the type of the operands. */
/* NOTE(review): elided excerpt -- return type, braces, the declaration of
   CTYPE and the tail of the final size_binop call are not visible.  */
1645 size_diffop (tree arg0, tree arg1)
1647 tree type = TREE_TYPE (arg0);
1650 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1651 && type == TREE_TYPE (arg1));
1653 /* If the type is already signed, just do the simple thing. */
1654 if (!TYPE_UNSIGNED (type))
1655 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) size type.  */
1657 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1659 /* If either operand is not a constant, do the conversions to the signed
1660 type and subtract. The hardware will do the right thing with any
1661 overflow in the subtraction. */
1662 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1663 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1664 fold_convert (ctype, arg1));
1666 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1667 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1668 overflow) and negate (which can't either). Special-case a result
1669 of zero while we're here. */
1670 if (tree_int_cst_equal (arg0, arg1))
1671 return fold_convert (ctype, integer_zero_node);
1672 else if (tree_int_cst_lt (arg1, arg0))
1673 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* ARG0 < ARG1: compute 0 - (ARG1 - ARG0) in the signed type (the inner
   subtraction's operands continue on an elided line).  */
1675 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1676 fold_convert (ctype, size_binop (MINUS_EXPR,
1681 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1682 type TYPE. If no simplification can be done return NULL_TREE. */
/* NOTE(review): elided excerpt -- return type, braces, local declarations
   (t, r, overflow) and several returns are not visible here.  */
1685 fold_convert_const (enum tree_code code, tree type, tree arg1)
1690 if (TREE_TYPE (arg1) == type)
1693 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1695 if (TREE_CODE (arg1) == INTEGER_CST)
1697 /* If we would build a constant wider than GCC supports,
1698 leave the conversion unfolded. */
1699 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1702 /* Given an integer constant, make new constant with new type,
1703 appropriately sign-extended or truncated. */
1704 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1705 TREE_INT_CST_HIGH (arg1))
1707 t = force_fit_type (t,
1708 /* Don't set the overflow when
1709 converting a pointer */
1710 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Overflow if a negative value is converted to a "more unsigned" type,
   or if ARG1 already overflowed.  */
1711 (TREE_INT_CST_HIGH (arg1) < 0
1712 && (TYPE_UNSIGNED (type)
1713 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1714 | TREE_OVERFLOW (arg1),
1715 TREE_CONSTANT_OVERFLOW (arg1));
1718 else if (TREE_CODE (arg1) == REAL_CST)
1720 /* The following code implements the floating point to integer
1721 conversion rules required by the Java Language Specification,
1722 that IEEE NaNs are mapped to zero and values that overflow
1723 the target precision saturate, i.e. values greater than
1724 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1725 are mapped to INT_MIN. These semantics are allowed by the
1726 C and C++ standards that simply state that the behavior of
1727 FP-to-integer conversion is unspecified upon overflow. */
1729 HOST_WIDE_INT high, low;
1731 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round X into R according to the FIX_* flavour (declaration of R and
   the switch header are elided).  */
1735 case FIX_TRUNC_EXPR:
1736 real_trunc (&r, VOIDmode, &x);
1740 real_ceil (&r, VOIDmode, &x);
1743 case FIX_FLOOR_EXPR:
1744 real_floor (&r, VOIDmode, &x);
1747 case FIX_ROUND_EXPR:
1748 real_round (&r, VOIDmode, &x);
1755 /* If R is NaN, return zero and show we have an overflow. */
1756 if (REAL_VALUE_ISNAN (r))
1763 /* See if R is less than the lower bound or greater than the
   upper bound of TYPE; if so, saturate to that bound.  (The original
   comment is truncated by elision.)  */
1768 tree lt = TYPE_MIN_VALUE (type);
1769 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1770 if (REAL_VALUES_LESS (r, l))
1773 high = TREE_INT_CST_HIGH (lt);
1774 low = TREE_INT_CST_LOW (lt);
1780 tree ut = TYPE_MAX_VALUE (type);
1783 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1784 if (REAL_VALUES_LESS (u, r))
1787 high = TREE_INT_CST_HIGH (ut);
1788 low = TREE_INT_CST_LOW (ut);
/* In range: convert exactly.  */
1794 REAL_VALUE_TO_INT (&low, &high, r);
1796 t = build_int_cst_wide (type, low, high);
1798 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1799 TREE_CONSTANT_OVERFLOW (arg1));
1803 else if (TREE_CODE (type) == REAL_TYPE)
1805 if (TREE_CODE (arg1) == INTEGER_CST)
1806 return build_real_from_int_cst (type, arg1);
1807 if (TREE_CODE (arg1) == REAL_CST)
/* NaN: keep the payload, only retype a copy of the node.  */
1809 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1811 /* We make a copy of ARG1 so that we don't modify an
1812 existing constant tree. */
1813 t = copy_node (arg1);
1814 TREE_TYPE (t) = type;
/* Non-NaN: truncate the REAL_CST to the target type's format.  */
1818 t = build_real (type,
1819 real_value_truncate (TYPE_MODE (type),
1820 TREE_REAL_CST (arg1)));
1822 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1823 TREE_CONSTANT_OVERFLOW (t)
1824 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1831 /* Convert expression ARG to type TYPE. Used by the middle-end for
1832 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): elided excerpt -- return type, braces, local declarations
   (tem, rpart, ipart), STRIP_NOPS and some returns are not visible.  */
1835 fold_convert (tree type, tree arg)
1837 tree orig = TREE_TYPE (arg);
1843 if (TREE_CODE (arg) == ERROR_MARK
1844 || TREE_CODE (type) == ERROR_MARK
1845 || TREE_CODE (orig) == ERROR_MARK)
1846 return error_mark_node;
/* Identical (or language-compatible) main variants need only a NOP.  */
1848 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1849 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1850 TYPE_MAIN_VARIANT (orig)))
1851 return fold (build1 (NOP_EXPR, type, arg));
/* Dispatch on the target type's tree code.  */
1853 switch (TREE_CODE (type))
1855 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1856 case POINTER_TYPE: case REFERENCE_TYPE:
1858 if (TREE_CODE (arg) == INTEGER_CST)
1860 tem = fold_convert_const (NOP_EXPR, type, arg);
1861 if (tem != NULL_TREE)
1864 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1865 || TREE_CODE (orig) == OFFSET_TYPE)
1866 return fold (build1 (NOP_EXPR, type, arg));
1867 if (TREE_CODE (orig) == COMPLEX_TYPE)
/* Complex -> scalar keeps only the real part.  */
1869 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1870 return fold_convert (type, tem);
1872 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1873 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1874 return fold (build1 (NOP_EXPR, type, arg));
/* Presumably the REAL_TYPE case (label elided): fold constants first.  */
1877 if (TREE_CODE (arg) == INTEGER_CST)
1879 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1880 if (tem != NULL_TREE)
1883 else if (TREE_CODE (arg) == REAL_CST)
1885 tem = fold_convert_const (NOP_EXPR, type, arg);
1886 if (tem != NULL_TREE)
1890 switch (TREE_CODE (orig))
1892 case INTEGER_TYPE: case CHAR_TYPE:
1893 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1894 case POINTER_TYPE: case REFERENCE_TYPE:
1895 return fold (build1 (FLOAT_EXPR, type, arg));
1898 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1902 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1903 return fold_convert (type, tem);
/* Presumably the COMPLEX_TYPE case (label elided).  */
1910 switch (TREE_CODE (orig))
1912 case INTEGER_TYPE: case CHAR_TYPE:
1913 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1914 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar -> complex: real part is the value, imaginary part zero.  */
1916 return build2 (COMPLEX_EXPR, type,
1917 fold_convert (TREE_TYPE (type), arg),
1918 fold_convert (TREE_TYPE (type), integer_zero_node))
1923 if (TREE_CODE (arg) == COMPLEX_EXPR)
1925 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1926 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1927 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* General complex -> complex: save ARG once so REALPART/IMAGPART share
   a single evaluation, convert each part, rebuild.  */
1930 arg = save_expr (arg);
1931 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1932 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1933 rpart = fold_convert (TREE_TYPE (type), rpart);
1934 ipart = fold_convert (TREE_TYPE (type), ipart);
1935 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* Presumably the VECTOR_TYPE case (label elided): sizes must match.  */
1943 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1944 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1945 || TREE_CODE (orig) == VECTOR_TYPE);
1946 return fold (build1 (NOP_EXPR, type, arg));
/* Presumably the VOID_TYPE case (label elided): value is discarded.  */
1949 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1956 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): elided excerpt -- the signature (presumably non_lvalue),
   braces and most case labels/returns of the switch are not visible.  */
1961 /* We only need to wrap lvalue tree codes. */
1962 switch (TREE_CODE (x))
1974 case ARRAY_RANGE_REF:
1980 case PREINCREMENT_EXPR:
1981 case PREDECREMENT_EXPR:
1983 case TRY_CATCH_EXPR:
1984 case WITH_CLEANUP_EXPR:
1995 /* Assume the worst for front-end tree codes. */
1996 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap X so callers can no longer use it as an lvalue.  */
2000 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2003 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2004 Zero means allow extended lvalues. */
2006 int pedantic_lvalues;
2008 /* When pedantic, return an expr equal to X but certainly not valid as a
2009 pedantic lvalue. Otherwise, return X. */
/* NOTE(review): return type line, braces and the trailing "return x" are
   elided in this excerpt.  */
2012 pedantic_non_lvalue (tree x)
2014 if (pedantic_lvalues)
2015 return non_lvalue (x);
2020 /* Given a tree comparison code, return the code that is the logical inverse
2021 of the given code. It is not safe to do this for floating-point
2022 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2023 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
/* NOTE(review): elided excerpt -- the switch header, most case labels and
   the body of the flag_trapping_math early-out are not visible.  */
2025 static enum tree_code
2026 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inverting can change trap behaviour
   (the guarded action is on an elided line).  */
2028 if (honor_nans && flag_trapping_math)
/* Each ordered comparison inverts to its UN* counterpart when NaNs must
   be honoured, or to the plain ordered inverse otherwise.  (Which case
   label pairs with which return is elided -- confirm in the full file.)  */
2038 return honor_nans ? UNLE_EXPR : LE_EXPR;
2040 return honor_nans ? UNLT_EXPR : LT_EXPR;
2042 return honor_nans ? UNGE_EXPR : GE_EXPR;
2044 return honor_nans ? UNGT_EXPR : GT_EXPR;
2058 return UNORDERED_EXPR;
2059 case UNORDERED_EXPR:
2060 return ORDERED_EXPR;
2066 /* Similar, but return the comparison that results if the operands are
2067 swapped. This is safe for floating-point. */
/* NOTE(review): only the signature survives in this excerpt; the entire
   body (and return type line) is elided.  */
2070 swap_tree_comparison (enum tree_code code)
2091 /* Convert a comparison tree code from an enum tree_code representation
2092 into a compcode bit-based encoding. This function is the inverse of
2093 compcode_to_comparison. */
/* NOTE(review): elided excerpt -- braces, the switch header and most case
   labels are missing; each visible return pairs with an elided label.  */
2095 static enum comparison_code
2096 comparison_to_compcode (enum tree_code code)
2113 return COMPCODE_ORD;
2114 case UNORDERED_EXPR:
2115 return COMPCODE_UNORD;
2117 return COMPCODE_UNLT;
2119 return COMPCODE_UNEQ;
2121 return COMPCODE_UNLE;
2123 return COMPCODE_UNGT;
2125 return COMPCODE_LTGT;
2127 return COMPCODE_UNGE;
2133 /* Convert a compcode bit-based encoding of a comparison operator back
2134 to GCC's enum tree_code representation. This function is the
2135 inverse of comparison_to_compcode. */
/* NOTE(review): elided excerpt -- braces, the switch header and most of
   the cases are missing from this view.  */
2137 static enum tree_code
2138 compcode_to_comparison (enum comparison_code code)
2155 return ORDERED_EXPR;
2156 case COMPCODE_UNORD:
2157 return UNORDERED_EXPR;
2175 /* Return a tree for the comparison which is the combination of
2176 doing the AND or OR (depending on CODE) of the two operations LCODE
2177 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2178 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2179 if this makes the transformation invalid. */
/* NOTE(review): elided excerpt -- return type, braces, the switch header
   and a few condition/return lines are not visible here.  */
2182 combine_comparisons (enum tree_code code, enum tree_code lcode,
2183 enum tree_code rcode, tree truth_type,
2184 tree ll_arg, tree lr_arg)
2186 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2187 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2188 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2189 enum comparison_code compcode;
/* In the bit-based encoding, AND/OR of two comparisons on the same
   operands is plain bitwise &/| of their compcodes.  */
2193 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2194 compcode = lcompcode & rcompcode;
2197 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2198 compcode = lcompcode | rcompcode;
/* The guarding condition here is elided (presumably !honor_nans).  */
2207 /* Eliminate unordered comparisons, as well as LTGT and ORD
2208 which are not used unless the mode has NaNs. */
2209 compcode &= ~COMPCODE_UNORD;
2210 if (compcode == COMPCODE_LTGT)
2211 compcode = COMPCODE_NE;
2212 else if (compcode == COMPCODE_ORD)
2213 compcode = COMPCODE_TRUE;
2215 else if (flag_trapping_math)
2217 /* Check that the original operation and the optimized ones will trap
2218 under the same condition. */
2219 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2220 && (lcompcode != COMPCODE_EQ)
2221 && (lcompcode != COMPCODE_ORD);
2222 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2223 && (rcompcode != COMPCODE_EQ)
2224 && (rcompcode != COMPCODE_ORD);
2225 bool trap = (compcode & COMPCODE_UNORD) == 0
2226 && (compcode != COMPCODE_EQ)
2227 && (compcode != COMPCODE_ORD);
2229 /* In a short-circuited boolean expression the LHS might be
2230 such that the RHS, if evaluated, will never trap. For
2231 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2232 if neither x nor y is NaN. (This is a mixed blessing: for
2233 example, the expression above will never trap, hence
2234 optimizing it to x < y would be invalid). */
2235 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2236 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2239 /* If the comparison was short-circuited, and only the RHS
2240 trapped, we may now generate a spurious trap. */
2242 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2245 /* If we changed the conditions that cause a trap, we lose. */
2246 if ((ltrap || rtrap) != trap)
/* Constant results need no comparison node at all.  */
2250 if (compcode == COMPCODE_TRUE)
2251 return constant_boolean_node (true, truth_type);
2252 else if (compcode == COMPCODE_FALSE)
2253 return constant_boolean_node (false, truth_type);
2255 return fold (build2 (compcode_to_comparison (compcode),
2256 truth_type, ll_arg, lr_arg));
2259 /* Return nonzero if CODE is a tree code that represents a truth value. */
/* NOTE(review): return type line and braces are elided in this excerpt.  */
2262 truth_value_p (enum tree_code code)
/* True for the comparison class ('<') and the boolean TRUTH_* codes.  */
2264 return (TREE_CODE_CLASS (code) == '<'
2265 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2266 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2267 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2270 /* Return nonzero if two operands (typically of the same tree node)
2271 are necessarily equal. If either argument has side-effects this
2272 function returns zero. FLAGS modifies behavior as follows:
2274 If OEP_ONLY_CONST is set, only return nonzero for constants.
2275 This function tests whether the operands are indistinguishable;
2276 it does not test whether they are equal using C's == operation.
2277 The distinction is important for IEEE floating point, because
2278 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2279 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2281 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2282 even though it may hold multiple values during a function.
2283 This is because a GCC tree node guarantees that nothing else is
2284 executed between the evaluation of its "operands" (which may often
2285 be evaluated in arbitrary order). Hence if the operands themselves
2286 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2287 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2288 unset means assuming isochronic (or instantaneous) tree equivalence.
2289 Unless comparing arbitrary expression trees, such as from different
2290 statements, this flag can usually be left unset.
2292 If OEP_PURE_SAME is set, then pure functions with identical arguments
2293 are considered the same. It is used when the caller has other ways
2294 to ensure that global memory is unchanged in between. */
/* NOTE(review): elided excerpt -- return type, braces, many returns, case
   labels and the STRIP_NOPS calls are not visible here; verify any edit
   against the complete source.  */
2297 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2299 /* If one is specified and the other isn't, they aren't equal and if
2300 neither is specified, they are.
2302 ??? This is temporary and is meant only to handle the cases of the
2303 optional operands for COMPONENT_REF and ARRAY_REF. */
2304 if ((arg0 && !arg1) || (!arg0 && arg1))
2306 else if (!arg0 && !arg1)
2308 /* If either is ERROR_MARK, they aren't equal. */
2309 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2312 /* If both types don't have the same signedness, then we can't consider
2313 them equal. We must check this before the STRIP_NOPS calls
2314 because they may change the signedness of the arguments. */
2315 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2321 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2322 /* This is needed for conversions and for COMPONENT_REF.
2323 Might as well play it safe and always test this. */
2324 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2325 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2326 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2329 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2330 We don't care about side effects in that case because the SAVE_EXPR
2331 takes care of that for us. In all other cases, two expressions are
2332 equal if they have no side effects. If we have two identical
2333 expressions with side effects that should be treated the same due
2334 to the only side effects being identical SAVE_EXPR's, that will
2335 be detected in the recursive calls below. */
2336 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2337 && (TREE_CODE (arg0) == SAVE_EXPR
2338 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2341 /* Next handle constant cases, those for which we can return 1 even
2342 if ONLY_CONST is set. */
2343 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2344 switch (TREE_CODE (arg0))
/* INTEGER_CST (label elided): equal iff neither overflowed and the
   values match.  */
2347 return (! TREE_CONSTANT_OVERFLOW (arg0)
2348 && ! TREE_CONSTANT_OVERFLOW (arg1)
2349 && tree_int_cst_equal (arg0, arg1));
/* REAL_CST (label elided): bit-identical comparison, so -0.0 != 0.0
   and identical NaNs compare equal, per the header comment.  */
2352 return (! TREE_CONSTANT_OVERFLOW (arg0)
2353 && ! TREE_CONSTANT_OVERFLOW (arg1)
2354 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2355 TREE_REAL_CST (arg1)));
/* VECTOR_CST (label elided): walk both element lists in parallel.  */
2361 if (TREE_CONSTANT_OVERFLOW (arg0)
2362 || TREE_CONSTANT_OVERFLOW (arg1))
2365 v1 = TREE_VECTOR_CST_ELTS (arg0);
2366 v2 = TREE_VECTOR_CST_ELTS (arg1);
2369 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2372 v1 = TREE_CHAIN (v1);
2373 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST (label elided): compare both components recursively.  */
2380 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2382 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST (label elided): same length and same bytes.  */
2386 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2387 && ! memcmp (TREE_STRING_POINTER (arg0),
2388 TREE_STRING_POINTER (arg1),
2389 TREE_STRING_LENGTH (arg0)));
2392 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2398 if (flags & OEP_ONLY_CONST)
/* Non-constant expressions: dispatch on tree code class.  */
2401 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2404 /* Two conversions are equal only if signedness and modes match. */
2405 switch (TREE_CODE (arg0))
2410 case FIX_TRUNC_EXPR:
2411 case FIX_FLOOR_EXPR:
2412 case FIX_ROUND_EXPR:
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2414 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2421 return operand_equal_p (TREE_OPERAND (arg0, 0),
2422 TREE_OPERAND (arg1, 0), flags);
/* Binary expression classes: compare operands pairwise ...  */
2426 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2427 TREE_OPERAND (arg1, 0), flags)
2428 && operand_equal_p (TREE_OPERAND (arg0, 1),
2429 TREE_OPERAND (arg1, 1), flags))
2432 /* For commutative ops, allow the other order. */
2433 return (commutative_tree_code (TREE_CODE (arg0))
2434 && operand_equal_p (TREE_OPERAND (arg0, 0),
2435 TREE_OPERAND (arg1, 1), flags)
2436 && operand_equal_p (TREE_OPERAND (arg0, 1),
2437 TREE_OPERAND (arg1, 0), flags));
2440 /* If either of the pointer (or reference) expressions we are
2441 dereferencing contain a side effect, these cannot be equal. */
2442 if (TREE_SIDE_EFFECTS (arg0)
2443 || TREE_SIDE_EFFECTS (arg1))
2446 switch (TREE_CODE (arg0))
2451 return operand_equal_p (TREE_OPERAND (arg0, 0),
2452 TREE_OPERAND (arg1, 0), flags);
2455 case ARRAY_RANGE_REF:
/* ARRAY_REF/ARRAY_RANGE_REF: base, index and the two optional
   operands must all match (the optional ones may be NULL; the
   NULL/NULL case is handled at the top of this function).  */
2456 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2457 TREE_OPERAND (arg1, 0), flags)
2458 && operand_equal_p (TREE_OPERAND (arg0, 1),
2459 TREE_OPERAND (arg1, 1), flags)
2460 && operand_equal_p (TREE_OPERAND (arg0, 2),
2461 TREE_OPERAND (arg1, 2), flags)
2462 && operand_equal_p (TREE_OPERAND (arg0, 3),
2463 TREE_OPERAND (arg1, 3), flags));
2467 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 0), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 1), flags)
2471 && operand_equal_p (TREE_OPERAND (arg0, 2),
2472 TREE_OPERAND (arg1, 2), flags));
2476 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2477 TREE_OPERAND (arg1, 0), flags)
2478 && operand_equal_p (TREE_OPERAND (arg0, 1),
2479 TREE_OPERAND (arg1, 1), flags)
2480 && operand_equal_p (TREE_OPERAND (arg0, 2),
2481 TREE_OPERAND (arg1, 2), flags));
2487 switch (TREE_CODE (arg0))
2490 case TRUTH_NOT_EXPR:
2491 return operand_equal_p (TREE_OPERAND (arg0, 0),
2492 TREE_OPERAND (arg1, 0), flags);
2494 case TRUTH_ANDIF_EXPR:
2495 case TRUTH_ORIF_EXPR:
/* Short-circuit forms are NOT commutative: operand order matters.  */
2496 return operand_equal_p (TREE_OPERAND (arg0, 0),
2497 TREE_OPERAND (arg1, 0), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 1),
2499 TREE_OPERAND (arg1, 1), flags);
2501 case TRUTH_AND_EXPR:
2503 case TRUTH_XOR_EXPR:
/* Non-short-circuit boolean ops: either order is acceptable.  */
2504 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2505 TREE_OPERAND (arg1, 0), flags)
2506 && operand_equal_p (TREE_OPERAND (arg0, 1),
2507 TREE_OPERAND (arg1, 1), flags))
2508 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 1), flags)
2510 && operand_equal_p (TREE_OPERAND (arg0, 1),
2511 TREE_OPERAND (arg1, 0), flags));
2514 /* If the CALL_EXPRs call different functions, then they
2515 clearly can not be equal. */
2516 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2517 TREE_OPERAND (arg1, 0), flags))
/* Only const (or, with OEP_PURE_SAME, pure) calls can be merged;
   the test using CEF continues on elided lines.  */
2521 unsigned int cef = call_expr_flags (arg0);
2522 if (flags & OEP_PURE_SAME)
2523 cef &= ECF_CONST | ECF_PURE;
2530 /* Now see if all the arguments are the same. operand_equal_p
2531 does not handle TREE_LIST, so we walk the operands here
2532 feeding them to operand_equal_p. */
2533 arg0 = TREE_OPERAND (arg0, 1);
2534 arg1 = TREE_OPERAND (arg1, 1);
2535 while (arg0 && arg1)
2537 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2541 arg0 = TREE_CHAIN (arg0);
2542 arg1 = TREE_CHAIN (arg1);
2545 /* If we get here and both argument lists are exhausted
2546 then the CALL_EXPRs are equal. */
2547 return ! (arg0 || arg1);
2554 /* Consider __builtin_sqrt equal to sqrt. */
2555 return (TREE_CODE (arg0) == FUNCTION_DECL
2556 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2557 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2558 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2565 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2566 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2568 When in doubt, return 0. */
/* NOTE(review): elided excerpt -- return type, braces, several returns
   and the final return are not visible here.  */
2571 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2573 int unsignedp1, unsignedpo;
2574 tree primarg0, primarg1, primother;
2575 unsigned int correct_width;
/* Trivially equal operands succeed immediately.  */
2577 if (operand_equal_p (arg0, arg1, 0))
/* Only integral comparisons are handled below.  */
2580 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2581 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2584 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2585 and see if the inner values are the same. This removes any
2586 signedness comparison, which doesn't matter here. */
2587 primarg0 = arg0, primarg1 = arg1;
2588 STRIP_NOPS (primarg0);
2589 STRIP_NOPS (primarg1);
2590 if (operand_equal_p (primarg0, primarg1, 0))
2593 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2594 actual comparison operand, ARG0.
2596 First throw away any conversions to wider types
2597 already present in the operands. */
2599 primarg1 = get_narrower (arg1, &unsignedp1);
2600 primother = get_narrower (other, &unsignedpo);
2602 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2603 if (unsignedp1 == unsignedpo
2604 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2605 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2607 tree type = TREE_TYPE (arg0);
2609 /* Make sure shorter operand is extended the right way
2610 to match the longer operand. */
2611 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2612 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
/* If the re-narrowed-and-widened ARG1 now matches ARG0, shorten_compare
   could indeed have produced ARG0 (the success return is elided).  */
2614 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2621 /* See if ARG is an expression that is either a comparison or is performing
2622 arithmetic on comparisons. The comparisons must only be comparing
2623 two different values, which will be stored in *CVAL1 and *CVAL2; if
2624 they are nonzero it means that some operands have already been found.
2625 No variables may be used anywhere else in the expression except in the
2626 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2627 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2629 If this is true, return 1. Otherwise, return zero. */
2632 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2634 enum tree_code code = TREE_CODE (arg);
2635 char class = TREE_CODE_CLASS (code);
2637 /* We can handle some of the 'e' cases here. */
2638 if (class == 'e' && code == TRUTH_NOT_EXPR)
2640 else if (class == 'e'
2641 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2642 || code == COMPOUND_EXPR))
2645 else if (class == 'e' && code == SAVE_EXPR
2646 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2648 /* If we've already found a CVAL1 or CVAL2, this expression is
2649 too complex to handle. */
2650 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
2660 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must themselves qualify.  */
2663 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2664 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2665 cval1, cval2, save_p));
/* Conditional: condition and both arms must all qualify.  */
2671 if (code == COND_EXPR)
2672 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2673 cval1, cval2, save_p)
2674 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2675 cval1, cval2, save_p)
2676 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2677 cval1, cval2, save_p));
2681 /* First see if we can handle the first operand, then the second. For
2682 the second operand, we know *CVAL1 can't be zero. It must be that
2683 one side of the comparison is each of the values; test for the
2684 case where this isn't true by failing if the two operands
2687 if (operand_equal_p (TREE_OPERAND (arg, 0),
2688 TREE_OPERAND (arg, 1), 0))
/* Record the first comparison operand in an empty slot, or verify it
   matches a value already seen.  */
2692 *cval1 = TREE_OPERAND (arg, 0);
2693 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2695 else if (*cval2 == 0)
2696 *cval2 = TREE_OPERAND (arg, 0);
2697 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now do the same bookkeeping for the second comparison operand.  */
2702 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2704 else if (*cval2 == 0)
2705 *cval2 = TREE_OPERAND (arg, 1);
2706 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2718 /* ARG is a tree that is known to contain just arithmetic operations and
2719 comparisons. Evaluate the operations in the tree substituting NEW0 for
2720 any occurrence of OLD0 as an operand of a comparison and likewise for
2724 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2726 tree type = TREE_TYPE (arg);
2727 enum tree_code code = TREE_CODE (arg);
2728 char class = TREE_CODE_CLASS (code);
2730 /* We can handle some of the 'e' cases here. */
2731 if (class == 'e' && code == TRUTH_NOT_EXPR)
2733 else if (class == 'e'
2734 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary expression: rebuild it around the substituted operand and fold.  */
2740 return fold (build1 (code, type,
2741 eval_subst (TREE_OPERAND (arg, 0),
2742 old0, new0, old1, new1)));
/* Binary expression: substitute within both operands, then fold.  */
2745 return fold (build2 (code, type,
2746 eval_subst (TREE_OPERAND (arg, 0),
2747 old0, new0, old1, new1),
2748 eval_subst (TREE_OPERAND (arg, 1),
2749 old0, new0, old1, new1)));
2755 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2758 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (e.g. conditional) expression: substitute in all three operands.  */
2761 return fold (build3 (code, type,
2762 eval_subst (TREE_OPERAND (arg, 0),
2763 old0, new0, old1, new1),
2764 eval_subst (TREE_OPERAND (arg, 1),
2765 old0, new0, old1, new1),
2766 eval_subst (TREE_OPERAND (arg, 2),
2767 old0, new0, old1, new1)));
2771 /* Fall through - ??? */
2775 tree arg0 = TREE_OPERAND (arg, 0);
2776 tree arg1 = TREE_OPERAND (arg, 1);
2778 /* We need to check both for exact equality and tree equality. The
2779 former will be true if the operand has a side-effect. In that
2780 case, we know the operand occurred exactly once. */
2782 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2784 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2787 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2789 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
/* Comparison case: rebuild with any replaced operands and fold.  */
2792 return fold (build2 (code, type, arg0, arg1));
2800 /* Return a tree for the case when the result of an expression is RESULT
2801 converted to TYPE and OMITTED was previously an operand of the expression
2802 but is now not needed (e.g., we folded OMITTED * 0).
2804 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2805 the conversion of RESULT to TYPE. */
2808 omit_one_operand (tree type, tree result, tree omitted)
2810 tree t = fold_convert (type, result);
/* Preserve OMITTED's side effects by sequencing it before the result.  */
2812 if (TREE_SIDE_EFFECTS (omitted))
2813 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* non_lvalue prevents the simplified result from being used as an lvalue.  */
2815 return non_lvalue (t);
2818 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2821 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2823 tree t = fold_convert (type, result);
/* Keep OMITTED's side effects, same as omit_one_operand.  */
2825 if (TREE_SIDE_EFFECTS (omitted))
2826 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2828 return pedantic_non_lvalue (t);
2831 /* Return a tree for the case when the result of an expression is RESULT
2832 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2833 of the expression but are now not needed.
2835 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2836 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2837 evaluated before OMITTED2. Otherwise, if neither has side effects,
2838 just do the conversion of RESULT to TYPE. */
2841 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2843 tree t = fold_convert (type, result);
/* Wrap inner-to-outer so that OMITTED1 ends up evaluated before OMITTED2.  */
2845 if (TREE_SIDE_EFFECTS (omitted2))
2846 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2847 if (TREE_SIDE_EFFECTS (omitted1))
2848 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only add the non-lvalue wrapper when no COMPOUND_EXPR was needed.  */
2850 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2854 /* Return a simplified tree node for the truth-negation of ARG. This
2855 never alters ARG itself. We assume that ARG is an operation that
2856 returns a truth value (0 or 1).
2858 FIXME: one would think we would fold the result, but it causes
2859 problems with the dominator optimizer. */
2861 invert_truthvalue (tree arg)
2863 tree type = TREE_TYPE (arg);
2864 enum tree_code code = TREE_CODE (arg);
2866 if (code == ERROR_MARK)
2869 /* If this is a comparison, we can simply invert it, except for
2870 floating-point non-equality comparisons, in which case we just
2871 enclose a TRUTH_NOT_EXPR around what we have. */
2873 if (TREE_CODE_CLASS (code) == '<')
2875 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With -ftrapping-math, inverting an ordered FP comparison would change
   which inputs trap, so keep an explicit TRUTH_NOT_EXPR instead.  */
2876 if (FLOAT_TYPE_P (op_type)
2877 && flag_trapping_math
2878 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2879 && code != NE_EXPR && code != EQ_EXPR)
2880 return build1 (TRUTH_NOT_EXPR, type, arg);
2883 code = invert_tree_comparison (code,
2884 HONOR_NANS (TYPE_MODE (op_type)));
/* ERROR_MARK here means no inverse comparison exists for this code.  */
2885 if (code == ERROR_MARK)
2886 return build1 (TRUTH_NOT_EXPR, type, arg);
2888 return build2 (code, type,
2889 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: the inversion is simply "is it zero".  */
2896 return fold_convert (type,
2897 build_int_cst (NULL_TREE, integer_zerop (arg)));
2899 case TRUTH_AND_EXPR:
/* De Morgan: !(a & b) == !a | !b.  */
2900 return build2 (TRUTH_OR_EXPR, type,
2901 invert_truthvalue (TREE_OPERAND (arg, 0)),
2902 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan for the OR case: !(a | b) == !a & !b.  */
2905 return build2 (TRUTH_AND_EXPR, type,
2906 invert_truthvalue (TREE_OPERAND (arg, 0)),
2907 invert_truthvalue (TREE_OPERAND (arg, 1)));
2909 case TRUTH_XOR_EXPR:
2910 /* Here we can invert either operand. We invert the first operand
2911 unless the second operand is a TRUTH_NOT_EXPR in which case our
2912 result is the XOR of the first operand with the inside of the
2913 negation of the second operand. */
2915 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2916 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2917 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2919 return build2 (TRUTH_XOR_EXPR, type,
2920 invert_truthvalue (TREE_OPERAND (arg, 0)),
2921 TREE_OPERAND (arg, 1));
2923 case TRUTH_ANDIF_EXPR:
/* Short-circuit forms follow De Morgan too, preserving evaluation order.  */
2924 return build2 (TRUTH_ORIF_EXPR, type,
2925 invert_truthvalue (TREE_OPERAND (arg, 0)),
2926 invert_truthvalue (TREE_OPERAND (arg, 1)));
2928 case TRUTH_ORIF_EXPR:
2929 return build2 (TRUTH_ANDIF_EXPR, type,
2930 invert_truthvalue (TREE_OPERAND (arg, 0)),
2931 invert_truthvalue (TREE_OPERAND (arg, 1)));
2933 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2934 return TREE_OPERAND (arg, 0);
/* Conditional: invert both value arms; the guard is unchanged.  */
2937 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2938 invert_truthvalue (TREE_OPERAND (arg, 1)),
2939 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* Compound expression: only the value (second) operand is inverted.  */
2942 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2943 invert_truthvalue (TREE_OPERAND (arg, 1)));
2945 case NON_LVALUE_EXPR:
2946 return invert_truthvalue (TREE_OPERAND (arg, 0));
2949 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
/* Push the inversion through a boolean-typed conversion.  */
2954 return build1 (TREE_CODE (arg), type,
2955 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Bit-and with 1: invert by comparing the whole expression to zero.  */
2958 if (!integer_onep (TREE_OPERAND (arg, 1)))
2960 return build2 (EQ_EXPR, type, arg,
2961 fold_convert (type, integer_zero_node));
2964 return build1 (TRUTH_NOT_EXPR, type, arg);
2966 case CLEANUP_POINT_EXPR:
2967 return build1 (CLEANUP_POINT_EXPR, type,
2968 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: ARG must already be boolean-valued; wrap it.  */
2973 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
2974 return build1 (TRUTH_NOT_EXPR, type, arg);
2977 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2978 operands are another bit-wise operation with a common input. If so,
2979 distribute the bit operations to save an operation and possibly two if
2980 constants are involved. For example, convert
2981 (A | B) & (A | C) into A | (B & C)
2982 Further simplification will occur if B and C are constants.
2984 If this optimization cannot be done, 0 will be returned. */
2987 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must share the same inner code, which must differ from
   CODE and be one of BIT_AND_EXPR / BIT_IOR_EXPR.  */
2992 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2993 || TREE_CODE (arg0) == code
2994 || (TREE_CODE (arg0) != BIT_AND_EXPR
2995 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Locate the operand common to both sides; the two remaining operands
   become LEFT and RIGHT.  Four position combinations are possible.  */
2998 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3000 common = TREE_OPERAND (arg0, 0);
3001 left = TREE_OPERAND (arg0, 1);
3002 right = TREE_OPERAND (arg1, 1);
3004 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3006 common = TREE_OPERAND (arg0, 0);
3007 left = TREE_OPERAND (arg0, 1);
3008 right = TREE_OPERAND (arg1, 0);
3010 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3012 common = TREE_OPERAND (arg0, 1);
3013 left = TREE_OPERAND (arg0, 0);
3014 right = TREE_OPERAND (arg1, 1);
3016 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3018 common = TREE_OPERAND (arg0, 1);
3019 left = TREE_OPERAND (arg0, 0);
3020 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON <inner-op> (LEFT CODE RIGHT), folding both levels.  */
3025 return fold (build2 (TREE_CODE (arg0), type, common,
3026 fold (build2 (code, type, left, right))));
3029 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3030 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3033 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* BIT_FIELD_REF operands: object, width in bits, starting bit offset.  */
3036 tree result = build3 (BIT_FIELD_REF, type, inner,
3037 size_int (bitsize), bitsize_int (bitpos));
3039 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3044 /* Optimize a bit-field compare.
3046 There are two cases: First is a compare against a constant and the
3047 second is a comparison of two items where the fields are at the same
3048 bit position relative to the start of a chunk (byte, halfword, word)
3049 large enough to contain it. In these cases we can avoid the shift
3050 implicit in bitfield extractions.
3052 For constants, we emit a compare of the shifted constant with the
3053 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3054 compared. For two fields at the same position, we do the ANDs with the
3055 similar mask and compare the result of the ANDs.
3057 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3058 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3059 are the left and right operands of the comparison, respectively.
3061 If the optimization described above can be done, we return the resulting
3062 tree. Otherwise we return zero. */
3065 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3068 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3069 tree type = TREE_TYPE (lhs);
3070 tree signed_type, unsigned_type;
3071 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3072 enum machine_mode lmode, rmode, nmode;
3073 int lunsignedp, runsignedp;
3074 int lvolatilep = 0, rvolatilep = 0;
3075 tree linner, rinner = NULL_TREE;
3079 /* Get all the information about the extractions being done. If the bit size
3080 if the same as the size of the underlying object, we aren't doing an
3081 extraction at all and so can do nothing. We also don't want to
3082 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3083 then will no longer be able to replace it. */
3084 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3085 &lunsignedp, &lvolatilep);
3086 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3087 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3092 /* If this is not a constant, we can only do something if bit positions,
3093 sizes, and signedness are the same. */
3094 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3095 &runsignedp, &rvolatilep);
3097 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3098 || lunsignedp != runsignedp || offset != 0
3099 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3103 /* See if we can find a mode to refer to this field. We should be able to,
3104 but fail if we can't. */
3105 nmode = get_best_mode (lbitsize, lbitpos,
3106 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3107 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3108 TYPE_ALIGN (TREE_TYPE (rinner))),
3109 word_mode, lvolatilep || rvolatilep);
3110 if (nmode == VOIDmode)
3113 /* Set signed and unsigned types of the precision of this mode for the
3115 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3116 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3118 /* Compute the bit position and size for the new reference and our offset
3119 within it. If the new reference is the same size as the original, we
3120 won't optimize anything, so return zero. */
3121 nbitsize = GET_MODE_BITSIZE (nmode);
3122 nbitpos = lbitpos & ~ (nbitsize - 1);
3124 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 of the containing unit is at the other end,
   so re-express the field position from the high-order side.  */
3127 if (BYTES_BIG_ENDIAN)
3128 lbitpos = nbitsize - lbitsize - lbitpos;
3130 /* Make the mask to be used against the extracted field. */
3131 mask = build_int_cst (unsigned_type, -1);
3132 mask = force_fit_type (mask, 0, false, false);
3133 mask = fold_convert (unsigned_type, mask);
/* Shift all-ones left then right to leave LBITSIZE ones at LBITPOS.  */
3134 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3135 mask = const_binop (RSHIFT_EXPR, mask,
3136 size_int (nbitsize - lbitsize - lbitpos), 0);
3139 /* If not comparing with constant, just rework the comparison
3141 return build2 (code, compare_type,
3142 build2 (BIT_AND_EXPR, unsigned_type,
3143 make_bit_field_ref (linner, unsigned_type,
3144 nbitsize, nbitpos, 1),
3146 build2 (BIT_AND_EXPR, unsigned_type,
3147 make_bit_field_ref (rinner, unsigned_type,
3148 nbitsize, nbitpos, 1),
3151 /* Otherwise, we are handling the constant case. See if the constant is too
3152 big for the field. Warn and return a tree of for 0 (false) if so. We do
3153 this not only for its own sake, but to avoid having to test for this
3154 error case below. If we didn't, we might generate wrong code.
3156 For unsigned fields, the constant shifted right by the field length should
3157 be all zero. For signed fields, the high-order bits should agree with
3162 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3163 fold_convert (unsigned_type, rhs),
3164 size_int (lbitsize), 0)))
3166 warning ("comparison is always %d due to width of bit-field",
3168 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: the bits above the sign bit must all match it.  */
3173 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3174 size_int (lbitsize - 1), 0);
3175 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3177 warning ("comparison is always %d due to width of bit-field",
3179 return constant_boolean_node (code == NE_EXPR, compare_type);
3183 /* Single-bit compares should always be against zero. */
3184 if (lbitsize == 1 && ! integer_zerop (rhs))
3186 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3187 rhs = fold_convert (type, integer_zero_node);
3190 /* Make a new bitfield reference, shift the constant over the
3191 appropriate number of bits and mask it with the computed mask
3192 (in case this was a signed field). If we changed it, make a new one. */
3193 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3196 TREE_SIDE_EFFECTS (lhs) = 1;
3197 TREE_THIS_VOLATILE (lhs) = 1;
/* Pre-shift and mask the constant so the comparison needs no extraction.  */
3200 rhs = fold (const_binop (BIT_AND_EXPR,
3201 const_binop (LSHIFT_EXPR,
3202 fold_convert (unsigned_type, rhs),
3203 size_int (lbitpos), 0),
3206 return build2 (code, compare_type,
3207 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3211 /* Subroutine for fold_truthop: decode a field reference.
3213 If EXP is a comparison reference, we return the innermost reference.
3215 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3216 set to the starting bit number.
3218 If the innermost field can be completely contained in a mode-sized
3219 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3221 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3222 otherwise it is not changed.
3224 *PUNSIGNEDP is set to the signedness of the field.
3226 *PMASK is set to the mask used. This is either contained in a
3227 BIT_AND_EXPR or derived from the width of the field.
3229 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3231 Return 0 if this is not a component reference or is one that we can't
3232 do anything with. */
3235 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3236 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3237 int *punsignedp, int *pvolatilep,
3238 tree *pmask, tree *pand_mask)
3240 tree outer_type = 0;
3242 tree mask, inner, offset;
3244 unsigned int precision;
3246 /* All the optimizations using this function assume integer fields.
3247 There are problems with FP fields since the type_for_size call
3248 below can fail for, e.g., XFmode. */
3249 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3252 /* We are interested in the bare arrangement of bits, so strip everything
3253 that doesn't affect the machine mode. However, record the type of the
3254 outermost expression if it may matter below. */
3255 if (TREE_CODE (exp) == NOP_EXPR
3256 || TREE_CODE (exp) == CONVERT_EXPR
3257 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3258 outer_type = TREE_TYPE (exp);
/* Peel off a BIT_AND_EXPR with a constant mask, remembering the mask.  */
3261 if (TREE_CODE (exp) == BIT_AND_EXPR)
3263 and_mask = TREE_OPERAND (exp, 1);
3264 exp = TREE_OPERAND (exp, 0);
3265 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3266 if (TREE_CODE (and_mask) != INTEGER_CST)
3270 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3271 punsignedp, pvolatilep);
3272 if ((inner == exp && and_mask == 0)
3273 || *pbitsize < 0 || offset != 0
3274 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3277 /* If the number of bits in the reference is the same as the bitsize of
3278 the outer type, then the outer type gives the signedness. Otherwise
3279 (in case of a small bitfield) the signedness is unchanged. */
3280 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3281 *punsignedp = TYPE_UNSIGNED (outer_type);
3283 /* Compute the mask to access the bitfield. */
3284 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3285 precision = TYPE_PRECISION (unsigned_type);
3287 mask = build_int_cst (unsigned_type, -1);
3288 mask = force_fit_type (mask, 0, false, false);
/* Shift all-ones out then back to leave *PBITSIZE low-order one bits.  */
3290 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3291 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3293 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3295 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3296 fold_convert (unsigned_type, and_mask), mask));
3299 *pand_mask = and_mask;
3303 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3307 all_ones_mask_p (tree mask, int size)
3309 tree type = TREE_TYPE (mask);
3310 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed variant of TYPE.  */
3313 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3314 tmask = force_fit_type (tmask, 0, false, false);
/* Shift the all-ones value left then right by (precision - size) to keep
   exactly SIZE low-order ones, and compare that with MASK.  */
3317 tree_int_cst_equal (mask,
3318 const_binop (RSHIFT_EXPR,
3319 const_binop (LSHIFT_EXPR, tmask,
3320 size_int (precision - size),
3322 size_int (precision - size), 0));
3325 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3326 represents the sign bit of EXP's type. If EXP represents a sign
3327 or zero extension, also test VAL against the unextended type.
3328 The return value is the (sub)expression whose sign bit is VAL,
3329 or NULL_TREE otherwise. */
3332 sign_bit_p (tree exp, tree val)
3334 unsigned HOST_WIDE_INT mask_lo, lo;
3335 HOST_WIDE_INT mask_hi, hi;
3339 /* Tree EXP must have an integral type. */
3340 t = TREE_TYPE (exp);
3341 if (! INTEGRAL_TYPE_P (t))
3344 /* Tree VAL must be an integer constant. */
3345 if (TREE_CODE (val) != INTEGER_CST
3346 || TREE_CONSTANT_OVERFLOW (val))
3349 width = TYPE_PRECISION (t);
/* Wide case: the sign bit falls in the high word of the two-word constant.  */
3350 if (width > HOST_BITS_PER_WIDE_INT)
3352 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3355 mask_hi = ((unsigned HOST_WIDE_INT) -1
3356 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case: the sign bit falls in the low word.  */
3362 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3365 mask_lo = ((unsigned HOST_WIDE_INT) -1
3366 >> (HOST_BITS_PER_WIDE_INT - width));
3369 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3370 treat VAL as if it were unsigned. */
3371 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3372 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3375 /* Handle extension from a narrower type. */
3376 if (TREE_CODE (exp) == NOP_EXPR
3377 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3378 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3383 /* Subroutine for fold_truthop: determine if an operand is simple enough
3384 to be evaluated unconditionally. */
3387 simple_operand_p (tree exp)
3389 /* Strip any conversions that don't change the machine mode. */
3390 while ((TREE_CODE (exp) == NOP_EXPR
3391 || TREE_CODE (exp) == CONVERT_EXPR)
3392 && (TYPE_MODE (TREE_TYPE (exp))
3393 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3394 exp = TREE_OPERAND (exp, 0);
/* Constants ('c' class) are always simple; otherwise require a cheap,
   non-volatile, non-addressable local declaration.  */
3396 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3398 && ! TREE_ADDRESSABLE (exp)
3399 && ! TREE_THIS_VOLATILE (exp)
3400 && ! DECL_NONLOCAL (exp)
3401 /* Don't regard global variables as simple. They may be
3402 allocated in ways unknown to the compiler (shared memory,
3403 #pragma weak, etc). */
3404 && ! TREE_PUBLIC (exp)
3405 && ! DECL_EXTERNAL (exp)
3406 /* Loading a static variable is unduly expensive, but global
3407 registers aren't expensive. */
3408 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3411 /* The following functions are subroutines to fold_range_test and allow it to
3412 try to change a logical combination of comparisons into a range test.
3415 X == 2 || X == 3 || X == 4 || X == 5
3419 (unsigned) (X - 2) <= 3
3421 We describe each set of comparisons as being either inside or outside
3422 a range, using a variable named like IN_P, and then describe the
3423 range with a lower and upper bound. If one of the bounds is omitted,
3424 it represents either the highest or lowest value of the type.
3426 In the comments below, we represent a range by two numbers in brackets
3427 preceded by a "+" to designate being inside that range, or a "-" to
3428 designate being outside that range, so the condition can be inverted by
3429 flipping the prefix. An omitted bound is represented by a "-". For
3430 example, "- [-, 10]" means being outside the range starting at the lowest
3431 possible value and ending at 10, in other words, being greater than 10.
3432 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3435 We set up things so that the missing bounds are handled in a consistent
3436 manner so neither a missing bound nor "true" and "false" need to be
3437 handled using a special case. */
3439 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3440 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3441 and UPPER1_P are nonzero if the respective argument is an upper bound
3442 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3443 must be specified for a comparison. ARG1 will be converted to ARG0's
3444 type if both are specified. */
3447 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3448 tree arg1, int upper1_p)
3454 /* If neither arg represents infinity, do the normal operation.
3455 Else, if not a comparison, return infinity. Else handle the special
3456 comparison rules. Note that most of the cases below won't occur, but
3457 are handled for consistency. */
3459 if (arg0 != 0 && arg1 != 0)
3461 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3462 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
/* Only a fully-folded INTEGER_CST result is useful to callers.  */
3464 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3467 if (TREE_CODE_CLASS (code) != '<')
3470 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3471 for neither. In real maths, we cannot assume open ended ranges are
3472 the same. But, this is computer arithmetic, where numbers are finite.
3473 We can therefore make the transformation of any unbounded range with
3474 the value Z, Z being greater than any representable number. This permits
3475 us to treat unbounded ranges as equal. */
3476 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3477 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the signed infinity markers according to CODE.  */
3481 result = sgn0 == sgn1;
3484 result = sgn0 != sgn1;
3487 result = sgn0 < sgn1;
3490 result = sgn0 <= sgn1;
3493 result = sgn0 > sgn1;
3496 result = sgn0 >= sgn1;
3502 return constant_boolean_node (result, type);
3505 /* Given EXP, a logical expression, set the range it is testing into
3506 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3507 actually being tested. *PLOW and *PHIGH will be made of the same type
3508 as the returned expression. If EXP is not a comparison, we will most
3509 likely not be returning a useful value and range. */
3512 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3514 enum tree_code code;
3515 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3516 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3518 tree low, high, n_low, n_high;
3520 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3521 and see if we can refine the range. Some of the cases below may not
3522 happen, but it doesn't seem worth worrying about this. We "continue"
3523 the outer loop when we've changed something; otherwise we "break"
3524 the switch, which will "break" the while. */
3527 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3531 code = TREE_CODE (exp);
3532 exp_type = TREE_TYPE (exp);
/* Pick up ARG0/ARG1 and their types for the expression classes that
   have them; this guards the TREE_OPERAND accesses below.  */
3534 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3536 if (first_rtl_op (code) > 0)
3537 arg0 = TREE_OPERAND (exp, 0);
3538 if (TREE_CODE_CLASS (code) == '<'
3539 || TREE_CODE_CLASS (code) == '1'
3540 || TREE_CODE_CLASS (code) == '2')
3541 arg0_type = TREE_TYPE (arg0);
3542 if (TREE_CODE_CLASS (code) == '2'
3543 || TREE_CODE_CLASS (code) == '<'
3544 || (TREE_CODE_CLASS (code) == 'e'
3545 && TREE_CODE_LENGTH (code) > 1))
3546 arg1 = TREE_OPERAND (exp, 1);
3551 case TRUTH_NOT_EXPR:
/* Logical negation flips the in/out sense of the range.  */
3552 in_p = ! in_p, exp = arg0;
3555 case EQ_EXPR: case NE_EXPR:
3556 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3557 /* We can only do something if the range is testing for zero
3558 and if the second operand is an integer constant. Note that
3559 saying something is "in" the range we make is done by
3560 complementing IN_P since it will set in the initial case of
3561 being not equal to zero; "out" is leaving it alone. */
3562 if (low == 0 || high == 0
3563 || ! integer_zerop (low) || ! integer_zerop (high)
3564 || TREE_CODE (arg1) != INTEGER_CST)
3569 case NE_EXPR: /* - [c, c] */
3572 case EQ_EXPR: /* + [c, c] */
3573 in_p = ! in_p, low = high = arg1;
3575 case GT_EXPR: /* - [-, c] */
3576 low = 0, high = arg1;
3578 case GE_EXPR: /* + [c, -] */
3579 in_p = ! in_p, low = arg1, high = 0;
3581 case LT_EXPR: /* - [c, -] */
3582 low = arg1, high = 0;
3584 case LE_EXPR: /* + [-, c] */
3585 in_p = ! in_p, low = 0, high = arg1;
3591 /* If this is an unsigned comparison, we also know that EXP is
3592 greater than or equal to zero. We base the range tests we make
3593 on that fact, so we record it here so we can parse existing
3594 range tests. We test arg0_type since often the return type
3595 of, e.g. EQ_EXPR, is boolean. */
3596 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3598 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3600 fold_convert (arg0_type, integer_zero_node),
3604 in_p = n_in_p, low = n_low, high = n_high;
3606 /* If the high bound is missing, but we have a nonzero low
3607 bound, reverse the range so it goes from zero to the low bound
3609 if (high == 0 && low && ! integer_zerop (low))
3612 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3613 integer_one_node, 0);
3614 low = fold_convert (arg0_type, integer_zero_node);
3622 /* (-x) IN [a,b] -> x in [-b, -a] */
3623 n_low = range_binop (MINUS_EXPR, exp_type,
3624 fold_convert (exp_type, integer_zero_node),
3626 n_high = range_binop (MINUS_EXPR, exp_type,
3627 fold_convert (exp_type, integer_zero_node),
3629 low = n_low, high = n_high;
/* Rewrite bitwise-not as an equivalent arithmetic expression so the
   PLUS/MINUS range logic can handle it.  */
3635 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3636 fold_convert (exp_type, integer_one_node));
3639 case PLUS_EXPR: case MINUS_EXPR:
3640 if (TREE_CODE (arg1) != INTEGER_CST)
3643 /* If EXP is signed, any overflow in the computation is undefined,
3644 so we don't worry about it so long as our computations on
3645 the bounds don't overflow. For unsigned, overflow is defined
3646 and this is exactly the right thing. */
3647 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3648 arg0_type, low, 0, arg1, 0);
3649 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3650 arg0_type, high, 1, arg1, 0);
3651 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3652 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3655 /* Check for an unsigned range which has wrapped around the maximum
3656 value thus making n_high < n_low, and normalize it. */
3657 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3659 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3660 integer_one_node, 0);
3661 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3662 integer_one_node, 0);
3664 /* If the range is of the form +/- [ x+1, x ], we won't
3665 be able to normalize it. But then, it represents the
3666 whole range or the empty set, so make it
3668 if (tree_int_cst_equal (n_low, low)
3669 && tree_int_cst_equal (n_high, high))
3675 low = n_low, high = n_high;
3680 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* Give up on narrowing conversions or bounds that don't fit the
   inner type.  */
3681 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3684 if (! INTEGRAL_TYPE_P (arg0_type)
3685 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3686 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3689 n_low = low, n_high = high;
3692 n_low = fold_convert (arg0_type, n_low);
3695 n_high = fold_convert (arg0_type, n_high);
3698 /* If we're converting arg0 from an unsigned type, to exp,
3699 a signed type, we will be doing the comparison as unsigned.
3700 The tests above have already verified that LOW and HIGH
3703 So we have to ensure that we will handle large unsigned
3704 values the same way that the current signed bounds treat
3707 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3710 tree equiv_type = lang_hooks.types.type_for_mode
3711 (TYPE_MODE (arg0_type), 1);
3713 /* A range without an upper bound is, naturally, unbounded.
3714 Since convert would have cropped a very large value, use
3715 the max value for the destination type. */
3717 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3718 : TYPE_MAX_VALUE (arg0_type);
3720 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3721 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3722 fold_convert (arg0_type,
3724 fold_convert (arg0_type,
3725 integer_one_node)));
3727 /* If the low bound is specified, "and" the range with the
3728 range for which the original unsigned value will be
3732 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3733 1, n_low, n_high, 1,
3734 fold_convert (arg0_type,
3739 in_p = (n_in_p == in_p);
3743 /* Otherwise, "or" the range with the range of the input
3744 that will be interpreted as negative. */
3745 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3746 0, n_low, n_high, 1,
3747 fold_convert (arg0_type,
3752 in_p = (in_p != n_in_p);
3757 low = n_low, high = n_high;
3767 /* If EXP is a constant, we can evaluate whether this is true or false. */
3768 if (TREE_CODE (exp) == INTEGER_CST)
3770 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3772 && integer_onep (range_binop (LE_EXPR, integer_type_node,
/* Hand the accumulated range back through the output parameters.  */
3778 *pin_p = in_p, *plow = low, *phigh = high;
3782 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3783 type, TYPE, return an expression to test if EXP is in (or out of, depending
3784 on IN_P) the range. Return 0 if the test couldn't be created. */
3787 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
/* NOTE(review): this listing is a gapped excerpt -- the leading numbers are
   the original file's line numbers, and several lines (braces, declarations,
   else-arms) are missing between them.  Comments below describe only what
   the visible lines establish.  */
3789 tree etype = TREE_TYPE (exp);
/* An "out of range" test (in_p == 0, presumably -- the guarding condition is
   in a gap) is built as the "in range" test, then inverted.  */
3794 value = build_range_check (type, exp, 1, low, high);
3796 return invert_truthvalue (value);
/* No bounds at all: every value is in range, so the test is constant 1.  */
3801 if (low == 0 && high == 0)
3802 return fold_convert (type, integer_one_node);
/* Upper bound only: EXP <= HIGH.  */
3805 return fold (build2 (LE_EXPR, type, exp, high));
/* Lower bound only: EXP >= LOW.  */
3808 return fold (build2 (GE_EXPR, type, exp, low));
/* Degenerate single-value range: EXP == LOW.  */
3810 if (operand_equal_p (low, high, 0))
3811 return fold (build2 (EQ_EXPR, type, exp, low));
/* Range starting at zero: switch to the unsigned variant of ETYPE so the
   recursive call can express 0 <= EXP <= HIGH as one unsigned comparison.  */
3813 if (integer_zerop (low))
3815 if (! TYPE_UNSIGNED (etype))
3817 etype = lang_hooks.types.unsigned_type (etype);
3818 high = fold_convert (etype, high);
3819 exp = fold_convert (etype, exp);
3821 return build_range_check (type, exp, 1, 0, high);
3824 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3825 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
/* LO (and HI, declared in a gapped line) are assembled to hold
   2**(prec-1) - 1, the maximum of the signed type of precision PREC,
   split across the low/high words of a double-word constant.  */
3827 unsigned HOST_WIDE_INT lo;
3831 prec = TYPE_PRECISION (etype);
3832 if (prec <= HOST_BITS_PER_WIDE_INT)
3835 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3839 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3840 lo = (unsigned HOST_WIDE_INT) -1;
/* If HIGH is exactly that signed maximum, test the sign bit instead:
   (signed) EXP > 0.  */
3843 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3845 if (TYPE_UNSIGNED (etype))
3847 etype = lang_hooks.types.signed_type (etype);
3848 exp = fold_convert (etype, exp);
3850 return fold (build2 (GT_EXPR, type, exp,
3851 fold_convert (etype, integer_zero_node)));
/* General case: try to rewrite LOW <= EXP <= HIGH as
   (EXP - LOW) in [0, HIGH - LOW], provided HIGH - LOW is computable
   without overflow in ETYPE.  */
3855 value = const_binop (MINUS_EXPR, high, low, 0);
3856 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3858 tree utype, minv, maxv;
3860 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3861 for the type in question, as we rely on this here. */
3862 switch (TREE_CODE (etype))
3867 utype = lang_hooks.types.unsigned_type (etype);
3868 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3869 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3870 integer_one_node, 1);
3871 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3872 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Retry the subtraction after converting bounds and EXP to the
   (possibly re-chosen -- conversion site is in a gap) ETYPE.  */
3876 high = fold_convert (etype, high);
3877 low = fold_convert (etype, low);
3878 exp = fold_convert (etype, exp);
3879 value = const_binop (MINUS_EXPR, high, low, 0);
3887 if (value != 0 && ! TREE_OVERFLOW (value))
3888 return build_range_check (type,
3889 fold (build2 (MINUS_EXPR, etype, exp, low)),
3890 1, fold_convert (etype, integer_zero_node),
3896 /* Given two ranges, see if we can merge them into one. Return 1 if we
3897 can, 0 if we can't. Set the output range into the specified parameters. */
3900 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3901 tree high0, int in1_p, tree low1, tree high1)
/* NOTE(review): gapped excerpt -- leading numbers are the original file's
   line numbers; braces, declarations and some conditions are missing in the
   gaps.  A NULL bound (0) stands for "unbounded" throughout, per the
   (low == 0 && ...) tests below.  */
/* LOWEQUAL/HIGHEQUAL: the corresponding bounds are equal, treating two
   missing bounds as equal.  range_binop's integer flag arguments mark which
   direction an absent bound extends (0 = -infinity, 1 = +infinity).  */
3909 int lowequal = ((low0 == 0 && low1 == 0)
3910 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3911 low0, 0, low1, 0)));
3912 int highequal = ((high0 == 0 && high1 == 0)
3913 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3914 high0, 1, high1, 1)));
3916 /* Make range 0 be the range that starts first, or ends last if they
3917 start at the same value. Swap them if it isn't. */
3918 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3921 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3922 high1, 1, high0, 1))))
3924 temp = in0_p, in0_p = in1_p, in1_p = temp;
3925 tem = low0, low0 = low1, low1 = tem;
3926 tem = high0, high0 = high1, high1 = tem;
3929 /* Now flag two cases, whether the ranges are disjoint or whether the
3930 second range is totally subsumed in the first. Note that the tests
3931 below are simplified by the ones above. */
3932 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3933 high0, 1, low1, 0));
3934 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3935 high1, 1, high0, 1));
3937 /* We now have four cases, depending on whether we are including or
3938 excluding the two ranges. */
/* Case 1: include AND include (the guarding condition is in a gap).  */
3941 /* If they don't overlap, the result is false. If the second range
3942 is a subset it is the result. Otherwise, the range is from the start
3943 of the second to the end of the first. */
3945 in_p = 0, low = high = 0;
3947 in_p = 1, low = low1, high = high1;
3949 in_p = 1, low = low1, high = high0;
/* Case 2: include range 0, exclude range 1.  */
3952 else if (in0_p && ! in1_p)
3954 /* If they don't overlap, the result is the first range. If they are
3955 equal, the result is false. If the second range is a subset of the
3956 first, and the ranges begin at the same place, we go from just after
3957 the end of the first range to the end of the second. If the second
3958 range is not a subset of the first, or if it is a subset and both
3959 ranges end at the same place, the range starts at the start of the
3960 first range and ends just before the second range.
3961 Otherwise, we can't describe this as a single range. */
3963 in_p = 1, low = low0, high = high0;
3964 else if (lowequal && highequal)
3965 in_p = 0, low = high = 0;
3966 else if (subset && lowequal)
3968 in_p = 1, high = high0;
3969 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3970 integer_one_node, 0);
3972 else if (! subset || highequal)
3974 in_p = 1, low = low0;
3975 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3976 integer_one_node, 0);
/* Case 3: exclude range 0, include range 1.  */
3982 else if (! in0_p && in1_p)
3984 /* If they don't overlap, the result is the second range. If the second
3985 is a subset of the first, the result is false. Otherwise,
3986 the range starts just after the first range and ends at the
3987 end of the second. */
3989 in_p = 1, low = low1, high = high1;
3990 else if (subset || highequal)
3991 in_p = 0, low = high = 0;
3994 in_p = 1, high = high1;
3995 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3996 integer_one_node, 0);
/* Case 4: exclude both ranges.  */
4002 /* The case where we are excluding both ranges. Here the complex case
4003 is if they don't overlap. In that case, the only time we have a
4004 range is if they are adjacent. If the second is a subset of the
4005 first, the result is the first. Otherwise, the range to exclude
4006 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: high0 + 1 == low1 means the excluded ranges abut and
   can be merged into one excluded span [low0, high1].  */
4010 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4011 range_binop (PLUS_EXPR, NULL_TREE,
4013 integer_one_node, 1),
4015 in_p = 0, low = low0, high = high1;
4018 /* Canonicalize - [min, x] into - [-, x]. */
4019 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4020 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only types whose precision fills the whole mode can be canonicalized;
   otherwise the type's min/max don't coincide with the mode's.  */
4023 if (TYPE_PRECISION (TREE_TYPE (low0))
4024 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4029 if (tree_int_cst_equal (low0,
4030 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4034 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4035 && integer_zerop (low0))
4042 /* Canonicalize - [x, max] into - [x, -]. */
4043 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4044 switch (TREE_CODE (TREE_TYPE (high1)))
4047 if (TYPE_PRECISION (TREE_TYPE (high1))
4048 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4053 if (tree_int_cst_equal (high1,
4054 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* Unsigned max detection: high1 + 1 wraps to zero.  */
4058 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4059 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4061 integer_one_node, 1)))
4068 /* The ranges might be also adjacent between the maximum and
4069 minimum values of the given type. For
4070 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4071 return + [x + 1, y - 1]. */
4072 if (low0 == 0 && high1 == 0)
4074 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4075 integer_one_node, 1);
4076 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4077 integer_one_node, 0);
4078 if (low == 0 || high == 0)
/* Fallback exclusions (their guarding conditions are in gaps).  */
4088 in_p = 0, low = low0, high = high0;
4090 in_p = 0, low = low0, high = high1;
/* Write the merged range back through the output parameters.  */
4093 *pin_p = in_p, *plow = low, *phigh = high;
4098 /* Subroutine of fold, looking inside expressions of the form
4099 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4100 of the COND_EXPR. This function is being used also to optimize
4101 A op B ? C : A, by reversing the comparison first.
4103 Return a folded expression whose code is not a COND_EXPR
4104 anymore, or NULL_TREE if no folding opportunity is found. */
4107 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* NOTE(review): gapped excerpt -- leading numbers are the original file's
   line numbers; braces, case labels and some conditions fall in the gaps.
   ARG0 is the comparison, ARG1/ARG2 the two COND_EXPR arms (see the header
   comment above this function).  */
4109 enum tree_code comp_code = TREE_CODE (arg0);
4110 tree arg00 = TREE_OPERAND (arg0, 0);
4111 tree arg01 = TREE_OPERAND (arg0, 1);
4112 tree arg1_type = TREE_TYPE (arg1);
4118 /* If we have A op 0 ? A : -A, consider applying the following
4121 A == 0? A : -A same as -A
4122 A != 0? A : -A same as A
4123 A >= 0? A : -A same as abs (A)
4124 A > 0? A : -A same as abs (A)
4125 A <= 0? A : -A same as -abs (A)
4126 A < 0? A : -A same as -abs (A)
4128 None of these transformations work for modes with signed
4129 zeros. If A is +/-0, the first two transformations will
4130 change the sign of the result (from +0 to -0, or vice
4131 versa). The last four will fix the sign of the result,
4132 even though the original expressions could be positive or
4133 negative, depending on the sign of A.
4135 Note that all these transformations are correct if A is
4136 NaN, since the two alternatives (A and -A) are also NaNs. */
4137 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4138 ? real_zerop (arg01)
4139 : integer_zerop (arg01))
4140 && TREE_CODE (arg2) == NEGATE_EXPR
4141 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* A == 0 ? A : -A  -->  -A  (case label is in a gap).  */
4145 tem = fold_convert (arg1_type, arg1);
4146 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
/* A != 0 ? A : -A  -->  A.  */
4148 return pedantic_non_lvalue (fold_convert (type, arg1));
/* A >= 0 / A > 0 ? A : -A  -->  abs (A); force a signed type first so
   ABS_EXPR is meaningful.  */
4151 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4152 arg1 = fold_convert (lang_hooks.types.signed_type
4153 (TREE_TYPE (arg1)), arg1);
4154 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4155 return pedantic_non_lvalue (fold_convert (type, tem));
/* A <= 0 / A < 0 ? A : -A  -->  -abs (A).  */
4158 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4159 arg1 = fold_convert (lang_hooks.types.signed_type
4160 (TREE_TYPE (arg1)), arg1);
4161 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4162 return negate_expr (fold_convert (type, tem));
4167 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4168 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4169 both transformations are correct when A is NaN: A != 0
4170 is then true, and A == 0 is false. */
4172 if (integer_zerop (arg01) && integer_zerop (arg2))
4174 if (comp_code == NE_EXPR)
4175 return pedantic_non_lvalue (fold_convert (type, arg1));
4176 else if (comp_code == EQ_EXPR)
4177 return fold_convert (type, integer_zero_node);
4180 /* Try some transformations of A op B ? A : B.
4182 A == B? A : B same as B
4183 A != B? A : B same as A
4184 A >= B? A : B same as max (A, B)
4185 A > B? A : B same as max (B, A)
4186 A <= B? A : B same as min (A, B)
4187 A < B? A : B same as min (B, A)
4189 As above, these transformations don't work in the presence
4190 of signed zeros. For example, if A and B are zeros of
4191 opposite sign, the first two transformations will change
4192 the sign of the result. In the last four, the original
4193 expressions give different results for (A=+0, B=-0) and
4194 (A=-0, B=+0), but the transformed expressions do not.
4196 The first two transformations are correct if either A or B
4197 is a NaN. In the first transformation, the condition will
4198 be false, and B will indeed be chosen. In the case of the
4199 second transformation, the condition A != B will be true,
4200 and A will be chosen.
4202 The conversions to max() and min() are not correct if B is
4203 a number and A is not. The conditions in the original
4204 expressions will be false, so all four give B. The min()
4205 and max() versions would give a NaN instead. */
4206 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4208 tree comp_op0 = arg00;
4209 tree comp_op1 = arg01;
4210 tree comp_type = TREE_TYPE (comp_op0);
4212 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4213 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* A == B ? A : B  -->  B;  A != B ? A : B  -->  A.  */
4223 return pedantic_non_lvalue (fold_convert (type, arg2));
4225 return pedantic_non_lvalue (fold_convert (type, arg1));
4228 /* In C++ a ?: expression can be an lvalue, so put the
4229 operand which will be used if they are equal first
4230 so that we can convert this back to the
4231 corresponding COND_EXPR. */
/* MIN case (LE/LT -- exact case labels are in a gap); only safe when the
   mode cannot carry NaNs.  */
4232 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4234 comp_op0 = fold_convert (comp_type, comp_op0);
4235 comp_op1 = fold_convert (comp_type, comp_op1);
4236 tem = fold (build2 (MIN_EXPR, comp_type,
4237 (comp_code == LE_EXPR
4238 ? comp_op0 : comp_op1),
4239 (comp_code == LE_EXPR
4240 ? comp_op1 : comp_op0)));
4241 return pedantic_non_lvalue (fold_convert (type, tem));
/* MAX case (GE/GT), same NaN restriction.  */
4246 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4248 comp_op0 = fold_convert (comp_type, comp_op0);
4249 comp_op1 = fold_convert (comp_type, comp_op1);
4250 tem = fold (build2 (MAX_EXPR, comp_type,
4251 (comp_code == GE_EXPR
4252 ? comp_op0 : comp_op1),
4253 (comp_code == GE_EXPR
4254 ? comp_op1 : comp_op0)));
4255 tem = fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1));
4256 return pedantic_non_lvalue (fold_convert (type, tem));
4264 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4265 we might still be able to simplify this. For example,
4266 if C1 is one less or one more than C2, this might have started
4267 out as a MIN or MAX and been transformed by this function.
4268 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4270 if (INTEGRAL_TYPE_P (type)
4271 && TREE_CODE (arg01) == INTEGER_CST
4272 && TREE_CODE (arg2) == INTEGER_CST)
4276 /* We can replace A with C1 in this case. */
4277 arg1 = fold_convert (type, arg01);
4278 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4281 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX_VALUE guard avoids treating a wrap-around C2 + 1 as
   adjacent constants.  */
4282 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4284 && operand_equal_p (arg01,
4285 const_binop (PLUS_EXPR, arg2,
4286 integer_one_node, 0),
4288 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4289 type, arg1, arg2)));
4293 /* If C1 is C2 - 1, this is min(A, C2). */
4294 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4296 && operand_equal_p (arg01,
4297 const_binop (MINUS_EXPR, arg2,
4298 integer_one_node, 0),
4300 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4301 type, arg1, arg2)));
4305 /* If C1 is C2 - 1, this is max(A, C2). */
4306 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4308 && operand_equal_p (arg01,
4309 const_binop (MINUS_EXPR, arg2,
4310 integer_one_node, 0),
4312 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4313 type, arg1, arg2)));
4317 /* If C1 is C2 + 1, this is max(A, C2). */
4318 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4320 && operand_equal_p (arg01,
4321 const_binop (PLUS_EXPR, arg2,
4322 integer_one_node, 0),
4324 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4325 type, arg1, arg2)));
4338 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4339 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4342 /* EXP is some logical combination of boolean tests. See if we can
4343 merge it into some range test. Return the new tree if so. */
4346 fold_range_test (tree exp)
/* NOTE(review): gapped excerpt -- leading numbers are the original file's
   line numbers; braces and some argument lists are missing in the gaps.  */
/* EXP is a TRUTH_AND/OR (possibly short-circuit) of two boolean tests;
   try to express each side as a value range and merge them.  */
4348 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4349 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4350 int in0_p, in1_p, in_p;
4351 tree low0, low1, low, high0, high1, high;
4352 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4353 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4356 /* If this is an OR operation, invert both sides; we will invert
4357 again at the end. */
4359 in0_p = ! in0_p, in1_p = ! in1_p;
4361 /* If both expressions are the same, if we can merge the ranges, and we
4362 can build the range test, return it or it inverted. If one of the
4363 ranges is always true or always false, consider it to be the same
4364 expression as the other. */
4365 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4366 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4368 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4370 : rhs != 0 ? rhs : integer_zero_node,
4372 return or_op ? invert_truthvalue (tem) : tem;
4374 /* On machines where the branch cost is expensive, if this is a
4375 short-circuited branch and the underlying object on both sides
4376 is the same, make a non-short-circuit operation. */
4377 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4378 && lhs != 0 && rhs != 0
4379 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4380 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4381 && operand_equal_p (lhs, rhs, 0))
4383 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4384 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4385 which cases we can't do this. */
4386 if (simple_operand_p (lhs))
4387 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4388 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4389 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4390 TREE_OPERAND (exp, 1));
4392 else if (lang_hooks.decls.global_bindings_p () == 0
4393 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the shared operand in a SAVE_EXPR so it is evaluated once,
   then rebuild both range checks against that common value.  */
4395 tree common = save_expr (lhs);
4397 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4398 or_op ? ! in0_p : in0_p,
4400 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4401 or_op ? ! in1_p : in1_p,
4403 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4404 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4405 TREE_TYPE (exp), lhs, rhs);
4412 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4413 bit value. Arrange things so the extra bits will be set to zero if and
4414 only if C is signed-extended to its full width. If MASK is nonzero,
4415 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4418 unextend (tree c, int p, int unsignedp, tree mask)
/* NOTE(review): gapped excerpt -- leading numbers are the original file's
   line numbers; the opening brace and some lines are in the gaps.  See the
   header comment above: C is an INTEGER_CST treated as a P-bit value.  */
4420 tree type = TREE_TYPE (c);
4421 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* A full-width or unsigned value needs no sign manipulation (the return
   on this path is in a gap).  */
4424 if (p == modesize || unsignedp)
4427 /* We work by getting just the sign bit into the low-order bit, then
4428 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit field) as a 0/1 value.  */
4430 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4431 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4433 /* We must use a signed type in order to get an arithmetic right shift.
4434 However, we must also avoid introducing accidental overflows, so that
4435 a subsequent call to integer_zerop will work. Hence we must
4436 do the type conversion here. At this point, the constant is either
4437 zero or one, and the conversion to a signed type can never overflow.
4438 We could get an overflow if this conversion is done anywhere else. */
4439 if (TYPE_UNSIGNED (type))
4440 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Move the bit to the top, then arithmetic-shift it back down so the
   bits above position P-1 become copies of the sign bit.  */
4442 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4443 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* If a MASK was supplied, restrict the extension bits to it (the guard
   testing MASK != 0 is in a gap).  */
4445 temp = const_binop (BIT_AND_EXPR, temp,
4446 fold_convert (TREE_TYPE (c), mask), 0);
4447 /* If necessary, convert the type back to match the type of C. */
4448 if (TYPE_UNSIGNED (type))
4449 temp = fold_convert (type, temp);
4451 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4454 /* Find ways of folding logical expressions of LHS and RHS:
4455 Try to merge two comparisons to the same innermost item.
4456 Look for range tests like "ch >= '0' && ch <= '9'".
4457 Look for combinations of simple terms on machines with expensive branches
4458 and evaluate the RHS unconditionally.
4460 For example, if we have p->a == 2 && p->b == 4 and we can make an
4461 object large enough to span both A and B, we can do this with a comparison
4462 against the object ANDed with the a mask.
4464 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4465 operations to do this with one comparison.
4467 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4468 function and the one above.
4470 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4471 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4473 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4476 We return the simplified tree or 0 if no optimization is possible. */
4479 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4481 /* If this is the "or" of two comparisons, we can do something if
4482 the comparisons are NE_EXPR. If this is the "and", we can do something
4483 if the comparisons are EQ_EXPR. I.e.,
4484 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4486 WANTED_CODE is this operation code. For single bit fields, we can
4487 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4488 comparison for one-bit fields. */
4490 enum tree_code wanted_code;
4491 enum tree_code lcode, rcode;
4492 tree ll_arg, lr_arg, rl_arg, rr_arg;
4493 tree ll_inner, lr_inner, rl_inner, rr_inner;
4494 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4495 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4496 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4497 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4498 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4499 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4500 enum machine_mode lnmode, rnmode;
4501 tree ll_mask, lr_mask, rl_mask, rr_mask;
4502 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4503 tree l_const, r_const;
4504 tree lntype, rntype, result;
4505 int first_bit, end_bit;
4508 /* Start by getting the comparison codes. Fail if anything is volatile.
4509 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4510 it were surrounded with a NE_EXPR. */
4512 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4515 lcode = TREE_CODE (lhs);
4516 rcode = TREE_CODE (rhs);
4518 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4520 lhs = build2 (NE_EXPR, truth_type, lhs,
4521 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4525 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4527 rhs = build2 (NE_EXPR, truth_type, rhs,
4528 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4532 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4535 ll_arg = TREE_OPERAND (lhs, 0);
4536 lr_arg = TREE_OPERAND (lhs, 1);
4537 rl_arg = TREE_OPERAND (rhs, 0);
4538 rr_arg = TREE_OPERAND (rhs, 1);
4540 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4541 if (simple_operand_p (ll_arg)
4542 && simple_operand_p (lr_arg))
4545 if (operand_equal_p (ll_arg, rl_arg, 0)
4546 && operand_equal_p (lr_arg, rr_arg, 0))
4548 result = combine_comparisons (code, lcode, rcode,
4549 truth_type, ll_arg, lr_arg);
4553 else if (operand_equal_p (ll_arg, rr_arg, 0)
4554 && operand_equal_p (lr_arg, rl_arg, 0))
4556 result = combine_comparisons (code, lcode,
4557 swap_tree_comparison (rcode),
4558 truth_type, ll_arg, lr_arg);
4564 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4565 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4567 /* If the RHS can be evaluated unconditionally and its operands are
4568 simple, it wins to evaluate the RHS unconditionally on machines
4569 with expensive branches. In this case, this isn't a comparison
4570 that can be merged. Avoid doing this if the RHS is a floating-point
4571 comparison since those can trap. */
4573 if (BRANCH_COST >= 2
4574 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4575 && simple_operand_p (rl_arg)
4576 && simple_operand_p (rr_arg))
4578 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4579 if (code == TRUTH_OR_EXPR
4580 && lcode == NE_EXPR && integer_zerop (lr_arg)
4581 && rcode == NE_EXPR && integer_zerop (rr_arg)
4582 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4583 return build2 (NE_EXPR, truth_type,
4584 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4586 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4588 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4589 if (code == TRUTH_AND_EXPR
4590 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4591 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4592 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4593 return build2 (EQ_EXPR, truth_type,
4594 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4596 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4598 return build2 (code, truth_type, lhs, rhs);
4601 /* See if the comparisons can be merged. Then get all the parameters for
4604 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4605 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4609 ll_inner = decode_field_reference (ll_arg,
4610 &ll_bitsize, &ll_bitpos, &ll_mode,
4611 &ll_unsignedp, &volatilep, &ll_mask,
4613 lr_inner = decode_field_reference (lr_arg,
4614 &lr_bitsize, &lr_bitpos, &lr_mode,
4615 &lr_unsignedp, &volatilep, &lr_mask,
4617 rl_inner = decode_field_reference (rl_arg,
4618 &rl_bitsize, &rl_bitpos, &rl_mode,
4619 &rl_unsignedp, &volatilep, &rl_mask,
4621 rr_inner = decode_field_reference (rr_arg,
4622 &rr_bitsize, &rr_bitpos, &rr_mode,
4623 &rr_unsignedp, &volatilep, &rr_mask,
4626 /* It must be true that the inner operation on the lhs of each
4627 comparison must be the same if we are to be able to do anything.
4628 Then see if we have constants. If not, the same must be true for
4630 if (volatilep || ll_inner == 0 || rl_inner == 0
4631 || ! operand_equal_p (ll_inner, rl_inner, 0))
4634 if (TREE_CODE (lr_arg) == INTEGER_CST
4635 && TREE_CODE (rr_arg) == INTEGER_CST)
4636 l_const = lr_arg, r_const = rr_arg;
4637 else if (lr_inner == 0 || rr_inner == 0
4638 || ! operand_equal_p (lr_inner, rr_inner, 0))
4641 l_const = r_const = 0;
4643 /* If either comparison code is not correct for our logical operation,
4644 fail. However, we can convert a one-bit comparison against zero into
4645 the opposite comparison against that bit being set in the field. */
4647 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4648 if (lcode != wanted_code)
4650 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4652 /* Make the left operand unsigned, since we are only interested
4653 in the value of one bit. Otherwise we are doing the wrong
4662 /* This is analogous to the code for l_const above. */
4663 if (rcode != wanted_code)
4665 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4674 /* After this point all optimizations will generate bit-field
4675 references, which we might not want. */
4676 if (! lang_hooks.can_use_bit_fields_p ())
4679 /* See if we can find a mode that contains both fields being compared on
4680 the left. If we can't, fail. Otherwise, update all constants and masks
4681 to be relative to a field of that size. */
4682 first_bit = MIN (ll_bitpos, rl_bitpos);
4683 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4684 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4685 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4687 if (lnmode == VOIDmode)
4690 lnbitsize = GET_MODE_BITSIZE (lnmode);
4691 lnbitpos = first_bit & ~ (lnbitsize - 1);
4692 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4693 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4695 if (BYTES_BIG_ENDIAN)
4697 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4698 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4701 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4702 size_int (xll_bitpos), 0);
4703 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4704 size_int (xrl_bitpos), 0);
4708 l_const = fold_convert (lntype, l_const);
4709 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4710 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4711 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4712 fold (build1 (BIT_NOT_EXPR,
4716 warning ("comparison is always %d", wanted_code == NE_EXPR);
4718 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4723 r_const = fold_convert (lntype, r_const);
4724 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4725 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4726 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4727 fold (build1 (BIT_NOT_EXPR,
4731 warning ("comparison is always %d", wanted_code == NE_EXPR);
4733 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4737 /* If the right sides are not constant, do the same for it. Also,
4738 disallow this optimization if a size or signedness mismatch occurs
4739 between the left and right sides. */
4742 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4743 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4744 /* Make sure the two fields on the right
4745 correspond to the left without being swapped. */
4746 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4749 first_bit = MIN (lr_bitpos, rr_bitpos);
4750 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4751 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4752 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4754 if (rnmode == VOIDmode)
4757 rnbitsize = GET_MODE_BITSIZE (rnmode);
4758 rnbitpos = first_bit & ~ (rnbitsize - 1);
4759 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4760 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4762 if (BYTES_BIG_ENDIAN)
4764 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4765 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4768 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4769 size_int (xlr_bitpos), 0);
4770 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4771 size_int (xrr_bitpos), 0);
4773 /* Make a mask that corresponds to both fields being compared.
4774 Do this for both items being compared. If the operands are the
4775 same size and the bits being compared are in the same position
4776 then we can do this by masking both and comparing the masked
4778 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4779 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4780 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4782 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4783 ll_unsignedp || rl_unsignedp);
4784 if (! all_ones_mask_p (ll_mask, lnbitsize))
4785 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4787 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4788 lr_unsignedp || rr_unsignedp);
4789 if (! all_ones_mask_p (lr_mask, rnbitsize))
4790 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4792 return build2 (wanted_code, truth_type, lhs, rhs);
4795 /* There is still another way we can do something: If both pairs of
4796 fields being compared are adjacent, we may be able to make a wider
4797 field containing them both.
4799 Note that we still must mask the lhs/rhs expressions. Furthermore,
4800 the mask must be shifted to account for the shift done by
4801 make_bit_field_ref. */
4802 if ((ll_bitsize + ll_bitpos == rl_bitpos
4803 && lr_bitsize + lr_bitpos == rr_bitpos)
4804 || (ll_bitpos == rl_bitpos + rl_bitsize
4805 && lr_bitpos == rr_bitpos + rr_bitsize))
4809 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4810 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4811 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4812 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4814 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4815 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4816 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4817 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4819 /* Convert to the smaller type before masking out unwanted bits. */
4821 if (lntype != rntype)
4823 if (lnbitsize > rnbitsize)
4825 lhs = fold_convert (rntype, lhs);
4826 ll_mask = fold_convert (rntype, ll_mask);
4829 else if (lnbitsize < rnbitsize)
4831 rhs = fold_convert (lntype, rhs);
4832 lr_mask = fold_convert (lntype, lr_mask);
4837 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4838 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4840 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4841 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4843 return build2 (wanted_code, truth_type, lhs, rhs);
4849 /* Handle the case of comparisons with constants. If there is something in
4850 common between the masks, those bits of the constants must be the same.
4851 If not, the condition is always false. Test for this to avoid generating
4852 incorrect code below. */
4853 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4854 if (! integer_zerop (result)
4855 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4856 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4858 if (wanted_code == NE_EXPR)
4860 warning ("`or' of unmatched not-equal tests is always 1");
4861 return constant_boolean_node (true, truth_type);
4865 warning ("`and' of mutually exclusive equal-tests is always 0");
4866 return constant_boolean_node (false, truth_type);
4870 /* Construct the expression we will return. First get the component
4871 reference we will make. Unless the mask is all ones the width of
4872 that field, perform the mask operation. Then compare with the
4874 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4875 ll_unsignedp || rl_unsignedp);
4877 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4878 if (! all_ones_mask_p (ll_mask, lnbitsize))
4879 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4881 return build2 (wanted_code, truth_type, result,
4882 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4885 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant (the second operand of T).  Returns a simplified comparison
   tree when one of the patterns below applies, otherwise falls back to
   returning T unchanged.  */
4889 optimize_minmax_comparison (tree t)
4891 tree type = TREE_TYPE (t);
4892 tree arg0 = TREE_OPERAND (t, 0);
4893 enum tree_code op_code;
4894 tree comp_const = TREE_OPERAND (t, 1);
4896 int consts_equal, consts_lt;
4899 STRIP_SIGN_NOPS (arg0);
4901 op_code = TREE_CODE (arg0);
4902 minmax_const = TREE_OPERAND (arg0, 1);
/* The relative order of the MIN/MAX constant and the comparison
   constant selects which simplification below applies.  */
4903 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4904 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4905 inner = TREE_OPERAND (arg0, 0);
4907 /* If something does not permit us to optimize, return the original tree. */
4908 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4909 || TREE_CODE (comp_const) != INTEGER_CST
4910 || TREE_CONSTANT_OVERFLOW (comp_const)
4911 || TREE_CODE (minmax_const) != INTEGER_CST
4912 || TREE_CONSTANT_OVERFLOW (minmax_const))
4915 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4916 and GT_EXPR, doing the rest with recursive calls using logical
   identities: NE/LT/LE via double inversion, GE as (EQ || GT).  */
4918 switch (TREE_CODE (t))
4920 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* These codes are the negations of handled ones: invert, recurse on
   the inverted comparison, and invert the result back.  */
4922 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* GE_EXPR is decomposed as (arg0 == c) || (arg0 > c), each folded
   recursively.  */
4926 fold (build2 (TRUTH_ORIF_EXPR, type,
4927 optimize_minmax_comparison
4928 (build2 (EQ_EXPR, type, arg0, comp_const)),
4929 optimize_minmax_comparison
4930 (build2 (GT_EXPR, type, arg0, comp_const))));
/* EQ_EXPR cases.  The comments below use MAX (X, 0) as an example;
   the actual constants are minmax_const and comp_const.  */
4933 if (op_code == MAX_EXPR && consts_equal)
4934 /* MAX (X, 0) == 0 -> X <= 0 */
4935 return fold (build2 (LE_EXPR, type, inner, comp_const));
4937 else if (op_code == MAX_EXPR && consts_lt)
4938 /* MAX (X, 0) == 5 -> X == 5 */
4939 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4941 else if (op_code == MAX_EXPR)
4942 /* MAX (X, 0) == -1 -> false */
4943 return omit_one_operand (type, integer_zero_node, inner);
4945 else if (consts_equal)
4946 /* MIN (X, 0) == 0 -> X >= 0 */
4947 return fold (build2 (GE_EXPR, type, inner, comp_const));
4950 /* MIN (X, 0) == 5 -> false */
4951 return omit_one_operand (type, integer_zero_node, inner);
4954 /* MIN (X, 0) == -1 -> X == -1 */
4955 return fold (build2 (EQ_EXPR, type, inner, comp_const));
/* GT_EXPR cases.  */
4958 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4959 /* MAX (X, 0) > 0 -> X > 0
4960 MAX (X, 0) > 5 -> X > 5 */
4961 return fold (build2 (GT_EXPR, type, inner, comp_const));
4963 else if (op_code == MAX_EXPR)
4964 /* MAX (X, 0) > -1 -> true */
4965 return omit_one_operand (type, integer_one_node, inner);
4967 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4968 /* MIN (X, 0) > 0 -> false
4969 MIN (X, 0) > 5 -> false */
4970 return omit_one_operand (type, integer_zero_node, inner);
4973 /* MIN (X, 0) > -1 -> X > -1 */
4974 return fold (build2 (GT_EXPR, type, inner, comp_const));
4981 /* T is an integer expression that is being multiplied, divided, or taken a
4982 modulus (CODE says which and what kind of divide or modulus) by a
4983 constant C. See if we can eliminate that operation by folding it with
4984 other operations already in T. WIDE_TYPE, if non-null, is a type that
4985 should be used for the computation if wider than our type.
4987 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4988 (X * 2) + (Y * 4). We must, however, be assured that either the original
4989 expression would not overflow or that overflow is undefined for the type
4990 in the language in question.
4992 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4993 the machine has a multiply-accumulate insn or that this is part of an
4994 addressing calculation.
4996 If we return a non-null expression, it is an equivalent form of the
4997 original computation, but need not be in the original type. */
/* Thin depth-limiting wrapper around extract_muldiv_1, which does the
   actual pattern matching.  */
5000 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5002 /* To avoid exponential search depth, refuse to allow recursion past
5003 three levels. Beyond that (1) it's highly unlikely that we'll find
5004 something interesting and (2) we've probably processed it before
5005 when we built the inner expression. */
/* NOTE(review): the depth-counter increment/decrement and early-exit
   lines are elided in this extract — confirm against the full source.  */
5014 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: attempt to fold the operation CODE by
   constant C into the subexpressions of T, using WIDE_TYPE for the
   computation when it is wider than T's type.  Returns the rewritten
   tree, or NULL_TREE (via elided paths) when no simplification applies.  */
5021 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5023 tree type = TREE_TYPE (t);
5024 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE only when it is strictly wider than TYPE.  */
5025 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5026 > GET_MODE_SIZE (TYPE_MODE (type)))
5027 ? wide_type : type);
5029 int same_p = tcode == code;
5030 tree op0 = NULL_TREE, op1 = NULL_TREE;
5032 /* Don't deal with constants of zero here; they confuse the code below. */
5033 if (integer_zerop (c))
5036 if (TREE_CODE_CLASS (tcode) == '1')
5037 op0 = TREE_OPERAND (t, 0);
5039 if (TREE_CODE_CLASS (tcode) == '2')
5040 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5042 /* Note that we need not handle conditional operations here since fold
5043 already handles those cases. So just do arithmetic here. */
/* NOTE(review): the `switch (tcode)` dispatch and its INTEGER_CST case
   label are elided in this extract; the following code is the body of
   the constant case.  */
5047 /* For a constant, we can always simplify if we are a multiply
5048 or (for divide and modulus) if it is a multiple of our constant. */
5049 if (code == MULT_EXPR
5050 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5051 return const_binop (code, fold_convert (ctype, t),
5052 fold_convert (ctype, c), 0);
5055 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5056 /* If op0 is an expression ... */
5057 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5058 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5059 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5060 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5061 /* ... and is unsigned, and its type is smaller than ctype,
5062 then we cannot pass through as widening. */
5063 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5064 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5065 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5066 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5067 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5068 /* ... or this is a truncation (t is narrower than op0),
5069 then we cannot pass through this narrowing. */
5070 || (GET_MODE_SIZE (TYPE_MODE (type))
5071 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5072 /* ... or signedness changes for division or modulus,
5073 then we cannot pass through this conversion. */
5074 || (code != MULT_EXPR
5075 && (TYPE_UNSIGNED (ctype)
5076 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5079 /* Pass the constant down and see if we can make a simplification. If
5080 we can, replace this expression with the inner simplification for
5081 possible later conversion to our or some other type. */
5082 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5083 && TREE_CODE (t2) == INTEGER_CST
5084 && ! TREE_CONSTANT_OVERFLOW (t2)
5085 && (0 != (t1 = extract_muldiv (op0, t2, code,
5087 ? ctype : NULL_TREE))))
5091 case NEGATE_EXPR: case ABS_EXPR:
/* CODE distributes over negation and absolute value, so recurse into
   the operand and re-wrap the result.  */
5092 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5093 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5096 case MIN_EXPR: case MAX_EXPR:
5097 /* If widening the type changes the signedness, then we can't perform
5098 this optimization as that changes the result. */
5099 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5102 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5103 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5104 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing/multiplying by a negative constant reverses the ordering,
   so MIN becomes MAX and vice versa.  */
5106 if (tree_int_cst_sgn (c) < 0)
5107 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5109 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5110 fold_convert (ctype, t2)));
5114 case LSHIFT_EXPR: case RSHIFT_EXPR:
5115 /* If the second operand is constant, this is a multiplication
5116 or floor division, by a power of two, so we can treat it that
5117 way unless the multiplier or divisor overflows. Signed
5118 left-shift overflow is implementation-defined rather than
5119 undefined in C90, so do not convert signed left shift into
   multiplication in that case.  */
5121 if (TREE_CODE (op1) == INTEGER_CST
5122 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5123 /* const_binop may not detect overflow correctly,
5124 so check for it explicitly here. */
5125 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5126 && TREE_INT_CST_HIGH (op1) == 0
5127 && 0 != (t1 = fold_convert (ctype,
5128 const_binop (LSHIFT_EXPR,
5131 && ! TREE_OVERFLOW (t1))
/* Rewrite the shift as MULT/FLOOR_DIV by 1 << op1 and retry.  */
5132 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5133 ? MULT_EXPR : FLOOR_DIV_EXPR,
5134 ctype, fold_convert (ctype, op0), t1),
5135 c, code, wide_type);
5138 case PLUS_EXPR: case MINUS_EXPR:
5139 /* See if we can eliminate the operation on both sides. If we can, we
5140 can return a new PLUS or MINUS. If we can't, the only remaining
5141 cases where we can do anything are if the second operand is a
   constant.  */
5143 t1 = extract_muldiv (op0, c, code, wide_type);
5144 t2 = extract_muldiv (op1, c, code, wide_type);
5145 if (t1 != 0 && t2 != 0
5146 && (code == MULT_EXPR
5147 /* If not multiplication, we can only do this if both operands
5148 are divisible by c. */
5149 || (multiple_of_p (ctype, op0, c)
5150 && multiple_of_p (ctype, op1, c))))
5151 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5152 fold_convert (ctype, t2)));
5154 /* If this was a subtraction, negate OP1 and set it to be an addition.
5155 This simplifies the logic below. */
5156 if (tcode == MINUS_EXPR)
5157 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5159 if (TREE_CODE (op1) != INTEGER_CST)
5162 /* If either OP1 or C are negative, this optimization is not safe for
5163 some of the division and remainder types while for others we need
5164 to change the code. */
5165 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5167 if (code == CEIL_DIV_EXPR)
5168 code = FLOOR_DIV_EXPR;
5169 else if (code == FLOOR_DIV_EXPR)
5170 code = CEIL_DIV_EXPR;
5171 else if (code != MULT_EXPR
5172 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5176 /* If it's a multiply or a division/modulus operation of a multiple
5177 of our constant, do the operation and verify it doesn't overflow. */
5178 if (code == MULT_EXPR
5179 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5181 op1 = const_binop (code, fold_convert (ctype, op1),
5182 fold_convert (ctype, c), 0);
5183 /* We allow the constant to overflow with wrapping semantics. */
5185 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5191 /* If we have an unsigned type is not a sizetype, we cannot widen
5192 the operation since it will change the result if the original
5193 computation overflowed. */
5194 if (TYPE_UNSIGNED (ctype)
5195 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5199 /* If we were able to eliminate our operation from the first side,
5200 apply our operation to the second side and reform the PLUS. */
5201 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5202 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5204 /* The last case is if we are a multiply. In that case, we can
5205 apply the distributive law to commute the multiply and addition
5206 if the multiplication of the constants doesn't overflow. */
5207 if (code == MULT_EXPR)
5208 return fold (build2 (tcode, ctype,
5209 fold (build2 (code, ctype,
5210 fold_convert (ctype, op0),
5211 fold_convert (ctype, c))),
5217 /* We have a special case here if we are doing something like
5218 (C * 8) % 4 since we know that's zero. */
5219 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5220 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5221 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5222 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5223 return omit_one_operand (type, integer_zero_node, op0);
5225 /* ... fall through ... */
5227 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5228 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5229 /* If we can extract our operation from the LHS, do so and return a
5230 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5231 do something only if the second operand is a constant. */
5233 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5234 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5235 fold_convert (ctype, op1)));
5236 else if (tcode == MULT_EXPR && code == MULT_EXPR
5237 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5238 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5239 fold_convert (ctype, t1)));
5240 else if (TREE_CODE (op1) != INTEGER_CST)
5243 /* If these are the same operation types, we can associate them
5244 assuming no overflow. */
5246 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5247 fold_convert (ctype, c), 0))
5248 && ! TREE_OVERFLOW (t1))
5249 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5251 /* If these operations "cancel" each other, we have the main
5252 optimizations of this pass, which occur when either constant is a
5253 multiple of the other, in which case we replace this with either an
5254 operation or CODE or TCODE.
5256 If we have an unsigned type that is not a sizetype, we cannot do
5257 this since it will change the result if the original computation
   overflowed.  */
5259 if ((! TYPE_UNSIGNED (ctype)
5260 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5262 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5263 || (tcode == MULT_EXPR
5264 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5265 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* op1 is a multiple of c: keep TCODE and scale its constant down.  */
5267 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5268 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5269 fold_convert (ctype,
5270 const_binop (TRUNC_DIV_EXPR,
/* c is a multiple of op1: switch to CODE with the scaled constant.  */
5272 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5273 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5274 fold_convert (ctype,
5275 const_binop (TRUNC_DIV_EXPR,
5287 /* Return a node which has the indicated constant VALUE (either 0 or
5288 1), and is of the indicated TYPE. */
5291 constant_boolean_node (int value, tree type)
/* Use the shared singleton nodes for the two most common types.  */
5293 if (type == integer_type_node)
5294 return value ? integer_one_node : integer_zero_node;
5295 else if (type == boolean_type_node)
5296 return value ? boolean_true_node : boolean_false_node;
/* Other BOOLEAN_TYPEs go through the language hook so front-end
   specific truth representations are honored.  */
5297 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5298 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5299 : integer_zero_node);
/* Fallback: build a fresh integer constant of TYPE.  */
5301 return build_int_cst (type, value);
5304 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5305 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5306 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5307 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5308 COND is the first argument to CODE; otherwise (as in the example
5309 given here), it is the second argument. TYPE is the type of the
5310 original expression. Return NULL_TREE if no simplification is
   possible.  */
5314 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5315 tree cond, tree arg, int cond_first_p)
5317 tree test, true_value, false_value;
5318 tree lhs = NULL_TREE;
5319 tree rhs = NULL_TREE;
5321 /* This transformation is only worthwhile if we don't have to wrap
5322 arg in a SAVE_EXPR, and the operation can be simplified on atleast
5323 one of the branches once its pushed inside the COND_EXPR. */
5324 if (!TREE_CONSTANT (arg))
5327 if (TREE_CODE (cond) == COND_EXPR)
5329 test = TREE_OPERAND (cond, 0);
5330 true_value = TREE_OPERAND (cond, 1);
5331 false_value = TREE_OPERAND (cond, 2);
5332 /* If this operand throws an expression, then it does not make
5333 sense to try to perform a logical or arithmetic operation
   involving it.  */
5335 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5337 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a comparison like `(x < y)': treat it as a COND_EXPR with
   constant true/false arms of the comparison's type.  */
5342 tree testtype = TREE_TYPE (cond);
5344 true_value = constant_boolean_node (true, testtype);
5345 false_value = constant_boolean_node (false, testtype);
/* Push the operation into each arm and fold the resulting COND_EXPR.  */
5349 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5350 : build2 (code, type, arg, true_value));
5352 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5353 : build2 (code, type, arg, false_value));
5355 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5356 return fold_convert (type, test);
5360 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5362 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5363 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5364 ADDEND is the same as X.
5366 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5367 and finite. The problematic cases are when X is zero, and its mode
5368 has signed zeros. In the case of rounding towards -infinity,
5369 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5370 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5373 fold_real_zero_addition_p (tree type, tree addend, int negate)
5375 if (!real_zerop (addend))
5378 /* Don't allow the fold with -fsignaling-nans. */
5379 if (HONOR_SNANS (TYPE_MODE (type)))
5382 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5383 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5386 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5387 if (TREE_CODE (addend) == REAL_CST
5388 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5391 /* The mode has signed zeros, and we have to honor their sign.
5392 In this situation, there is only one case we can return true for.
5393 X - 0 is the same as X unless rounding towards -infinity is
5395 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5398 /* Subroutine of fold() that checks comparisons of built-in math
5399 functions against real constants.
5401 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5402 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5403 is the type of the result and ARG0 and ARG1 are the operands of the
5404 comparison. ARG1 must be a TREE_REAL_CST.
5406 The function returns the constant folded tree if a simplification
5407 can be made, and NULL_TREE otherwise. */
5410 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5411 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled here.  */
5415 if (BUILTIN_SQRT_P (fcode))
/* ARG is the argument of the sqrt call; C is the constant compared
   against.  */
5417 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5418 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5420 c = TREE_REAL_CST (arg1);
5421 if (REAL_VALUE_NEGATIVE (c))
5423 /* sqrt(x) < y is always false, if y is negative. */
5424 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5425 return omit_one_operand (type, integer_zero_node, arg);
5427 /* sqrt(x) > y is always true, if y is negative and we
5428 don't care about NaNs, i.e. negative values of x. */
5429 if (code == NE_EXPR || !HONOR_NANS (mode))
5430 return omit_one_operand (type, integer_one_node, arg);
5432 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5433 return fold (build2 (GE_EXPR, type, arg,
5434 build_real (TREE_TYPE (arg), dconst0)));
5436 else if (code == GT_EXPR || code == GE_EXPR)
/* c2 = c*c, rounded in the argument's mode; comparing sqrt(x) with c
   is then comparing x with c2.  */
5440 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5441 real_convert (&c2, mode, &c2);
5443 if (REAL_VALUE_ISINF (c2))
5445 /* sqrt(x) > y is x == +Inf, when y is very large. */
5446 if (HONOR_INFINITIES (mode))
5447 return fold (build2 (EQ_EXPR, type, arg,
5448 build_real (TREE_TYPE (arg), c2)));
5450 /* sqrt(x) > y is always false, when y is very large
5451 and we don't care about infinities. */
5452 return omit_one_operand (type, integer_zero_node, arg);
5455 /* sqrt(x) > c is the same as x > c*c. */
5456 return fold (build2 (code, type, arg,
5457 build_real (TREE_TYPE (arg), c2)));
5459 else if (code == LT_EXPR || code == LE_EXPR)
5463 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5464 real_convert (&c2, mode, &c2);
5466 if (REAL_VALUE_ISINF (c2))
5468 /* sqrt(x) < y is always true, when y is a very large
5469 value and we don't care about NaNs or Infinities. */
5470 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5471 return omit_one_operand (type, integer_one_node, arg);
5473 /* sqrt(x) < y is x != +Inf when y is very large and we
5474 don't care about NaNs. */
5475 if (! HONOR_NANS (mode))
5476 return fold (build2 (NE_EXPR, type, arg,
5477 build_real (TREE_TYPE (arg), c2)));
5479 /* sqrt(x) < y is x >= 0 when y is very large and we
5480 don't care about Infinities. */
5481 if (! HONOR_INFINITIES (mode))
5482 return fold (build2 (GE_EXPR, type, arg,
5483 build_real (TREE_TYPE (arg), dconst0)));
5485 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so it must be evaluable more than once:
   bail out at global scope or when it contains a PLACEHOLDER_EXPR.  */
5486 if (lang_hooks.decls.global_bindings_p () != 0
5487 || CONTAINS_PLACEHOLDER_P (arg))
5490 arg = save_expr (arg);
5491 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5492 fold (build2 (GE_EXPR, type, arg,
5493 build_real (TREE_TYPE (arg),
5495 fold (build2 (NE_EXPR, type, arg,
5496 build_real (TREE_TYPE (arg),
5500 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5501 if (! HONOR_NANS (mode))
5502 return fold (build2 (code, type, arg,
5503 build_real (TREE_TYPE (arg), c2)));
5505 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5506 if (lang_hooks.decls.global_bindings_p () == 0
5507 && ! CONTAINS_PLACEHOLDER_P (arg))
5509 arg = save_expr (arg);
5510 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5511 fold (build2 (GE_EXPR, type, arg,
5512 build_real (TREE_TYPE (arg),
5514 fold (build2 (code, type, arg,
5515 build_real (TREE_TYPE (arg),
5524 /* Subroutine of fold() that optimizes comparisons against Infinities,
5525 either +Inf or -Inf.
5527 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5528 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5529 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5531 The function returns the constant folded tree if a simplification
5532 can be made, and NULL_TREE otherwise. */
5535 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5537 enum machine_mode mode;
5538 REAL_VALUE_TYPE max;
5542 mode = TYPE_MODE (TREE_TYPE (arg0));
5544 /* For negative infinity swap the sense of the comparison. */
5545 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5547 code = swap_tree_comparison (code);
/* NOTE(review): the `switch (code)` and its case labels are elided in
   this extract; each group below handles one comparison code.  */
5552 /* x > +Inf is always false, if we ignore sNaNs.  */
5553 if (HONOR_SNANS (mode))
5555 return omit_one_operand (type, integer_zero_node, arg0);
5558 /* x <= +Inf is always true, if we don't care about NaNs.  */
5559 if (! HONOR_NANS (mode))
5560 return omit_one_operand (type, integer_one_node, arg0);
5562 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5563 if (lang_hooks.decls.global_bindings_p () == 0
5564 && ! CONTAINS_PLACEHOLDER_P (arg0))
5566 arg0 = save_expr (arg0);
5567 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5573 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5574 real_maxval (&max, neg, mode);
5575 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5576 arg0, build_real (TREE_TYPE (arg0), max)));
5579 /* x < +Inf is always equal to x <= DBL_MAX. */
5580 real_maxval (&max, neg, mode);
5581 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5582 arg0, build_real (TREE_TYPE (arg0), max)));
5585 /* x != +Inf is always equal to !(x > DBL_MAX). */
5586 real_maxval (&max, neg, mode);
5587 if (! HONOR_NANS (mode))
5588 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5589 arg0, build_real (TREE_TYPE (arg0), max)));
5591 /* The transformation below creates non-gimple code and thus is
5592 not appropriate if we are in gimple form. */
5596 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5597 arg0, build_real (TREE_TYPE (arg0), max)));
5598 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5607 /* Subroutine of fold() that optimizes comparisons of a division by
5608 a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.
5611 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5612 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5613 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5615 The function returns the constant folded tree if a simplification
5616 can be made, and NULL_TREE otherwise. */
5619 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
/* [lo, hi] will bound the values of arg00 for which arg00/arg01 == arg1;
   the comparison is then rewritten as a range check on arg00.  */
5621 tree prod, tmp, hi, lo;
5622 tree arg00 = TREE_OPERAND (arg0, 0);
5623 tree arg01 = TREE_OPERAND (arg0, 1);
5624 unsigned HOST_WIDE_INT lpart;
5625 HOST_WIDE_INT hpart;
5628 /* We have to do this the hard way to detect unsigned overflow.
5629 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5630 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5631 TREE_INT_CST_HIGH (arg01),
5632 TREE_INT_CST_LOW (arg1),
5633 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5634 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5635 prod = force_fit_type (prod, -1, overflow, false);
5637 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
/* Unsigned: range is [prod, prod + (arg01 - 1)].  */
5639 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5642 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5643 overflow = add_double (TREE_INT_CST_LOW (prod),
5644 TREE_INT_CST_HIGH (prod),
5645 TREE_INT_CST_LOW (tmp),
5646 TREE_INT_CST_HIGH (tmp),
5648 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5649 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5650 TREE_CONSTANT_OVERFLOW (prod));
5652 else if (tree_int_cst_sgn (arg01) >= 0)
/* Signed division by a positive constant: the range's shape depends
   on the sign of arg1 (truncating division rounds toward zero).  */
5654 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5655 switch (tree_int_cst_sgn (arg1))
5658 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5663 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5668 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Signed division by a negative constant: mirror image of the above.  */
5678 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5679 switch (tree_int_cst_sgn (arg1))
5682 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5687 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5692 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* NOTE(review): the `switch (code)` and its case labels are elided in
   this extract; each group below handles one comparison code, using
   overflow of lo/hi to detect an empty or unbounded range.  */
5704 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5705 return omit_one_operand (type, integer_zero_node, arg00);
5706 if (TREE_OVERFLOW (hi))
5707 return fold (build2 (GE_EXPR, type, arg00, lo));
5708 if (TREE_OVERFLOW (lo))
5709 return fold (build2 (LE_EXPR, type, arg00, hi));
5710 return build_range_check (type, arg00, 1, lo, hi);
5713 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5714 return omit_one_operand (type, integer_one_node, arg00);
5715 if (TREE_OVERFLOW (hi))
5716 return fold (build2 (LT_EXPR, type, arg00, lo));
5717 if (TREE_OVERFLOW (lo))
5718 return fold (build2 (GT_EXPR, type, arg00, hi));
5719 return build_range_check (type, arg00, 0, lo, hi);
5722 if (TREE_OVERFLOW (lo))
5723 return omit_one_operand (type, integer_zero_node, arg00);
5724 return fold (build2 (LT_EXPR, type, arg00, lo));
5727 if (TREE_OVERFLOW (hi))
5728 return omit_one_operand (type, integer_one_node, arg00);
5729 return fold (build2 (LE_EXPR, type, arg00, hi));
5732 if (TREE_OVERFLOW (hi))
5733 return omit_one_operand (type, integer_zero_node, arg00);
5734 return fold (build2 (GT_EXPR, type, arg00, hi));
5737 if (TREE_OVERFLOW (lo))
5738 return omit_one_operand (type, integer_one_node, arg00);
5739 return fold (build2 (GE_EXPR, type, arg00, lo));
5749 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5750 equality/inequality test, then return a simplified form of
5751 the test using shifts and logical operations. Otherwise return
5752 NULL. TYPE is the desired result type. */
5755 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5758 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
   its operand; look through it.  */
5760 if (code == TRUTH_NOT_EXPR)
5762 code = TREE_CODE (arg0);
5763 if (code != NE_EXPR && code != EQ_EXPR)
5766 /* Extract the arguments of the EQ/NE. */
5767 arg1 = TREE_OPERAND (arg0, 1);
5768 arg0 = TREE_OPERAND (arg0, 0);
5770 /* This requires us to invert the code. */
5771 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5774 /* If this is testing a single bit, we can optimize the test. */
5775 if ((code == NE_EXPR || code == EQ_EXPR)
5776 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5777 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5779 tree inner = TREE_OPERAND (arg0, 0);
5780 tree type = TREE_TYPE (arg0);
5781 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5782 enum machine_mode operand_mode = TYPE_MODE (type);
5784 tree signed_type, unsigned_type, intermediate_type;
5787 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5788 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5789 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5790 if (arg00 != NULL_TREE
5791 /* This is only a win if casting to a signed type is cheap,
5792 i.e. when arg00's type is not a partial mode. */
5793 && TYPE_PRECISION (TREE_TYPE (arg00))
5794 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5796 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5797 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5798 result_type, fold_convert (stype, arg00),
5799 fold_convert (stype, integer_zero_node)));
5802 /* Otherwise we have (A & C) != 0 where C is a single bit,
5803 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5804 Similarly for (A & C) == 0. */
5806 /* If INNER is a right shift of a constant and it plus BITNUM does
5807 not overflow, adjust BITNUM and INNER. */
5808 if (TREE_CODE (inner) == RSHIFT_EXPR
5809 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5810 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5811 && bitnum < TYPE_PRECISION (type)
5812 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5813 bitnum - TYPE_PRECISION (type)))
/* Fold (X >> s) bit-test into a test of bit (bitnum + s) of X.  */
5815 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5816 inner = TREE_OPERAND (inner, 0);
5819 /* If we are going to be able to omit the AND below, we must do our
5820 operations as unsigned. If we must use the AND, we have a choice.
5821 Normally unsigned is faster, but for some machines signed is. */
5822 #ifdef LOAD_EXTEND_OP
5823 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5828 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5829 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5830 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5831 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0.  */
5834 inner = build2 (RSHIFT_EXPR, intermediate_type,
5835 inner, size_int (bitnum));
/* For EQ we want the complement of the bit, so XOR with 1.  */
5837 if (code == EQ_EXPR)
5838 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5839 inner, integer_one_node));
5841 /* Put the AND last so it can combine with more things. */
5842 inner = build2 (BIT_AND_EXPR, intermediate_type,
5843 inner, integer_one_node);
5845 /* Make sure to return the proper type. */
5846 inner = fold_convert (result_type, inner);
5853 /* Check whether we are allowed to reorder operands arg0 and arg1,
5854 such that the evaluation of arg1 occurs before arg0. */
5857 reorder_operands_p (tree arg0, tree arg1)
/* Predicate: may ARG1 be evaluated before ARG0?  (See the comment above
   this function.)  NOTE(review): several lines are elided from this
   listing; the early exits below presumably return nonzero — confirm.  */
5859 if (! flag_evaluation_order)
/* When no strict evaluation order is mandated, reordering is always OK.  */
5861 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* A constant operand has no evaluation side effects, so order is free.  */
5863 return ! TREE_SIDE_EFFECTS (arg0)
5864 && ! TREE_SIDE_EFFECTS (arg1);
5867 /* Test whether it is preferable two swap two operands, ARG0 and
5868 ARG1, for example because ARG0 is an integer constant and ARG1
5869 isn't. If REORDER is true, only recommend swapping if we can
5870 evaluate the operands in reverse order. */
5873 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Decide whether ARG0 and ARG1 should be swapped (see comment above).
   The overall scheme visible here: constants are preferred in the
   second operand position, checked from "most constant" (INTEGER_CST)
   to least (generic TREE_CONSTANT).  NOTE(review): the return
   statements are elided in this listing — presumably each "arg1 is
   constant" test returns false and each "arg0 is constant" test
   returns true; confirm against the full source.  */
5875 STRIP_SIGN_NOPS (arg0);
5876 STRIP_SIGN_NOPS (arg1);
5878 if (TREE_CODE (arg1) == INTEGER_CST)
5880 if (TREE_CODE (arg0) == INTEGER_CST)
5883 if (TREE_CODE (arg1) == REAL_CST)
5885 if (TREE_CODE (arg0) == REAL_CST)
5888 if (TREE_CODE (arg1) == COMPLEX_CST)
5890 if (TREE_CODE (arg0) == COMPLEX_CST)
5893 if (TREE_CONSTANT (arg1))
5895 if (TREE_CONSTANT (arg0))
/* With -ffloat-store style ordered evaluation, refuse to recommend a
   swap when either operand has side effects (both checks below guard
   elided returns; REORDER gates them per the function comment).  */
5901 if (reorder && flag_evaluation_order
5902 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5910 if (reorder && flag_evaluation_order
5911 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5919 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5920 for commutative and comparison operators. Ensuring a canonical
5921 form allows the optimizers to find additional redundancies without
5922 having to explicitly check for both orderings. */
5923 if (TREE_CODE (arg0) == SSA_NAME
5924 && TREE_CODE (arg1) == SSA_NAME
5925 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5931 /* Perform constant folding and related simplification of EXPR.
5932 The related simplifications include x*1 => x, x*0 => 0, etc.,
5933 and application of the associative law.
5934 NOP_EXPR conversions may be removed freely (as long as we
5935 are careful not to change the type of the overall expression).
5936 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5937 but we can constant-fold them if they have constant operands. */
5939 #ifdef ENABLE_FOLD_CHECKING
5940 # define fold(x) fold_1 (x)
5941 static tree fold_1 (tree);
5947 const tree t = expr;
5948 const tree type = TREE_TYPE (expr);
5949 tree t1 = NULL_TREE;
5951 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5952 enum tree_code code = TREE_CODE (t);
5953 int kind = TREE_CODE_CLASS (code);
5955 /* WINS will be nonzero when the switch is done
5956 if all operands are constant. */
5959 /* Return right away if a constant. */
5963 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5967 /* Special case for conversion ops that can have fixed point args. */
5968 arg0 = TREE_OPERAND (t, 0);
5970 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5972 STRIP_SIGN_NOPS (arg0);
5974 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5975 subop = TREE_REALPART (arg0);
5979 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5980 && TREE_CODE (subop) != REAL_CST)
5981 /* Note that TREE_CONSTANT isn't enough:
5982 static var addresses are constant but we can't
5983 do arithmetic on them. */
5986 else if (IS_EXPR_CODE_CLASS (kind))
5988 int len = first_rtl_op (code);
5990 for (i = 0; i < len; i++)
5992 tree op = TREE_OPERAND (t, i);
5996 continue; /* Valid for CALL_EXPR, at least. */
5998 /* Strip any conversions that don't change the mode. This is
5999 safe for every expression, except for a comparison expression
6000 because its signedness is derived from its operands. So, in
6001 the latter case, only strip conversions that don't change the
6004 Note that this is done as an internal manipulation within the
6005 constant folder, in order to find the simplest representation
6006 of the arguments so that their form can be studied. In any
6007 cases, the appropriate type conversions should be put back in
6008 the tree that will get out of the constant folder. */
6010 STRIP_SIGN_NOPS (op);
6014 if (TREE_CODE (op) == COMPLEX_CST)
6015 subop = TREE_REALPART (op);
6019 if (TREE_CODE (subop) != INTEGER_CST
6020 && TREE_CODE (subop) != REAL_CST)
6021 /* Note that TREE_CONSTANT isn't enough:
6022 static var addresses are constant but we can't
6023 do arithmetic on them. */
6033 /* If this is a commutative operation, and ARG0 is a constant, move it
6034 to ARG1 to reduce the number of tests below. */
6035 if (commutative_tree_code (code)
6036 && tree_swap_operands_p (arg0, arg1, true))
6037 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6038 TREE_OPERAND (t, 0)));
6040 /* Now WINS is set as described above,
6041 ARG0 is the first operand of EXPR,
6042 and ARG1 is the second operand (if it has more than one operand).
6044 First check for cases where an arithmetic operation is applied to a
6045 compound, conditional, or comparison operation. Push the arithmetic
6046 operation inside the compound or conditional to see if any folding
6047 can then be done. Convert comparison to conditional for this purpose.
6048 The also optimizes non-constant cases that used to be done in
6051 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6052 one of the operands is a comparison and the other is a comparison, a
6053 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6054 code below would make the expression more complex. Change it to a
6055 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6056 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6058 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6059 || code == EQ_EXPR || code == NE_EXPR)
6060 && ((truth_value_p (TREE_CODE (arg0))
6061 && (truth_value_p (TREE_CODE (arg1))
6062 || (TREE_CODE (arg1) == BIT_AND_EXPR
6063 && integer_onep (TREE_OPERAND (arg1, 1)))))
6064 || (truth_value_p (TREE_CODE (arg1))
6065 && (truth_value_p (TREE_CODE (arg0))
6066 || (TREE_CODE (arg0) == BIT_AND_EXPR
6067 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6069 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6070 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6072 type, fold_convert (boolean_type_node, arg0),
6073 fold_convert (boolean_type_node, arg1)));
6075 if (code == EQ_EXPR)
6076 tem = invert_truthvalue (tem);
6081 if (TREE_CODE_CLASS (code) == '1')
6083 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6084 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6085 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6086 else if (TREE_CODE (arg0) == COND_EXPR)
6088 tree arg01 = TREE_OPERAND (arg0, 1);
6089 tree arg02 = TREE_OPERAND (arg0, 2);
6090 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6091 arg01 = fold (build1 (code, type, arg01));
6092 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6093 arg02 = fold (build1 (code, type, arg02));
6094 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6097 /* If this was a conversion, and all we did was to move into
6098 inside the COND_EXPR, bring it back out. But leave it if
6099 it is a conversion from integer to integer and the
6100 result precision is no wider than a word since such a
6101 conversion is cheap and may be optimized away by combine,
6102 while it couldn't if it were outside the COND_EXPR. Then return
6103 so we don't get into an infinite recursion loop taking the
6104 conversion out and then back in. */
6106 if ((code == NOP_EXPR || code == CONVERT_EXPR
6107 || code == NON_LVALUE_EXPR)
6108 && TREE_CODE (tem) == COND_EXPR
6109 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6110 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6111 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6112 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6113 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6114 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6115 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6117 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6118 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6119 tem = build1 (code, type,
6121 TREE_TYPE (TREE_OPERAND
6122 (TREE_OPERAND (tem, 1), 0)),
6123 TREE_OPERAND (tem, 0),
6124 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6125 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6128 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6130 if (TREE_CODE (type) == BOOLEAN_TYPE)
6132 arg0 = copy_node (arg0);
6133 TREE_TYPE (arg0) = type;
6136 else if (TREE_CODE (type) != INTEGER_TYPE)
6137 return fold (build3 (COND_EXPR, type, arg0,
6138 fold (build1 (code, type,
6140 fold (build1 (code, type,
6141 integer_zero_node))));
6144 else if (TREE_CODE_CLASS (code) == '<'
6145 && TREE_CODE (arg0) == COMPOUND_EXPR)
6146 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6147 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6148 else if (TREE_CODE_CLASS (code) == '<'
6149 && TREE_CODE (arg1) == COMPOUND_EXPR)
6150 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6151 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6152 else if (TREE_CODE_CLASS (code) == '2'
6153 || TREE_CODE_CLASS (code) == '<')
6155 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6156 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6157 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6159 if (TREE_CODE (arg1) == COMPOUND_EXPR
6160 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6161 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6162 fold (build2 (code, type,
6163 arg0, TREE_OPERAND (arg1, 1))));
6165 if (TREE_CODE (arg0) == COND_EXPR
6166 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6168 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6169 /*cond_first_p=*/1);
6170 if (tem != NULL_TREE)
6174 if (TREE_CODE (arg1) == COND_EXPR
6175 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6177 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6178 /*cond_first_p=*/0);
6179 if (tem != NULL_TREE)
6187 return fold (DECL_INITIAL (t));
6192 case FIX_TRUNC_EXPR:
6194 case FIX_FLOOR_EXPR:
6195 case FIX_ROUND_EXPR:
6196 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6197 return TREE_OPERAND (t, 0);
6199 /* Handle cases of two conversions in a row. */
6200 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6201 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6203 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6204 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6205 int inside_int = INTEGRAL_TYPE_P (inside_type);
6206 int inside_ptr = POINTER_TYPE_P (inside_type);
6207 int inside_float = FLOAT_TYPE_P (inside_type);
6208 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6209 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6210 int inter_int = INTEGRAL_TYPE_P (inter_type);
6211 int inter_ptr = POINTER_TYPE_P (inter_type);
6212 int inter_float = FLOAT_TYPE_P (inter_type);
6213 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6214 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6215 int final_int = INTEGRAL_TYPE_P (type);
6216 int final_ptr = POINTER_TYPE_P (type);
6217 int final_float = FLOAT_TYPE_P (type);
6218 unsigned int final_prec = TYPE_PRECISION (type);
6219 int final_unsignedp = TYPE_UNSIGNED (type);
6221 /* In addition to the cases of two conversions in a row
6222 handled below, if we are converting something to its own
6223 type via an object of identical or wider precision, neither
6224 conversion is needed. */
6225 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6226 && ((inter_int && final_int) || (inter_float && final_float))
6227 && inter_prec >= final_prec)
6228 return fold (build1 (code, type,
6229 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6231 /* Likewise, if the intermediate and final types are either both
6232 float or both integer, we don't need the middle conversion if
6233 it is wider than the final type and doesn't change the signedness
6234 (for integers). Avoid this if the final type is a pointer
6235 since then we sometimes need the inner conversion. Likewise if
6236 the outer has a precision not equal to the size of its mode. */
6237 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6238 || (inter_float && inside_float))
6239 && inter_prec >= inside_prec
6240 && (inter_float || inter_unsignedp == inside_unsignedp)
6241 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6242 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6244 return fold (build1 (code, type,
6245 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6247 /* If we have a sign-extension of a zero-extended value, we can
6248 replace that by a single zero-extension. */
6249 if (inside_int && inter_int && final_int
6250 && inside_prec < inter_prec && inter_prec < final_prec
6251 && inside_unsignedp && !inter_unsignedp)
6252 return fold (build1 (code, type,
6253 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6255 /* Two conversions in a row are not needed unless:
6256 - some conversion is floating-point (overstrict for now), or
6257 - the intermediate type is narrower than both initial and
6259 - the intermediate type and innermost type differ in signedness,
6260 and the outermost type is wider than the intermediate, or
6261 - the initial type is a pointer type and the precisions of the
6262 intermediate and final types differ, or
6263 - the final type is a pointer type and the precisions of the
6264 initial and intermediate types differ. */
6265 if (! inside_float && ! inter_float && ! final_float
6266 && (inter_prec > inside_prec || inter_prec > final_prec)
6267 && ! (inside_int && inter_int
6268 && inter_unsignedp != inside_unsignedp
6269 && inter_prec < final_prec)
6270 && ((inter_unsignedp && inter_prec > inside_prec)
6271 == (final_unsignedp && final_prec > inter_prec))
6272 && ! (inside_ptr && inter_prec != final_prec)
6273 && ! (final_ptr && inside_prec != inter_prec)
6274 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6275 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6277 return fold (build1 (code, type,
6278 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6281 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6282 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6283 /* Detect assigning a bitfield. */
6284 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6285 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6287 /* Don't leave an assignment inside a conversion
6288 unless assigning a bitfield. */
6289 tree prev = TREE_OPERAND (t, 0);
6290 tem = copy_node (t);
6291 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6292 /* First do the assignment, then return converted constant. */
6293 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6294 TREE_NO_WARNING (tem) = 1;
6295 TREE_USED (tem) = 1;
6299 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6300 constants (if x has signed type, the sign bit cannot be set
6301 in c). This folds extension into the BIT_AND_EXPR. */
6302 if (INTEGRAL_TYPE_P (type)
6303 && TREE_CODE (type) != BOOLEAN_TYPE
6304 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6305 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6307 tree and = TREE_OPERAND (t, 0);
6308 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6311 if (TYPE_UNSIGNED (TREE_TYPE (and))
6312 || (TYPE_PRECISION (type)
6313 <= TYPE_PRECISION (TREE_TYPE (and))))
6315 else if (TYPE_PRECISION (TREE_TYPE (and1))
6316 <= HOST_BITS_PER_WIDE_INT
6317 && host_integerp (and1, 1))
6319 unsigned HOST_WIDE_INT cst;
6321 cst = tree_low_cst (and1, 1);
6322 cst &= (HOST_WIDE_INT) -1
6323 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6324 change = (cst == 0);
6325 #ifdef LOAD_EXTEND_OP
6327 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6330 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6331 and0 = fold_convert (uns, and0);
6332 and1 = fold_convert (uns, and1);
6337 return fold (build2 (BIT_AND_EXPR, type,
6338 fold_convert (type, and0),
6339 fold_convert (type, and1)));
6342 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6343 T2 being pointers to types of the same size. */
6344 if (POINTER_TYPE_P (TREE_TYPE (t))
6345 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6346 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6347 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6349 tree arg00 = TREE_OPERAND (arg0, 0);
6350 tree t0 = TREE_TYPE (t);
6351 tree t1 = TREE_TYPE (arg00);
6352 tree tt0 = TREE_TYPE (t0);
6353 tree tt1 = TREE_TYPE (t1);
6354 tree s0 = TYPE_SIZE (tt0);
6355 tree s1 = TYPE_SIZE (tt1);
6357 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6358 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6359 TREE_OPERAND (arg0, 1));
6362 tem = fold_convert_const (code, type, arg0);
6363 return tem ? tem : t;
6365 case VIEW_CONVERT_EXPR:
6366 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6367 return build1 (VIEW_CONVERT_EXPR, type,
6368 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6372 if (TREE_CODE (arg0) == CONSTRUCTOR
6373 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6375 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6377 return TREE_VALUE (m);
6382 if (TREE_CONSTANT (t) != wins)
6384 tem = copy_node (t);
6385 TREE_CONSTANT (tem) = wins;
6386 TREE_INVARIANT (tem) = wins;
6392 if (negate_expr_p (arg0))
6393 return fold_convert (type, negate_expr (arg0));
6397 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6398 return fold_abs_const (arg0, type);
6399 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6400 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6401 /* Convert fabs((double)float) into (double)fabsf(float). */
6402 else if (TREE_CODE (arg0) == NOP_EXPR
6403 && TREE_CODE (type) == REAL_TYPE)
6405 tree targ0 = strip_float_extensions (arg0);
6407 return fold_convert (type, fold (build1 (ABS_EXPR,
6411 else if (tree_expr_nonnegative_p (arg0))
6416 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6417 return fold_convert (type, arg0);
6418 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6419 return build2 (COMPLEX_EXPR, type,
6420 TREE_OPERAND (arg0, 0),
6421 negate_expr (TREE_OPERAND (arg0, 1)));
6422 else if (TREE_CODE (arg0) == COMPLEX_CST)
6423 return build_complex (type, TREE_REALPART (arg0),
6424 negate_expr (TREE_IMAGPART (arg0)));
6425 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6426 return fold (build2 (TREE_CODE (arg0), type,
6427 fold (build1 (CONJ_EXPR, type,
6428 TREE_OPERAND (arg0, 0))),
6429 fold (build1 (CONJ_EXPR, type,
6430 TREE_OPERAND (arg0, 1)))));
6431 else if (TREE_CODE (arg0) == CONJ_EXPR)
6432 return TREE_OPERAND (arg0, 0);
6436 if (TREE_CODE (arg0) == INTEGER_CST)
6437 return fold_not_const (arg0, type);
6438 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6439 return TREE_OPERAND (arg0, 0);
6443 /* A + (-B) -> A - B */
6444 if (TREE_CODE (arg1) == NEGATE_EXPR)
6445 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6446 /* (-A) + B -> B - A */
6447 if (TREE_CODE (arg0) == NEGATE_EXPR
6448 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6449 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6450 if (! FLOAT_TYPE_P (type))
6452 if (integer_zerop (arg1))
6453 return non_lvalue (fold_convert (type, arg0));
6455 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6456 with a constant, and the two constants have no bits in common,
6457 we should treat this as a BIT_IOR_EXPR since this may produce more
6459 if (TREE_CODE (arg0) == BIT_AND_EXPR
6460 && TREE_CODE (arg1) == BIT_AND_EXPR
6461 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6462 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6463 && integer_zerop (const_binop (BIT_AND_EXPR,
6464 TREE_OPERAND (arg0, 1),
6465 TREE_OPERAND (arg1, 1), 0)))
6467 code = BIT_IOR_EXPR;
6471 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6472 (plus (plus (mult) (mult)) (foo)) so that we can
6473 take advantage of the factoring cases below. */
6474 if ((TREE_CODE (arg0) == PLUS_EXPR
6475 && TREE_CODE (arg1) == MULT_EXPR)
6476 || (TREE_CODE (arg1) == PLUS_EXPR
6477 && TREE_CODE (arg0) == MULT_EXPR))
6479 tree parg0, parg1, parg, marg;
6481 if (TREE_CODE (arg0) == PLUS_EXPR)
6482 parg = arg0, marg = arg1;
6484 parg = arg1, marg = arg0;
6485 parg0 = TREE_OPERAND (parg, 0);
6486 parg1 = TREE_OPERAND (parg, 1);
6490 if (TREE_CODE (parg0) == MULT_EXPR
6491 && TREE_CODE (parg1) != MULT_EXPR)
6492 return fold (build2 (PLUS_EXPR, type,
6493 fold (build2 (PLUS_EXPR, type,
6494 fold_convert (type, parg0),
6495 fold_convert (type, marg))),
6496 fold_convert (type, parg1)));
6497 if (TREE_CODE (parg0) != MULT_EXPR
6498 && TREE_CODE (parg1) == MULT_EXPR)
6499 return fold (build2 (PLUS_EXPR, type,
6500 fold (build2 (PLUS_EXPR, type,
6501 fold_convert (type, parg1),
6502 fold_convert (type, marg))),
6503 fold_convert (type, parg0)));
6506 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6508 tree arg00, arg01, arg10, arg11;
6509 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6511 /* (A * C) + (B * C) -> (A+B) * C.
6512 We are most concerned about the case where C is a constant,
6513 but other combinations show up during loop reduction. Since
6514 it is not difficult, try all four possibilities. */
6516 arg00 = TREE_OPERAND (arg0, 0);
6517 arg01 = TREE_OPERAND (arg0, 1);
6518 arg10 = TREE_OPERAND (arg1, 0);
6519 arg11 = TREE_OPERAND (arg1, 1);
6522 if (operand_equal_p (arg01, arg11, 0))
6523 same = arg01, alt0 = arg00, alt1 = arg10;
6524 else if (operand_equal_p (arg00, arg10, 0))
6525 same = arg00, alt0 = arg01, alt1 = arg11;
6526 else if (operand_equal_p (arg00, arg11, 0))
6527 same = arg00, alt0 = arg01, alt1 = arg10;
6528 else if (operand_equal_p (arg01, arg10, 0))
6529 same = arg01, alt0 = arg00, alt1 = arg11;
6531 /* No identical multiplicands; see if we can find a common
6532 power-of-two factor in non-power-of-two multiplies. This
6533 can help in multi-dimensional array access. */
6534 else if (TREE_CODE (arg01) == INTEGER_CST
6535 && TREE_CODE (arg11) == INTEGER_CST
6536 && TREE_INT_CST_HIGH (arg01) == 0
6537 && TREE_INT_CST_HIGH (arg11) == 0)
6539 HOST_WIDE_INT int01, int11, tmp;
6540 int01 = TREE_INT_CST_LOW (arg01);
6541 int11 = TREE_INT_CST_LOW (arg11);
6543 /* Move min of absolute values to int11. */
6544 if ((int01 >= 0 ? int01 : -int01)
6545 < (int11 >= 0 ? int11 : -int11))
6547 tmp = int01, int01 = int11, int11 = tmp;
6548 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6549 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6552 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6554 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6555 build_int_cst (NULL_TREE,
6563 return fold (build2 (MULT_EXPR, type,
6564 fold (build2 (PLUS_EXPR, type,
6571 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6572 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6573 return non_lvalue (fold_convert (type, arg0));
6575 /* Likewise if the operands are reversed. */
6576 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6577 return non_lvalue (fold_convert (type, arg1));
6579 /* Convert X + -C into X - C. */
6580 if (TREE_CODE (arg1) == REAL_CST
6581 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6583 tem = fold_negate_const (arg1, type);
6584 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6585 return fold (build2 (MINUS_EXPR, type,
6586 fold_convert (type, arg0),
6587 fold_convert (type, tem)));
6590 /* Convert x+x into x*2.0. */
6591 if (operand_equal_p (arg0, arg1, 0)
6592 && SCALAR_FLOAT_TYPE_P (type))
6593 return fold (build2 (MULT_EXPR, type, arg0,
6594 build_real (type, dconst2)));
6596 /* Convert x*c+x into x*(c+1). */
6597 if (flag_unsafe_math_optimizations
6598 && TREE_CODE (arg0) == MULT_EXPR
6599 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6600 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6601 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6605 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6606 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6607 return fold (build2 (MULT_EXPR, type, arg1,
6608 build_real (type, c)));
6611 /* Convert x+x*c into x*(c+1). */
6612 if (flag_unsafe_math_optimizations
6613 && TREE_CODE (arg1) == MULT_EXPR
6614 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6615 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6616 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6620 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6621 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6622 return fold (build2 (MULT_EXPR, type, arg0,
6623 build_real (type, c)));
6626 /* Convert x*c1+x*c2 into x*(c1+c2). */
6627 if (flag_unsafe_math_optimizations
6628 && TREE_CODE (arg0) == MULT_EXPR
6629 && TREE_CODE (arg1) == MULT_EXPR
6630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6631 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6632 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6633 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6634 && operand_equal_p (TREE_OPERAND (arg0, 0),
6635 TREE_OPERAND (arg1, 0), 0))
6637 REAL_VALUE_TYPE c1, c2;
6639 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6640 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6641 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6642 return fold (build2 (MULT_EXPR, type,
6643 TREE_OPERAND (arg0, 0),
6644 build_real (type, c1)));
6646 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6647 if (flag_unsafe_math_optimizations
6648 && TREE_CODE (arg1) == PLUS_EXPR
6649 && TREE_CODE (arg0) != MULT_EXPR)
6651 tree tree10 = TREE_OPERAND (arg1, 0);
6652 tree tree11 = TREE_OPERAND (arg1, 1);
6653 if (TREE_CODE (tree11) == MULT_EXPR
6654 && TREE_CODE (tree10) == MULT_EXPR)
6657 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6658 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6661 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6662 if (flag_unsafe_math_optimizations
6663 && TREE_CODE (arg0) == PLUS_EXPR
6664 && TREE_CODE (arg1) != MULT_EXPR)
6666 tree tree00 = TREE_OPERAND (arg0, 0);
6667 tree tree01 = TREE_OPERAND (arg0, 1);
6668 if (TREE_CODE (tree01) == MULT_EXPR
6669 && TREE_CODE (tree00) == MULT_EXPR)
6672 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6673 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6679 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6680 is a rotate of A by C1 bits. */
6681 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6682 is a rotate of A by B bits. */
6684 enum tree_code code0, code1;
6685 code0 = TREE_CODE (arg0);
6686 code1 = TREE_CODE (arg1);
6687 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6688 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6689 && operand_equal_p (TREE_OPERAND (arg0, 0),
6690 TREE_OPERAND (arg1, 0), 0)
6691 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6693 tree tree01, tree11;
6694 enum tree_code code01, code11;
6696 tree01 = TREE_OPERAND (arg0, 1);
6697 tree11 = TREE_OPERAND (arg1, 1);
6698 STRIP_NOPS (tree01);
6699 STRIP_NOPS (tree11);
6700 code01 = TREE_CODE (tree01);
6701 code11 = TREE_CODE (tree11);
6702 if (code01 == INTEGER_CST
6703 && code11 == INTEGER_CST
6704 && TREE_INT_CST_HIGH (tree01) == 0
6705 && TREE_INT_CST_HIGH (tree11) == 0
6706 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6707 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6708 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6709 code0 == LSHIFT_EXPR ? tree01 : tree11);
6710 else if (code11 == MINUS_EXPR)
6712 tree tree110, tree111;
6713 tree110 = TREE_OPERAND (tree11, 0);
6714 tree111 = TREE_OPERAND (tree11, 1);
6715 STRIP_NOPS (tree110);
6716 STRIP_NOPS (tree111);
6717 if (TREE_CODE (tree110) == INTEGER_CST
6718 && 0 == compare_tree_int (tree110,
6720 (TREE_TYPE (TREE_OPERAND
6722 && operand_equal_p (tree01, tree111, 0))
6723 return build2 ((code0 == LSHIFT_EXPR
6726 type, TREE_OPERAND (arg0, 0), tree01);
6728 else if (code01 == MINUS_EXPR)
6730 tree tree010, tree011;
6731 tree010 = TREE_OPERAND (tree01, 0);
6732 tree011 = TREE_OPERAND (tree01, 1);
6733 STRIP_NOPS (tree010);
6734 STRIP_NOPS (tree011);
6735 if (TREE_CODE (tree010) == INTEGER_CST
6736 && 0 == compare_tree_int (tree010,
6738 (TREE_TYPE (TREE_OPERAND
6740 && operand_equal_p (tree11, tree011, 0))
6741 return build2 ((code0 != LSHIFT_EXPR
6744 type, TREE_OPERAND (arg0, 0), tree11);
6750 /* In most languages, can't associate operations on floats through
6751 parentheses. Rather than remember where the parentheses were, we
6752 don't associate floats at all, unless the user has specified
6753 -funsafe-math-optimizations. */
6756 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6758 tree var0, con0, lit0, minus_lit0;
6759 tree var1, con1, lit1, minus_lit1;
6761 /* Split both trees into variables, constants, and literals. Then
6762 associate each group together, the constants with literals,
6763 then the result with variables. This increases the chances of
6764 literals being recombined later and of generating relocatable
6765 expressions for the sum of a constant and literal. */
6766 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6767 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6768 code == MINUS_EXPR);
6770 /* Only do something if we found more than two objects. Otherwise,
6771 nothing has changed and we risk infinite recursion. */
6772 if (2 < ((var0 != 0) + (var1 != 0)
6773 + (con0 != 0) + (con1 != 0)
6774 + (lit0 != 0) + (lit1 != 0)
6775 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6777 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6778 if (code == MINUS_EXPR)
6781 var0 = associate_trees (var0, var1, code, type);
6782 con0 = associate_trees (con0, con1, code, type);
6783 lit0 = associate_trees (lit0, lit1, code, type);
6784 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6786 /* Preserve the MINUS_EXPR if the negative part of the literal is
6787 greater than the positive part. Otherwise, the multiplicative
6788 folding code (i.e extract_muldiv) may be fooled in case
6789 unsigned constants are subtracted, like in the following
6790 example: ((X*2 + 4) - 8U)/2. */
6791 if (minus_lit0 && lit0)
6793 if (TREE_CODE (lit0) == INTEGER_CST
6794 && TREE_CODE (minus_lit0) == INTEGER_CST
6795 && tree_int_cst_lt (lit0, minus_lit0))
6797 minus_lit0 = associate_trees (minus_lit0, lit0,
6803 lit0 = associate_trees (lit0, minus_lit0,
6811 return fold_convert (type,
6812 associate_trees (var0, minus_lit0,
6816 con0 = associate_trees (con0, minus_lit0,
6818 return fold_convert (type,
6819 associate_trees (var0, con0,
6824 con0 = associate_trees (con0, lit0, code, type);
6825 return fold_convert (type, associate_trees (var0, con0,
6832 t1 = const_binop (code, arg0, arg1, 0);
6833 if (t1 != NULL_TREE)
6835 /* The return value should always have
6836 the same type as the original expression. */
6837 if (TREE_TYPE (t1) != type)
6838 t1 = fold_convert (type, t1);
6845 /* A - (-B) -> A + B */
6846 if (TREE_CODE (arg1) == NEGATE_EXPR)
6847 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6848 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6849 if (TREE_CODE (arg0) == NEGATE_EXPR
6850 && (FLOAT_TYPE_P (type)
6851 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6852 && negate_expr_p (arg1)
6853 && reorder_operands_p (arg0, arg1))
6854 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6855 TREE_OPERAND (arg0, 0)));
6857 if (! FLOAT_TYPE_P (type))
6859 if (! wins && integer_zerop (arg0))
6860 return negate_expr (fold_convert (type, arg1));
6861 if (integer_zerop (arg1))
6862 return non_lvalue (fold_convert (type, arg0));
6864 /* Fold A - (A & B) into ~B & A. */
6865 if (!TREE_SIDE_EFFECTS (arg0)
6866 && TREE_CODE (arg1) == BIT_AND_EXPR)
6868 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6869 return fold (build2 (BIT_AND_EXPR, type,
6870 fold (build1 (BIT_NOT_EXPR, type,
6871 TREE_OPERAND (arg1, 0))),
6873 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6874 return fold (build2 (BIT_AND_EXPR, type,
6875 fold (build1 (BIT_NOT_EXPR, type,
6876 TREE_OPERAND (arg1, 1))),
6880 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6881 any power of 2 minus 1. */
6882 if (TREE_CODE (arg0) == BIT_AND_EXPR
6883 && TREE_CODE (arg1) == BIT_AND_EXPR
6884 && operand_equal_p (TREE_OPERAND (arg0, 0),
6885 TREE_OPERAND (arg1, 0), 0))
6887 tree mask0 = TREE_OPERAND (arg0, 1);
6888 tree mask1 = TREE_OPERAND (arg1, 1);
6889 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6891 if (operand_equal_p (tem, mask1, 0))
6893 tem = fold (build2 (BIT_XOR_EXPR, type,
6894 TREE_OPERAND (arg0, 0), mask1));
6895 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6900 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6901 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6902 return non_lvalue (fold_convert (type, arg0));
6904 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6905 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6906 (-ARG1 + ARG0) reduces to -ARG1. */
6907 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6908 return negate_expr (fold_convert (type, arg1));
6910 /* Fold &x - &x. This can happen from &x.foo - &x.
6911 This is unsafe for certain floats even in non-IEEE formats.
6912 In IEEE, it is unsafe because it does wrong for NaNs.
6913 Also note that operand_equal_p is always false if an operand
6916 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6917 && operand_equal_p (arg0, arg1, 0))
6918 return fold_convert (type, integer_zero_node);
6920 /* A - B -> A + (-B) if B is easily negatable. */
6921 if (!wins && negate_expr_p (arg1)
6922 && ((FLOAT_TYPE_P (type)
6923 /* Avoid this transformation if B is a positive REAL_CST. */
6924 && (TREE_CODE (arg1) != REAL_CST
6925 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6926 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6927 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6929 if (TREE_CODE (arg0) == MULT_EXPR
6930 && TREE_CODE (arg1) == MULT_EXPR
6931 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6933 /* (A * C) - (B * C) -> (A-B) * C. */
6934 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6935 TREE_OPERAND (arg1, 1), 0))
6936 return fold (build2 (MULT_EXPR, type,
6937 fold (build2 (MINUS_EXPR, type,
6938 TREE_OPERAND (arg0, 0),
6939 TREE_OPERAND (arg1, 0))),
6940 TREE_OPERAND (arg0, 1)));
6941 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6942 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6943 TREE_OPERAND (arg1, 0), 0))
6944 return fold (build2 (MULT_EXPR, type,
6945 TREE_OPERAND (arg0, 0),
6946 fold (build2 (MINUS_EXPR, type,
6947 TREE_OPERAND (arg0, 1),
6948 TREE_OPERAND (arg1, 1)))));
6954 /* (-A) * (-B) -> A * B */
6955 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6956 return fold (build2 (MULT_EXPR, type,
6957 TREE_OPERAND (arg0, 0),
6958 negate_expr (arg1)));
6959 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6960 return fold (build2 (MULT_EXPR, type,
6962 TREE_OPERAND (arg1, 0)));
6964 if (! FLOAT_TYPE_P (type))
6966 if (integer_zerop (arg1))
6967 return omit_one_operand (type, arg1, arg0);
6968 if (integer_onep (arg1))
6969 return non_lvalue (fold_convert (type, arg0));
6971 /* (a * (1 << b)) is (a << b) */
6972 if (TREE_CODE (arg1) == LSHIFT_EXPR
6973 && integer_onep (TREE_OPERAND (arg1, 0)))
6974 return fold (build2 (LSHIFT_EXPR, type, arg0,
6975 TREE_OPERAND (arg1, 1)));
6976 if (TREE_CODE (arg0) == LSHIFT_EXPR
6977 && integer_onep (TREE_OPERAND (arg0, 0)))
6978 return fold (build2 (LSHIFT_EXPR, type, arg1,
6979 TREE_OPERAND (arg0, 1)));
6981 if (TREE_CODE (arg1) == INTEGER_CST
6982 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6983 fold_convert (type, arg1),
6985 return fold_convert (type, tem);
6990 /* Maybe fold x * 0 to 0. The expressions aren't the same
6991 when x is NaN, since x * 0 is also NaN. Nor are they the
6992 same in modes with signed zeros, since multiplying a
6993 negative value by 0 gives -0, not +0. */
6994 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6995 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6996 && real_zerop (arg1))
6997 return omit_one_operand (type, arg1, arg0);
6998 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6999 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7000 && real_onep (arg1))
7001 return non_lvalue (fold_convert (type, arg0));
7003 /* Transform x * -1.0 into -x. */
7004 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7005 && real_minus_onep (arg1))
7006 return fold_convert (type, negate_expr (arg0));
7008 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7009 if (flag_unsafe_math_optimizations
7010 && TREE_CODE (arg0) == RDIV_EXPR
7011 && TREE_CODE (arg1) == REAL_CST
7012 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7014 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7017 return fold (build2 (RDIV_EXPR, type, tem,
7018 TREE_OPERAND (arg0, 1)));
7021 if (flag_unsafe_math_optimizations)
7023 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7024 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7026 /* Optimizations of root(...)*root(...). */
7027 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7029 tree rootfn, arg, arglist;
7030 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7031 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7033 /* Optimize sqrt(x)*sqrt(x) as x. */
7034 if (BUILTIN_SQRT_P (fcode0)
7035 && operand_equal_p (arg00, arg10, 0)
7036 && ! HONOR_SNANS (TYPE_MODE (type)))
7039 /* Optimize root(x)*root(y) as root(x*y). */
7040 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7041 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7042 arglist = build_tree_list (NULL_TREE, arg);
7043 return build_function_call_expr (rootfn, arglist);
7046 /* Optimize expN(x)*expN(y) as expN(x+y). */
7047 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7049 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7050 tree arg = build2 (PLUS_EXPR, type,
7051 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7052 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7053 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7054 return build_function_call_expr (expfn, arglist);
7057 /* Optimizations of pow(...)*pow(...). */
7058 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7059 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7060 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7062 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7063 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7065 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7066 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7069 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7070 if (operand_equal_p (arg01, arg11, 0))
7072 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7073 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7074 tree arglist = tree_cons (NULL_TREE, fold (arg),
7075 build_tree_list (NULL_TREE,
7077 return build_function_call_expr (powfn, arglist);
7080 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7081 if (operand_equal_p (arg00, arg10, 0))
7083 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7084 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7085 tree arglist = tree_cons (NULL_TREE, arg00,
7086 build_tree_list (NULL_TREE,
7088 return build_function_call_expr (powfn, arglist);
7092 /* Optimize tan(x)*cos(x) as sin(x). */
7093 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7094 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7095 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7096 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7097 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7098 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7099 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7100 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7102 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7104 if (sinfn != NULL_TREE)
7105 return build_function_call_expr (sinfn,
7106 TREE_OPERAND (arg0, 1));
7109 /* Optimize x*pow(x,c) as pow(x,c+1). */
7110 if (fcode1 == BUILT_IN_POW
7111 || fcode1 == BUILT_IN_POWF
7112 || fcode1 == BUILT_IN_POWL)
7114 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7115 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7117 if (TREE_CODE (arg11) == REAL_CST
7118 && ! TREE_CONSTANT_OVERFLOW (arg11)
7119 && operand_equal_p (arg0, arg10, 0))
7121 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7125 c = TREE_REAL_CST (arg11);
7126 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7127 arg = build_real (type, c);
7128 arglist = build_tree_list (NULL_TREE, arg);
7129 arglist = tree_cons (NULL_TREE, arg0, arglist);
7130 return build_function_call_expr (powfn, arglist);
7134 /* Optimize pow(x,c)*x as pow(x,c+1). */
7135 if (fcode0 == BUILT_IN_POW
7136 || fcode0 == BUILT_IN_POWF
7137 || fcode0 == BUILT_IN_POWL)
7139 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7140 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7142 if (TREE_CODE (arg01) == REAL_CST
7143 && ! TREE_CONSTANT_OVERFLOW (arg01)
7144 && operand_equal_p (arg1, arg00, 0))
7146 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7150 c = TREE_REAL_CST (arg01);
7151 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7152 arg = build_real (type, c);
7153 arglist = build_tree_list (NULL_TREE, arg);
7154 arglist = tree_cons (NULL_TREE, arg1, arglist);
7155 return build_function_call_expr (powfn, arglist);
7159 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7161 && operand_equal_p (arg0, arg1, 0))
7163 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7167 tree arg = build_real (type, dconst2);
7168 tree arglist = build_tree_list (NULL_TREE, arg);
7169 arglist = tree_cons (NULL_TREE, arg0, arglist);
7170 return build_function_call_expr (powfn, arglist);
7179 if (integer_all_onesp (arg1))
7180 return omit_one_operand (type, arg1, arg0);
7181 if (integer_zerop (arg1))
7182 return non_lvalue (fold_convert (type, arg0));
7183 if (operand_equal_p (arg0, arg1, 0))
7184 return non_lvalue (fold_convert (type, arg0));
7187 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7190 t1 = build_int_cst (type, -1);
7191 t1 = force_fit_type (t1, 0, false, false);
7192 return omit_one_operand (type, t1, arg1);
7196 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7197 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7199 t1 = build_int_cst (type, -1);
7200 t1 = force_fit_type (t1, 0, false, false);
7201 return omit_one_operand (type, t1, arg0);
7204 t1 = distribute_bit_expr (code, type, arg0, arg1);
7205 if (t1 != NULL_TREE)
7208 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7210 This results in more efficient code for machines without a NAND
7211 instruction. Combine will canonicalize to the first form
7212 which will allow use of NAND instructions provided by the
7213 backend if they exist. */
7214 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7215 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7217 return fold (build1 (BIT_NOT_EXPR, type,
7218 build2 (BIT_AND_EXPR, type,
7219 TREE_OPERAND (arg0, 0),
7220 TREE_OPERAND (arg1, 0))));
7223 /* See if this can be simplified into a rotate first. If that
7224 is unsuccessful continue in the association code. */
7228 if (integer_zerop (arg1))
7229 return non_lvalue (fold_convert (type, arg0));
7230 if (integer_all_onesp (arg1))
7231 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7232 if (operand_equal_p (arg0, arg1, 0))
7233 return omit_one_operand (type, integer_zero_node, arg0);
7236 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7237 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7239 t1 = build_int_cst (type, -1);
7240 t1 = force_fit_type (t1, 0, false, false);
7241 return omit_one_operand (type, t1, arg1);
7245 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7246 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7248 t1 = build_int_cst (type, -1);
7249 t1 = force_fit_type (t1, 0, false, false);
7250 return omit_one_operand (type, t1, arg0);
7253 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7254 with a constant, and the two constants have no bits in common,
7255 we should treat this as a BIT_IOR_EXPR since this may produce more
7257 if (TREE_CODE (arg0) == BIT_AND_EXPR
7258 && TREE_CODE (arg1) == BIT_AND_EXPR
7259 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7260 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7261 && integer_zerop (const_binop (BIT_AND_EXPR,
7262 TREE_OPERAND (arg0, 1),
7263 TREE_OPERAND (arg1, 1), 0)))
7265 code = BIT_IOR_EXPR;
7269 /* See if this can be simplified into a rotate first. If that
7270 is unsuccessful continue in the association code. */
7274 if (integer_all_onesp (arg1))
7275 return non_lvalue (fold_convert (type, arg0));
7276 if (integer_zerop (arg1))
7277 return omit_one_operand (type, arg1, arg0);
7278 if (operand_equal_p (arg0, arg1, 0))
7279 return non_lvalue (fold_convert (type, arg0));
7281 /* ~X & X is always zero. */
7282 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7283 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7284 return omit_one_operand (type, integer_zero_node, arg1);
7286 /* X & ~X is always zero. */
7287 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7288 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7289 return omit_one_operand (type, integer_zero_node, arg0);
7291 t1 = distribute_bit_expr (code, type, arg0, arg1);
7292 if (t1 != NULL_TREE)
7294 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7295 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7296 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7299 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7301 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7302 && (~TREE_INT_CST_LOW (arg1)
7303 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7304 return fold_convert (type, TREE_OPERAND (arg0, 0));
7307 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7309 This results in more efficient code for machines without a NOR
7310 instruction. Combine will canonicalize to the first form
7311 which will allow use of NOR instructions provided by the
7312 backend if they exist. */
7313 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7314 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7316 return fold (build1 (BIT_NOT_EXPR, type,
7317 build2 (BIT_IOR_EXPR, type,
7318 TREE_OPERAND (arg0, 0),
7319 TREE_OPERAND (arg1, 0))));
7325 /* Don't touch a floating-point divide by zero unless the mode
7326 of the constant can represent infinity. */
7327 if (TREE_CODE (arg1) == REAL_CST
7328 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7329 && real_zerop (arg1))
7332 /* (-A) / (-B) -> A / B */
7333 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7334 return fold (build2 (RDIV_EXPR, type,
7335 TREE_OPERAND (arg0, 0),
7336 negate_expr (arg1)));
7337 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7338 return fold (build2 (RDIV_EXPR, type,
7340 TREE_OPERAND (arg1, 0)));
7342 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7343 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7344 && real_onep (arg1))
7345 return non_lvalue (fold_convert (type, arg0));
7347 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7348 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7349 && real_minus_onep (arg1))
7350 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7352 /* If ARG1 is a constant, we can convert this to a multiply by the
7353 reciprocal. This does not have the same rounding properties,
7354 so only do this if -funsafe-math-optimizations. We can actually
7355 always safely do it if ARG1 is a power of two, but it's hard to
7356 tell if it is or not in a portable manner. */
7357 if (TREE_CODE (arg1) == REAL_CST)
7359 if (flag_unsafe_math_optimizations
7360 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7362 return fold (build2 (MULT_EXPR, type, arg0, tem));
7363 /* Find the reciprocal if optimizing and the result is exact. */
7367 r = TREE_REAL_CST (arg1);
7368 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7370 tem = build_real (type, r);
7371 return fold (build2 (MULT_EXPR, type, arg0, tem));
7375 /* Convert A/B/C to A/(B*C). */
7376 if (flag_unsafe_math_optimizations
7377 && TREE_CODE (arg0) == RDIV_EXPR)
7378 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7379 fold (build2 (MULT_EXPR, type,
7380 TREE_OPERAND (arg0, 1), arg1))));
7382 /* Convert A/(B/C) to (A/B)*C. */
7383 if (flag_unsafe_math_optimizations
7384 && TREE_CODE (arg1) == RDIV_EXPR)
7385 return fold (build2 (MULT_EXPR, type,
7386 fold (build2 (RDIV_EXPR, type, arg0,
7387 TREE_OPERAND (arg1, 0))),
7388 TREE_OPERAND (arg1, 1)));
7390 /* Convert C1/(X*C2) into (C1/C2)/X. */
7391 if (flag_unsafe_math_optimizations
7392 && TREE_CODE (arg1) == MULT_EXPR
7393 && TREE_CODE (arg0) == REAL_CST
7394 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7396 tree tem = const_binop (RDIV_EXPR, arg0,
7397 TREE_OPERAND (arg1, 1), 0);
7399 return fold (build2 (RDIV_EXPR, type, tem,
7400 TREE_OPERAND (arg1, 0)));
7403 if (flag_unsafe_math_optimizations)
7405 enum built_in_function fcode = builtin_mathfn_code (arg1);
7406 /* Optimize x/expN(y) into x*expN(-y). */
7407 if (BUILTIN_EXPONENT_P (fcode))
7409 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7410 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7411 tree arglist = build_tree_list (NULL_TREE,
7412 fold_convert (type, arg));
7413 arg1 = build_function_call_expr (expfn, arglist);
7414 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7417 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7418 if (fcode == BUILT_IN_POW
7419 || fcode == BUILT_IN_POWF
7420 || fcode == BUILT_IN_POWL)
7422 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7423 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7424 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7425 tree neg11 = fold_convert (type, negate_expr (arg11));
7426 tree arglist = tree_cons(NULL_TREE, arg10,
7427 build_tree_list (NULL_TREE, neg11));
7428 arg1 = build_function_call_expr (powfn, arglist);
7429 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7433 if (flag_unsafe_math_optimizations)
7435 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7436 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7438 /* Optimize sin(x)/cos(x) as tan(x). */
7439 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7440 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7441 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7442 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7443 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7445 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7447 if (tanfn != NULL_TREE)
7448 return build_function_call_expr (tanfn,
7449 TREE_OPERAND (arg0, 1));
7452 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7453 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7454 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7455 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7456 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7457 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7459 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7461 if (tanfn != NULL_TREE)
7463 tree tmp = TREE_OPERAND (arg0, 1);
7464 tmp = build_function_call_expr (tanfn, tmp);
7465 return fold (build2 (RDIV_EXPR, type,
7466 build_real (type, dconst1), tmp));
7470 /* Optimize pow(x,c)/x as pow(x,c-1). */
7471 if (fcode0 == BUILT_IN_POW
7472 || fcode0 == BUILT_IN_POWF
7473 || fcode0 == BUILT_IN_POWL)
7475 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7476 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7477 if (TREE_CODE (arg01) == REAL_CST
7478 && ! TREE_CONSTANT_OVERFLOW (arg01)
7479 && operand_equal_p (arg1, arg00, 0))
7481 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7485 c = TREE_REAL_CST (arg01);
7486 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7487 arg = build_real (type, c);
7488 arglist = build_tree_list (NULL_TREE, arg);
7489 arglist = tree_cons (NULL_TREE, arg1, arglist);
7490 return build_function_call_expr (powfn, arglist);
7496 case TRUNC_DIV_EXPR:
7497 case ROUND_DIV_EXPR:
7498 case FLOOR_DIV_EXPR:
7500 case EXACT_DIV_EXPR:
7501 if (integer_onep (arg1))
7502 return non_lvalue (fold_convert (type, arg0));
7503 if (integer_zerop (arg1))
7506 if (!TYPE_UNSIGNED (type)
7507 && TREE_CODE (arg1) == INTEGER_CST
7508 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7509 && TREE_INT_CST_HIGH (arg1) == -1)
7510 return fold_convert (type, negate_expr (arg0));
7512 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7513 operation, EXACT_DIV_EXPR.
7515 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7516 At one time others generated faster code, it's not clear if they do
7517 after the last round to changes to the DIV code in expmed.c. */
7518 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7519 && multiple_of_p (type, arg0, arg1))
7520 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7522 if (TREE_CODE (arg1) == INTEGER_CST
7523 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7525 return fold_convert (type, tem);
7530 case FLOOR_MOD_EXPR:
7531 case ROUND_MOD_EXPR:
7532 case TRUNC_MOD_EXPR:
7533 if (integer_onep (arg1))
7534 return omit_one_operand (type, integer_zero_node, arg0);
7535 if (integer_zerop (arg1))
7538 /* X % -1 is zero. */
7539 if (!TYPE_UNSIGNED (type)
7540 && TREE_CODE (arg1) == INTEGER_CST
7541 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7542 && TREE_INT_CST_HIGH (arg1) == -1)
7543 return omit_one_operand (type, integer_zero_node, arg0);
7545 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7546 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7547 if (code == TRUNC_MOD_EXPR
7548 && TYPE_UNSIGNED (type)
7549 && integer_pow2p (arg1))
7551 unsigned HOST_WIDE_INT high, low;
7555 l = tree_log2 (arg1);
7556 if (l >= HOST_BITS_PER_WIDE_INT)
7558 high = ((unsigned HOST_WIDE_INT) 1
7559 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7565 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7568 mask = build_int_cst_wide (type, low, high);
7569 return fold (build2 (BIT_AND_EXPR, type,
7570 fold_convert (type, arg0), mask));
7573 /* X % -C is the same as X % C. */
7574 if (code == TRUNC_MOD_EXPR
7575 && !TYPE_UNSIGNED (type)
7576 && TREE_CODE (arg1) == INTEGER_CST
7577 && TREE_INT_CST_HIGH (arg1) < 0
7579 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7580 && !sign_bit_p (arg1, arg1))
7581 return fold (build2 (code, type, fold_convert (type, arg0),
7582 fold_convert (type, negate_expr (arg1))));
7584 /* X % -Y is the same as X % Y. */
7585 if (code == TRUNC_MOD_EXPR
7586 && !TYPE_UNSIGNED (type)
7587 && TREE_CODE (arg1) == NEGATE_EXPR
7589 return fold (build2 (code, type, fold_convert (type, arg0),
7590 fold_convert (type, TREE_OPERAND (arg1, 0))));
7592 if (TREE_CODE (arg1) == INTEGER_CST
7593 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7595 return fold_convert (type, tem);
7601 if (integer_all_onesp (arg0))
7602 return omit_one_operand (type, arg0, arg1);
7606 /* Optimize -1 >> x for arithmetic right shifts. */
7607 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7608 return omit_one_operand (type, arg0, arg1);
7609 /* ... fall through ... */
7613 if (integer_zerop (arg1))
7614 return non_lvalue (fold_convert (type, arg0));
7615 if (integer_zerop (arg0))
7616 return omit_one_operand (type, arg0, arg1);
7618 /* Since negative shift count is not well-defined,
7619 don't try to compute it in the compiler. */
7620 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7622 /* Rewrite an LROTATE_EXPR by a constant into an
7623 RROTATE_EXPR by a new constant. */
7624 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7626 tree tem = build_int_cst (NULL_TREE,
7627 GET_MODE_BITSIZE (TYPE_MODE (type)));
7628 tem = fold_convert (TREE_TYPE (arg1), tem);
7629 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7630 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7633 /* If we have a rotate of a bit operation with the rotate count and
7634 the second operand of the bit operation both constant,
7635 permute the two operations. */
7636 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7637 && (TREE_CODE (arg0) == BIT_AND_EXPR
7638 || TREE_CODE (arg0) == BIT_IOR_EXPR
7639 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7641 return fold (build2 (TREE_CODE (arg0), type,
7642 fold (build2 (code, type,
7643 TREE_OPERAND (arg0, 0), arg1)),
7644 fold (build2 (code, type,
7645 TREE_OPERAND (arg0, 1), arg1))));
7647 /* Two consecutive rotates adding up to the width of the mode can
7649 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7650 && TREE_CODE (arg0) == RROTATE_EXPR
7651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7652 && TREE_INT_CST_HIGH (arg1) == 0
7653 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7654 && ((TREE_INT_CST_LOW (arg1)
7655 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7656 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7657 return TREE_OPERAND (arg0, 0);
7662 if (operand_equal_p (arg0, arg1, 0))
7663 return omit_one_operand (type, arg0, arg1);
7664 if (INTEGRAL_TYPE_P (type)
7665 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7666 return omit_one_operand (type, arg1, arg0);
7670 if (operand_equal_p (arg0, arg1, 0))
7671 return omit_one_operand (type, arg0, arg1);
7672 if (INTEGRAL_TYPE_P (type)
7673 && TYPE_MAX_VALUE (type)
7674 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7675 return omit_one_operand (type, arg1, arg0);
7678 case TRUTH_NOT_EXPR:
7679 /* The argument to invert_truthvalue must have Boolean type. */
7680 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7681 arg0 = fold_convert (boolean_type_node, arg0);
7683 /* Note that the operand of this must be an int
7684 and its values must be 0 or 1.
7685 ("true" is a fixed value perhaps depending on the language,
7686 but we don't handle values other than 1 correctly yet.) */
7687 tem = invert_truthvalue (arg0);
7688 /* Avoid infinite recursion. */
7689 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7691 tem = fold_single_bit_test (code, arg0, arg1, type);
7696 return fold_convert (type, tem);
7698 case TRUTH_ANDIF_EXPR:
7699 /* Note that the operands of this must be ints
7700 and their values must be 0 or 1.
7701 ("true" is a fixed value perhaps depending on the language.) */
7702 /* If first arg is constant zero, return it. */
7703 if (integer_zerop (arg0))
7704 return fold_convert (type, arg0);
7705 case TRUTH_AND_EXPR:
7706 /* If either arg is constant true, drop it. */
7707 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7708 return non_lvalue (fold_convert (type, arg1));
7709 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7710 /* Preserve sequence points. */
7711 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7712 return non_lvalue (fold_convert (type, arg0));
7713 /* If second arg is constant zero, result is zero, but first arg
7714 must be evaluated. */
7715 if (integer_zerop (arg1))
7716 return omit_one_operand (type, arg1, arg0);
7717 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7718 case will be handled here. */
7719 if (integer_zerop (arg0))
7720 return omit_one_operand (type, arg0, arg1);
7722 /* !X && X is always false. */
7723 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7725 return omit_one_operand (type, integer_zero_node, arg1);
7726 /* X && !X is always false. */
7727 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7728 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7729 return omit_one_operand (type, integer_zero_node, arg0);
7732 /* We only do these simplifications if we are optimizing. */
7736 /* Check for things like (A || B) && (A || C). We can convert this
7737 to A || (B && C). Note that either operator can be any of the four
7738 truth and/or operations and the transformation will still be
7739 valid. Also note that we only care about order for the
7740 ANDIF and ORIF operators. If B contains side effects, this
7741 might change the truth-value of A. */
7742 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7743 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7744 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7745 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7746 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7747 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7749 tree a00 = TREE_OPERAND (arg0, 0);
7750 tree a01 = TREE_OPERAND (arg0, 1);
7751 tree a10 = TREE_OPERAND (arg1, 0);
7752 tree a11 = TREE_OPERAND (arg1, 1);
7753 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7754 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7755 && (code == TRUTH_AND_EXPR
7756 || code == TRUTH_OR_EXPR));
7758 if (operand_equal_p (a00, a10, 0))
7759 return fold (build2 (TREE_CODE (arg0), type, a00,
7760 fold (build2 (code, type, a01, a11))));
7761 else if (commutative && operand_equal_p (a00, a11, 0))
7762 return fold (build2 (TREE_CODE (arg0), type, a00,
7763 fold (build2 (code, type, a01, a10))));
7764 else if (commutative && operand_equal_p (a01, a10, 0))
7765 return fold (build2 (TREE_CODE (arg0), type, a01,
7766 fold (build2 (code, type, a00, a11))));
7768 /* This case is tricky because we must either have commutative
7769 operators or else A10 must not have side-effects. */
7771 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7772 && operand_equal_p (a01, a11, 0))
7773 return fold (build2 (TREE_CODE (arg0), type,
7774 fold (build2 (code, type, a00, a10)),
7778 /* See if we can build a range comparison. */
7779 if (0 != (tem = fold_range_test (t)))
7782 /* Check for the possibility of merging component references. If our
7783 lhs is another similar operation, try to merge its rhs with our
7784 rhs. Then try to merge our lhs and rhs. */
7785 if (TREE_CODE (arg0) == code
7786 && 0 != (tem = fold_truthop (code, type,
7787 TREE_OPERAND (arg0, 1), arg1)))
7788 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7790 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7795 case TRUTH_ORIF_EXPR:
7796 /* Note that the operands of this must be ints
7797 and their values must be 0 or true.
7798 ("true" is a fixed value perhaps depending on the language.) */
7799 /* If first arg is constant true, return it. */
7800 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7801 return fold_convert (type, arg0);
7803 /* If either arg is constant zero, drop it. */
7804 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7805 return non_lvalue (fold_convert (type, arg1));
7806 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7807 /* Preserve sequence points. */
7808 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7809 return non_lvalue (fold_convert (type, arg0));
7810 /* If second arg is constant true, result is true, but we must
7811 evaluate first arg. */
7812 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7813 return omit_one_operand (type, arg1, arg0);
7814 /* Likewise for first arg, but note this only occurs here for
7816 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7817 return omit_one_operand (type, arg0, arg1);
7819 /* !X || X is always true. */
7820 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7821 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7822 return omit_one_operand (type, integer_one_node, arg1);
7823 /* X || !X is always true. */
7824 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7825 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7826 return omit_one_operand (type, integer_one_node, arg0);
7830 case TRUTH_XOR_EXPR:
7831 /* If the second arg is constant zero, drop it. */
7832 if (integer_zerop (arg1))
7833 return non_lvalue (fold_convert (type, arg0));
7834 /* If the second arg is constant true, this is a logical inversion. */
7835 if (integer_onep (arg1))
7836 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7837 /* Identical arguments cancel to zero. */
7838 if (operand_equal_p (arg0, arg1, 0))
7839 return omit_one_operand (type, integer_zero_node, arg0);
7841 /* !X ^ X is always true. */
7842 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7843 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7844 return omit_one_operand (type, integer_one_node, arg1);
7846 /* X ^ !X is always true. */
7847 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7848 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7849 return omit_one_operand (type, integer_one_node, arg0);
7859 /* If one arg is a real or integer constant, put it last. */
7860 if (tree_swap_operands_p (arg0, arg1, true))
7861 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7863 /* If this is an equality comparison of the address of a non-weak
7864 object against zero, then we know the result. */
7865 if ((code == EQ_EXPR || code == NE_EXPR)
7866 && TREE_CODE (arg0) == ADDR_EXPR
7867 && DECL_P (TREE_OPERAND (arg0, 0))
7868 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7869 && integer_zerop (arg1))
7870 return constant_boolean_node (code != EQ_EXPR, type);
7872 /* If this is an equality comparison of the address of two non-weak,
7873 unaliased symbols neither of which are extern (since we do not
7874 have access to attributes for externs), then we know the result. */
7875 if ((code == EQ_EXPR || code == NE_EXPR)
7876 && TREE_CODE (arg0) == ADDR_EXPR
7877 && DECL_P (TREE_OPERAND (arg0, 0))
7878 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7879 && ! lookup_attribute ("alias",
7880 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7881 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7882 && TREE_CODE (arg1) == ADDR_EXPR
7883 && DECL_P (TREE_OPERAND (arg1, 0))
7884 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7885 && ! lookup_attribute ("alias",
7886 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7887 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7888 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7889 ? code == EQ_EXPR : code != EQ_EXPR,
7892 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7894 tree targ0 = strip_float_extensions (arg0);
7895 tree targ1 = strip_float_extensions (arg1);
7896 tree newtype = TREE_TYPE (targ0);
7898 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7899 newtype = TREE_TYPE (targ1);
7901 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7902 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7903 return fold (build2 (code, type, fold_convert (newtype, targ0),
7904 fold_convert (newtype, targ1)));
7906 /* (-a) CMP (-b) -> b CMP a */
7907 if (TREE_CODE (arg0) == NEGATE_EXPR
7908 && TREE_CODE (arg1) == NEGATE_EXPR)
7909 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7910 TREE_OPERAND (arg0, 0)));
7912 if (TREE_CODE (arg1) == REAL_CST)
7914 REAL_VALUE_TYPE cst;
7915 cst = TREE_REAL_CST (arg1);
7917 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7918 if (TREE_CODE (arg0) == NEGATE_EXPR)
7920 fold (build2 (swap_tree_comparison (code), type,
7921 TREE_OPERAND (arg0, 0),
7922 build_real (TREE_TYPE (arg1),
7923 REAL_VALUE_NEGATE (cst))));
7925 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7926 /* a CMP (-0) -> a CMP 0 */
7927 if (REAL_VALUE_MINUS_ZERO (cst))
7928 return fold (build2 (code, type, arg0,
7929 build_real (TREE_TYPE (arg1), dconst0)));
7931 /* x != NaN is always true, other ops are always false. */
7932 if (REAL_VALUE_ISNAN (cst)
7933 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7935 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7936 return omit_one_operand (type, tem, arg0);
7939 /* Fold comparisons against infinity. */
7940 if (REAL_VALUE_ISINF (cst))
7942 tem = fold_inf_compare (code, type, arg0, arg1);
7943 if (tem != NULL_TREE)
7948 /* If this is a comparison of a real constant with a PLUS_EXPR
7949 or a MINUS_EXPR of a real constant, we can convert it into a
7950 comparison with a revised real constant as long as no overflow
7951 occurs when unsafe_math_optimizations are enabled. */
7952 if (flag_unsafe_math_optimizations
7953 && TREE_CODE (arg1) == REAL_CST
7954 && (TREE_CODE (arg0) == PLUS_EXPR
7955 || TREE_CODE (arg0) == MINUS_EXPR)
7956 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7957 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7958 ? MINUS_EXPR : PLUS_EXPR,
7959 arg1, TREE_OPERAND (arg0, 1), 0))
7960 && ! TREE_CONSTANT_OVERFLOW (tem))
7961 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7963 /* Likewise, we can simplify a comparison of a real constant with
7964 a MINUS_EXPR whose first operand is also a real constant, i.e.
7965 (c1 - x) < c2 becomes x > c1-c2. */
7966 if (flag_unsafe_math_optimizations
7967 && TREE_CODE (arg1) == REAL_CST
7968 && TREE_CODE (arg0) == MINUS_EXPR
7969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7970 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7972 && ! TREE_CONSTANT_OVERFLOW (tem))
7973 return fold (build2 (swap_tree_comparison (code), type,
7974 TREE_OPERAND (arg0, 1), tem));
7976 /* Fold comparisons against built-in math functions. */
7977 if (TREE_CODE (arg1) == REAL_CST
7978 && flag_unsafe_math_optimizations
7979 && ! flag_errno_math)
7981 enum built_in_function fcode = builtin_mathfn_code (arg0);
7983 if (fcode != END_BUILTINS)
7985 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7986 if (tem != NULL_TREE)
7992 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7993 if (TREE_CONSTANT (arg1)
7994 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7995 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7996 /* This optimization is invalid for ordered comparisons
7997 if CONST+INCR overflows or if foo+incr might overflow.
7998 This optimization is invalid for floating point due to rounding.
7999 For pointer types we assume overflow doesn't happen. */
8000 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8001 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8002 && (code == EQ_EXPR || code == NE_EXPR))))
8004 tree varop, newconst;
8006 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8008 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8009 arg1, TREE_OPERAND (arg0, 1)));
8010 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8011 TREE_OPERAND (arg0, 0),
8012 TREE_OPERAND (arg0, 1));
8016 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8017 arg1, TREE_OPERAND (arg0, 1)));
8018 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8019 TREE_OPERAND (arg0, 0),
8020 TREE_OPERAND (arg0, 1));
8024 /* If VAROP is a reference to a bitfield, we must mask
8025 the constant by the width of the field. */
8026 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8027 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8028 && host_integerp (DECL_SIZE (TREE_OPERAND
8029 (TREE_OPERAND (varop, 0), 1)), 1))
8031 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8032 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8033 tree folded_compare, shift;
8035 /* First check whether the comparison would come out
8036 always the same. If we don't do that we would
8037 change the meaning with the masking. */
8038 folded_compare = fold (build2 (code, type,
8039 TREE_OPERAND (varop, 0), arg1));
8040 if (integer_zerop (folded_compare)
8041 || integer_onep (folded_compare))
8042 return omit_one_operand (type, folded_compare, varop);
8044 shift = build_int_cst (NULL_TREE,
8045 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8046 shift = fold_convert (TREE_TYPE (varop), shift);
8047 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8049 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8053 return fold (build2 (code, type, varop, newconst));
8056 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8057 This transformation affects the cases which are handled in later
8058 optimizations involving comparisons with non-negative constants. */
8059 if (TREE_CODE (arg1) == INTEGER_CST
8060 && TREE_CODE (arg0) != INTEGER_CST
8061 && tree_int_cst_sgn (arg1) > 0)
8066 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8067 return fold (build2 (GT_EXPR, type, arg0, arg1));
8070 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8071 return fold (build2 (LE_EXPR, type, arg0, arg1));
8078 /* Comparisons with the highest or lowest possible integer of
8079 the specified size will have known values.
8081 This is quite similar to fold_relational_hi_lo; however, my
8082 attempts to share the code have been nothing but trouble.
8083 I give up for now. */
8085 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8087 if (TREE_CODE (arg1) == INTEGER_CST
8088 && ! TREE_CONSTANT_OVERFLOW (arg1)
8089 && width <= HOST_BITS_PER_WIDE_INT
8090 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8091 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8093 unsigned HOST_WIDE_INT signed_max;
8094 unsigned HOST_WIDE_INT max, min;
8096 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8098 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8100 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8106 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8109 if (TREE_INT_CST_HIGH (arg1) == 0
8110 && TREE_INT_CST_LOW (arg1) == max)
8114 return omit_one_operand (type, integer_zero_node, arg0);
8117 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8120 return omit_one_operand (type, integer_one_node, arg0);
8123 return fold (build2 (NE_EXPR, type, arg0, arg1));
8125 /* The GE_EXPR and LT_EXPR cases above are not normally
8126 reached because of previous transformations. */
8131 else if (TREE_INT_CST_HIGH (arg1) == 0
8132 && TREE_INT_CST_LOW (arg1) == max - 1)
8136 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8137 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8139 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8140 return fold (build2 (NE_EXPR, type, arg0, arg1));
8144 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8145 && TREE_INT_CST_LOW (arg1) == min)
8149 return omit_one_operand (type, integer_zero_node, arg0);
8152 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8155 return omit_one_operand (type, integer_one_node, arg0);
8158 return fold (build2 (NE_EXPR, type, arg0, arg1));
8163 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8164 && TREE_INT_CST_LOW (arg1) == min + 1)
8168 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8169 return fold (build2 (NE_EXPR, type, arg0, arg1));
8171 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8172 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8177 else if (!in_gimple_form
8178 && TREE_INT_CST_HIGH (arg1) == 0
8179 && TREE_INT_CST_LOW (arg1) == signed_max
8180 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8181 /* signed_type does not work on pointer types. */
8182 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8184 /* The following case also applies to X < signed_max+1
8185 and X >= signed_max+1 because previous transformations. */
8186 if (code == LE_EXPR || code == GT_EXPR)
8189 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8190 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8192 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8193 type, fold_convert (st0, arg0),
8194 fold_convert (st1, integer_zero_node)));
8200 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8201 a MINUS_EXPR of a constant, we can convert it into a comparison with
8202 a revised constant as long as no overflow occurs. */
8203 if ((code == EQ_EXPR || code == NE_EXPR)
8204 && TREE_CODE (arg1) == INTEGER_CST
8205 && (TREE_CODE (arg0) == PLUS_EXPR
8206 || TREE_CODE (arg0) == MINUS_EXPR)
8207 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8208 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8209 ? MINUS_EXPR : PLUS_EXPR,
8210 arg1, TREE_OPERAND (arg0, 1), 0))
8211 && ! TREE_CONSTANT_OVERFLOW (tem))
8212 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8214 /* Similarly for a NEGATE_EXPR. */
8215 else if ((code == EQ_EXPR || code == NE_EXPR)
8216 && TREE_CODE (arg0) == NEGATE_EXPR
8217 && TREE_CODE (arg1) == INTEGER_CST
8218 && 0 != (tem = negate_expr (arg1))
8219 && TREE_CODE (tem) == INTEGER_CST
8220 && ! TREE_CONSTANT_OVERFLOW (tem))
8221 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8223 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8224 for !=. Don't do this for ordered comparisons due to overflow. */
8225 else if ((code == NE_EXPR || code == EQ_EXPR)
8226 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8227 return fold (build2 (code, type,
8228 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8230 /* If we are widening one operand of an integer comparison,
8231 see if the other operand is similarly being widened. Perhaps we
8232 can do the comparison in the narrower type. */
8233 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8234 && TREE_CODE (arg0) == NOP_EXPR
8235 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8236 && (code == EQ_EXPR || code == NE_EXPR
8237 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8238 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8239 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8240 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8241 || (TREE_CODE (t1) == INTEGER_CST
8242 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8243 return fold (build2 (code, type, tem,
8244 fold_convert (TREE_TYPE (tem), t1)));
8246 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8247 constant, we can simplify it. */
8248 else if (TREE_CODE (arg1) == INTEGER_CST
8249 && (TREE_CODE (arg0) == MIN_EXPR
8250 || TREE_CODE (arg0) == MAX_EXPR)
8251 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8252 return optimize_minmax_comparison (t);
8254 /* If we are comparing an ABS_EXPR with a constant, we can
8255 convert all the cases into explicit comparisons, but they may
8256 well not be faster than doing the ABS and one comparison.
8257 But ABS (X) <= C is a range comparison, which becomes a subtraction
8258 and a comparison, and is probably faster. */
8259 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8260 && TREE_CODE (arg0) == ABS_EXPR
8261 && ! TREE_SIDE_EFFECTS (arg0)
8262 && (0 != (tem = negate_expr (arg1)))
8263 && TREE_CODE (tem) == INTEGER_CST
8264 && ! TREE_CONSTANT_OVERFLOW (tem))
8265 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8266 build2 (GE_EXPR, type,
8267 TREE_OPERAND (arg0, 0), tem),
8268 build2 (LE_EXPR, type,
8269 TREE_OPERAND (arg0, 0), arg1)));
8271 /* If this is an EQ or NE comparison with zero and ARG0 is
8272 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8273 two operations, but the latter can be done in one less insn
8274 on machines that have only two-operand insns or on which a
8275 constant cannot be the first operand. */
8276 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8277 && TREE_CODE (arg0) == BIT_AND_EXPR)
8279 tree arg00 = TREE_OPERAND (arg0, 0);
8280 tree arg01 = TREE_OPERAND (arg0, 1);
8281 if (TREE_CODE (arg00) == LSHIFT_EXPR
8282 && integer_onep (TREE_OPERAND (arg00, 0)))
8284 fold (build2 (code, type,
8285 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8286 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8287 arg01, TREE_OPERAND (arg00, 1)),
8288 fold_convert (TREE_TYPE (arg0),
8291 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8292 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8294 fold (build2 (code, type,
8295 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8296 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8297 arg00, TREE_OPERAND (arg01, 1)),
8298 fold_convert (TREE_TYPE (arg0),
8303 /* If this is an NE or EQ comparison of zero against the result of a
8304 signed MOD operation whose second operand is a power of 2, make
8305 the MOD operation unsigned since it is simpler and equivalent. */
8306 if ((code == NE_EXPR || code == EQ_EXPR)
8307 && integer_zerop (arg1)
8308 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8309 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8310 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8311 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8312 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8313 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8315 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8316 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8317 fold_convert (newtype,
8318 TREE_OPERAND (arg0, 0)),
8319 fold_convert (newtype,
8320 TREE_OPERAND (arg0, 1))));
8322 return fold (build2 (code, type, newmod,
8323 fold_convert (newtype, arg1)));
8326 /* If this is an NE comparison of zero with an AND of one, remove the
8327 comparison since the AND will give the correct value. */
8328 if (code == NE_EXPR && integer_zerop (arg1)
8329 && TREE_CODE (arg0) == BIT_AND_EXPR
8330 && integer_onep (TREE_OPERAND (arg0, 1)))
8331 return fold_convert (type, arg0);
8333 /* If we have (A & C) == C where C is a power of 2, convert this into
8334 (A & C) != 0. Similarly for NE_EXPR. */
8335 if ((code == EQ_EXPR || code == NE_EXPR)
8336 && TREE_CODE (arg0) == BIT_AND_EXPR
8337 && integer_pow2p (TREE_OPERAND (arg0, 1))
8338 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8339 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8340 arg0, fold_convert (TREE_TYPE (arg0),
8341 integer_zero_node)));
8343 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8344 2, then fold the expression into shifts and logical operations. */
8345 tem = fold_single_bit_test (code, arg0, arg1, type);
8349 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8350 Similarly for NE_EXPR. */
8351 if ((code == EQ_EXPR || code == NE_EXPR)
8352 && TREE_CODE (arg0) == BIT_AND_EXPR
8353 && TREE_CODE (arg1) == INTEGER_CST
8354 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8357 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8358 arg1, build1 (BIT_NOT_EXPR,
8359 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8360 TREE_OPERAND (arg0, 1))));
8361 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8362 if (integer_nonzerop (dandnotc))
8363 return omit_one_operand (type, rslt, arg0);
8366 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8367 Similarly for NE_EXPR. */
8368 if ((code == EQ_EXPR || code == NE_EXPR)
8369 && TREE_CODE (arg0) == BIT_IOR_EXPR
8370 && TREE_CODE (arg1) == INTEGER_CST
8371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8374 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8375 TREE_OPERAND (arg0, 1),
8376 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8377 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8378 if (integer_nonzerop (candnotd))
8379 return omit_one_operand (type, rslt, arg0);
8382 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8383 and similarly for >= into !=. */
8384 if ((code == LT_EXPR || code == GE_EXPR)
8385 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8386 && TREE_CODE (arg1) == LSHIFT_EXPR
8387 && integer_onep (TREE_OPERAND (arg1, 0)))
8388 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8389 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8390 TREE_OPERAND (arg1, 1)),
8391 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8393 else if ((code == LT_EXPR || code == GE_EXPR)
8394 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8395 && (TREE_CODE (arg1) == NOP_EXPR
8396 || TREE_CODE (arg1) == CONVERT_EXPR)
8397 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8398 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8400 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8401 fold_convert (TREE_TYPE (arg0),
8402 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8403 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8405 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8407 /* Simplify comparison of something with itself. (For IEEE
8408 floating-point, we can only do some of these simplifications.) */
8409 if (operand_equal_p (arg0, arg1, 0))
8414 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8415 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8416 return constant_boolean_node (1, type);
8421 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8422 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8423 return constant_boolean_node (1, type);
8424 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8427 /* For NE, we can only do this simplification if integer
8428 or we don't honor IEEE floating point NaNs. */
8429 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8430 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8432 /* ... fall through ... */
8435 return constant_boolean_node (0, type);
8441 /* If we are comparing an expression that just has comparisons
8442 of two integer values, arithmetic expressions of those comparisons,
8443 and constants, we can simplify it. There are only three cases
8444 to check: the two values can either be equal, the first can be
8445 greater, or the second can be greater. Fold the expression for
8446 those three values. Since each value must be 0 or 1, we have
8447 eight possibilities, each of which corresponds to the constant 0
8448 or 1 or one of the six possible comparisons.
8450 This handles common cases like (a > b) == 0 but also handles
8451 expressions like ((x > y) - (y > x)) > 0, which supposedly
8452 occur in macroized code. */
8454 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8456 tree cval1 = 0, cval2 = 0;
8459 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8460 /* Don't handle degenerate cases here; they should already
8461 have been handled anyway. */
8462 && cval1 != 0 && cval2 != 0
8463 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8464 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8465 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8466 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8467 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8468 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8469 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8471 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8472 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8474 /* We can't just pass T to eval_subst in case cval1 or cval2
8475 was the same as ARG1. */
8478 = fold (build2 (code, type,
8479 eval_subst (arg0, cval1, maxval,
8483 = fold (build2 (code, type,
8484 eval_subst (arg0, cval1, maxval,
8488 = fold (build2 (code, type,
8489 eval_subst (arg0, cval1, minval,
8493 /* All three of these results should be 0 or 1. Confirm they
8494 are. Then use those values to select the proper code
8497 if ((integer_zerop (high_result)
8498 || integer_onep (high_result))
8499 && (integer_zerop (equal_result)
8500 || integer_onep (equal_result))
8501 && (integer_zerop (low_result)
8502 || integer_onep (low_result)))
8504 /* Make a 3-bit mask with the high-order bit being the
8505 value for `>', the next for '=', and the low for '<'. */
8506 switch ((integer_onep (high_result) * 4)
8507 + (integer_onep (equal_result) * 2)
8508 + integer_onep (low_result))
8512 return omit_one_operand (type, integer_zero_node, arg0);
8533 return omit_one_operand (type, integer_one_node, arg0);
8536 tem = build2 (code, type, cval1, cval2);
8538 return save_expr (tem);
8545 /* If this is a comparison of a field, we may be able to simplify it. */
8546 if (((TREE_CODE (arg0) == COMPONENT_REF
8547 && lang_hooks.can_use_bit_fields_p ())
8548 || TREE_CODE (arg0) == BIT_FIELD_REF)
8549 && (code == EQ_EXPR || code == NE_EXPR)
8550 /* Handle the constant case even without -O
8551 to make sure the warnings are given. */
8552 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8554 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8559 /* If this is a comparison of complex values and either or both sides
8560 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8561 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8562 This may prevent needless evaluations. */
8563 if ((code == EQ_EXPR || code == NE_EXPR)
8564 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8565 && (TREE_CODE (arg0) == COMPLEX_EXPR
8566 || TREE_CODE (arg1) == COMPLEX_EXPR
8567 || TREE_CODE (arg0) == COMPLEX_CST
8568 || TREE_CODE (arg1) == COMPLEX_CST))
8570 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8571 tree real0, imag0, real1, imag1;
8573 arg0 = save_expr (arg0);
8574 arg1 = save_expr (arg1);
8575 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8576 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8577 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8578 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8580 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8583 fold (build2 (code, type, real0, real1)),
8584 fold (build2 (code, type, imag0, imag1))));
8587 /* Optimize comparisons of strlen vs zero to a compare of the
8588 first character of the string vs zero. To wit,
8589 strlen(ptr) == 0 => *ptr == 0
8590 strlen(ptr) != 0 => *ptr != 0
8591 Other cases should reduce to one of these two (or a constant)
8592 due to the return value of strlen being unsigned. */
8593 if ((code == EQ_EXPR || code == NE_EXPR)
8594 && integer_zerop (arg1)
8595 && TREE_CODE (arg0) == CALL_EXPR)
8597 tree fndecl = get_callee_fndecl (arg0);
8601 && DECL_BUILT_IN (fndecl)
8602 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8603 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8604 && (arglist = TREE_OPERAND (arg0, 1))
8605 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8606 && ! TREE_CHAIN (arglist))
8607 return fold (build2 (code, type,
8608 build1 (INDIRECT_REF, char_type_node,
8609 TREE_VALUE (arglist)),
8610 fold_convert (char_type_node,
8611 integer_zero_node)));
8614 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8615 into a single range test. */
8616 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8617 && TREE_CODE (arg1) == INTEGER_CST
8618 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8619 && !integer_zerop (TREE_OPERAND (arg0, 1))
8620 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8621 && !TREE_OVERFLOW (arg1))
8623 t1 = fold_div_compare (code, type, arg0, arg1);
8624 if (t1 != NULL_TREE)
8628 if ((code == EQ_EXPR || code == NE_EXPR)
8629 && !TREE_SIDE_EFFECTS (arg0)
8630 && integer_zerop (arg1)
8631 && tree_expr_nonzero_p (arg0))
8632 return constant_boolean_node (code==NE_EXPR, type);
8634 t1 = fold_relational_const (code, type, arg0, arg1);
8635 return t1 == NULL_TREE ? t : t1;
8637 case UNORDERED_EXPR:
8645 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8647 t1 = fold_relational_const (code, type, arg0, arg1);
8648 if (t1 != NULL_TREE)
8652 /* If the first operand is NaN, the result is constant. */
8653 if (TREE_CODE (arg0) == REAL_CST
8654 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8655 && (code != LTGT_EXPR || ! flag_trapping_math))
8657 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8660 return omit_one_operand (type, t1, arg1);
8663 /* If the second operand is NaN, the result is constant. */
8664 if (TREE_CODE (arg1) == REAL_CST
8665 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8666 && (code != LTGT_EXPR || ! flag_trapping_math))
8668 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8671 return omit_one_operand (type, t1, arg0);
8674 /* Simplify unordered comparison of something with itself. */
8675 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8676 && operand_equal_p (arg0, arg1, 0))
8677 return constant_boolean_node (1, type);
8679 if (code == LTGT_EXPR
8680 && !flag_trapping_math
8681 && operand_equal_p (arg0, arg1, 0))
8682 return constant_boolean_node (0, type);
8684 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8686 tree targ0 = strip_float_extensions (arg0);
8687 tree targ1 = strip_float_extensions (arg1);
8688 tree newtype = TREE_TYPE (targ0);
8690 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8691 newtype = TREE_TYPE (targ1);
8693 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8694 return fold (build2 (code, type, fold_convert (newtype, targ0),
8695 fold_convert (newtype, targ1)));
8701 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8702 so all simple results must be passed through pedantic_non_lvalue. */
8703 if (TREE_CODE (arg0) == INTEGER_CST)
8705 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8706 /* Only optimize constant conditions when the selected branch
8707 has the same type as the COND_EXPR. This avoids optimizing
8708 away "c ? x : throw", where the throw has a void type. */
8709 if (! VOID_TYPE_P (TREE_TYPE (tem))
8710 || VOID_TYPE_P (type))
8711 return pedantic_non_lvalue (tem);
8714 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8715 return pedantic_omit_one_operand (type, arg1, arg0);
8717 /* If we have A op B ? A : C, we may be able to convert this to a
8718 simpler expression, depending on the operation and the values
8719 of B and C. Signed zeros prevent all of these transformations,
8720 for reasons given above each one.
8722 Also try swapping the arguments and inverting the conditional. */
8723 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8724 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8725 arg1, TREE_OPERAND (arg0, 1))
8726 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8728 tem = fold_cond_expr_with_comparison (type, arg0,
8729 TREE_OPERAND (t, 1),
8730 TREE_OPERAND (t, 2));
8735 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8736 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8737 TREE_OPERAND (t, 2),
8738 TREE_OPERAND (arg0, 1))
8739 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8741 tem = invert_truthvalue (arg0);
8742 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8744 tem = fold_cond_expr_with_comparison (type, tem,
8745 TREE_OPERAND (t, 2),
8746 TREE_OPERAND (t, 1));
8752 /* If the second operand is simpler than the third, swap them
8753 since that produces better jump optimization results. */
8754 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8755 TREE_OPERAND (t, 2), false))
8757 /* See if this can be inverted. If it can't, possibly because
8758 it was a floating-point inequality comparison, don't do
8760 tem = invert_truthvalue (arg0);
8762 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8763 return fold (build3 (code, type, tem,
8764 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8767 /* Convert A ? 1 : 0 to simply A. */
8768 if (integer_onep (TREE_OPERAND (t, 1))
8769 && integer_zerop (TREE_OPERAND (t, 2))
8770 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8771 call to fold will try to move the conversion inside
8772 a COND, which will recurse. In that case, the COND_EXPR
8773 is probably the best choice, so leave it alone. */
8774 && type == TREE_TYPE (arg0))
8775 return pedantic_non_lvalue (arg0);
8777 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8778 over COND_EXPR in cases such as floating point comparisons. */
8779 if (integer_zerop (TREE_OPERAND (t, 1))
8780 && integer_onep (TREE_OPERAND (t, 2))
8781 && truth_value_p (TREE_CODE (arg0)))
8782 return pedantic_non_lvalue (fold_convert (type,
8783 invert_truthvalue (arg0)));
8785 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8786 if (TREE_CODE (arg0) == LT_EXPR
8787 && integer_zerop (TREE_OPERAND (arg0, 1))
8788 && integer_zerop (TREE_OPERAND (t, 2))
8789 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8790 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8791 TREE_TYPE (tem), tem, arg1)));
8793 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8794 already handled above. */
8795 if (TREE_CODE (arg0) == BIT_AND_EXPR
8796 && integer_onep (TREE_OPERAND (arg0, 1))
8797 && integer_zerop (TREE_OPERAND (t, 2))
8798 && integer_pow2p (arg1))
8800 tree tem = TREE_OPERAND (arg0, 0);
8802 if (TREE_CODE (tem) == RSHIFT_EXPR
8803 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8804 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8805 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8806 return fold (build2 (BIT_AND_EXPR, type,
8807 TREE_OPERAND (tem, 0), arg1));
8810 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8811 is probably obsolete because the first operand should be a
8812 truth value (that's why we have the two cases above), but let's
8813 leave it in until we can confirm this for all front-ends. */
8814 if (integer_zerop (TREE_OPERAND (t, 2))
8815 && TREE_CODE (arg0) == NE_EXPR
8816 && integer_zerop (TREE_OPERAND (arg0, 1))
8817 && integer_pow2p (arg1)
8818 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8819 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8820 arg1, OEP_ONLY_CONST))
8821 return pedantic_non_lvalue (fold_convert (type,
8822 TREE_OPERAND (arg0, 0)));
8824 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8825 if (integer_zerop (TREE_OPERAND (t, 2))
8826 && truth_value_p (TREE_CODE (arg0))
8827 && truth_value_p (TREE_CODE (arg1)))
8828 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8830 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8831 if (integer_onep (TREE_OPERAND (t, 2))
8832 && truth_value_p (TREE_CODE (arg0))
8833 && truth_value_p (TREE_CODE (arg1)))
8835 /* Only perform transformation if ARG0 is easily inverted. */
8836 tem = invert_truthvalue (arg0);
8837 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8838 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8841 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8842 if (integer_zerop (arg1)
8843 && truth_value_p (TREE_CODE (arg0))
8844 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8846 /* Only perform transformation if ARG0 is easily inverted. */
8847 tem = invert_truthvalue (arg0);
8848 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8849 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8850 TREE_OPERAND (t, 2)));
8853 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8854 if (integer_onep (arg1)
8855 && truth_value_p (TREE_CODE (arg0))
8856 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8857 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8858 TREE_OPERAND (t, 2)));
8863 /* When pedantic, a compound expression can be neither an lvalue
8864 nor an integer constant expression. */
8865 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8867 /* Don't let (0, 0) be null pointer constant. */
8868 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8869 : fold_convert (type, arg1);
8870 return pedantic_non_lvalue (tem);
8874 return build_complex (type, arg0, arg1);
8878 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8880 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8881 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8882 TREE_OPERAND (arg0, 1));
8883 else if (TREE_CODE (arg0) == COMPLEX_CST)
8884 return TREE_REALPART (arg0);
8885 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8886 return fold (build2 (TREE_CODE (arg0), type,
8887 fold (build1 (REALPART_EXPR, type,
8888 TREE_OPERAND (arg0, 0))),
8889 fold (build1 (REALPART_EXPR, type,
8890 TREE_OPERAND (arg0, 1)))));
8894 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8895 return fold_convert (type, integer_zero_node);
8896 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8897 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8898 TREE_OPERAND (arg0, 0));
8899 else if (TREE_CODE (arg0) == COMPLEX_CST)
8900 return TREE_IMAGPART (arg0);
8901 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8902 return fold (build2 (TREE_CODE (arg0), type,
8903 fold (build1 (IMAGPART_EXPR, type,
8904 TREE_OPERAND (arg0, 0))),
8905 fold (build1 (IMAGPART_EXPR, type,
8906 TREE_OPERAND (arg0, 1)))));
8909 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8911 case CLEANUP_POINT_EXPR:
8912 if (! has_cleanups (arg0))
8913 return TREE_OPERAND (t, 0);
8916 enum tree_code code0 = TREE_CODE (arg0);
8917 int kind0 = TREE_CODE_CLASS (code0);
8918 tree arg00 = TREE_OPERAND (arg0, 0);
8921 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8922 return fold (build1 (code0, type,
8923 fold (build1 (CLEANUP_POINT_EXPR,
8924 TREE_TYPE (arg00), arg00))));
8926 if (kind0 == '<' || kind0 == '2'
8927 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8928 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8929 || code0 == TRUTH_XOR_EXPR)
8931 arg01 = TREE_OPERAND (arg0, 1);
8933 if (TREE_CONSTANT (arg00)
8934 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8935 && ! has_cleanups (arg00)))
8936 return fold (build2 (code0, type, arg00,
8937 fold (build1 (CLEANUP_POINT_EXPR,
8938 TREE_TYPE (arg01), arg01))));
8940 if (TREE_CONSTANT (arg01))
8941 return fold (build2 (code0, type,
8942 fold (build1 (CLEANUP_POINT_EXPR,
8943 TREE_TYPE (arg00), arg00)),
8951 /* Check for a built-in function. */
8952 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8953 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8955 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8957 tree tmp = fold_builtin (t, false);
8965 } /* switch (code) */
8968 #ifdef ENABLE_FOLD_CHECKING
8971 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8972 static void fold_check_failed (tree, tree);
8973 void print_fold_checksum (tree);
8975 /* When --enable-checking=fold, compute a digest of expr before
8976 and after actual fold call to see if fold did not accidentally
8977 change original expr. */
8984 unsigned char checksum_before[16], checksum_after[16];
/* Checksum EXPR before folding.  HT records already-visited nodes so
   shared subtrees are hashed only once (see fold_checksum_tree).  */
8987 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8988 md5_init_ctx (&ctx);
8989 fold_checksum_tree (expr, &ctx, ht);
8990 md5_finish_ctx (&ctx, checksum_before);
/* Do the real folding work.  */
8993 ret = fold_1 (expr);
/* Re-checksum EXPR; any difference means fold_1 mutated its input.  */
8995 md5_init_ctx (&ctx);
8996 fold_checksum_tree (expr, &ctx, ht);
8997 md5_finish_ctx (&ctx, checksum_after);
9000 if (memcmp (checksum_before, checksum_after, 16))
9001 fold_check_failed (expr, ret);
/* Debugging entry point: write the 16-byte MD5 digest of EXPR's tree
   representation (as computed by fold_checksum_tree) to stderr as 32
   hex digits followed by a newline.  */
9007 print_fold_checksum (tree expr)
9010 unsigned char checksum[16], cnt;
9013 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9014 md5_init_ctx (&ctx);
9015 fold_checksum_tree (expr, &ctx, ht);
9016 md5_finish_ctx (&ctx, checksum);
/* Emit the digest in lowercase hex, two characters per byte.  */
9018 for (cnt = 0; cnt < 16; ++cnt)
9019 fprintf (stderr, "%02x", checksum[cnt]);
9020 putc ('\n', stderr);
/* Abort with an internal error when the fold checker detects that fold
   modified its input tree.  EXPR and RET are unused at runtime but are
   convenient to inspect from a debugger.  */
9024 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9026 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of the tree EXPR and everything it
   references into the MD5 context CTX.  HT is a pointer-hash table
   used so each node is visited only once.  Used by the fold checker
   to detect unintended mutation of trees.  */
9030 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9033 enum tree_code code;
9034 char buf[sizeof (struct tree_decl)];
/* BUF must be large enough to hold a copy of any node we scrub.  */
9037 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9038 <= sizeof (struct tree_decl))
9039 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
/* Register EXPR in HT; the early return taken on a repeat visit is
   not shown in this excerpt.  */
9042 slot = htab_find_slot (ht, expr, INSERT);
9046 code = TREE_CODE (expr);
/* Fields that fold is allowed to change are cleared before hashing.
   NOTE(review): a copy into BUF is made first and appears to be
   swapped in for EXPR in elided lines — confirm before relying on
   EXPR itself being left unmodified here.  */
9047 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9049 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9050 memcpy (buf, expr, tree_size (expr));
9052 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9054 else if (TREE_CODE_CLASS (code) == 't'
9055 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9057 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9058 memcpy (buf, expr, tree_size (expr));
9060 TYPE_POINTER_TO (expr) = NULL;
9061 TYPE_REFERENCE_TO (expr) = NULL;
/* Hash the node's raw bytes, then its type, then (for nodes that are
   neither types nor decls) its chain.  */
9063 md5_process_bytes (expr, tree_size (expr), ctx);
9064 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9065 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9066 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Hash the class-specific sub-trees.  */
9067 switch (TREE_CODE_CLASS (code))
9073 md5_process_bytes (TREE_STRING_POINTER (expr),
9074 TREE_STRING_LENGTH (expr), ctx);
9077 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9078 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9081 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9091 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9092 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9095 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9096 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* Expressions: hash each operand.  */
9108 len = first_rtl_op (code);
9109 for (i = 0; i < len; ++i)
9110 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declarations: hash every tree-valued field.  */
9113 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9114 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9115 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9116 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9117 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9118 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9119 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9120 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9121 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9122 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9123 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Types: hash every tree-valued field.  */
9126 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9127 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9128 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9129 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9130 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9131 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9132 if (INTEGRAL_TYPE_P (expr)
9133 || SCALAR_FLOAT_TYPE_P (expr))
9135 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9136 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9138 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9139 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9140 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9149 /* Perform constant folding and related simplification of initializer
9150 expression EXPR. This behaves identically to "fold" but ignores
9151 potential run-time traps and exceptions that fold must preserve. */
/* Fold EXPR for use as a static initializer: identical to fold (see
   the comment above) except that run-time traps and exceptions need
   not be preserved, since the value is computed at compile time.  */
9154 fold_initializer (tree expr)
/* Save the flags that restrict folding of trapping operations.  */
9156 int saved_signaling_nans = flag_signaling_nans;
9157 int saved_trapping_math = flag_trapping_math;
9158 int saved_trapv = flag_trapv;
/* Temporarily lift those restrictions so fold may simplify more
   aggressively.  */
9161 flag_signaling_nans = 0;
9162 flag_trapping_math = 0;
9165 result = fold (expr);
/* Restore the caller's flags before returning.  */
9167 flag_signaling_nans = saved_signaling_nans;
9168 flag_trapping_math = saved_trapping_math;
9169 flag_trapv = saved_trapv;
9174 /* Determine if first argument is a multiple of second argument. Return 0 if
9175 it is not, or we cannot easily determine it to be.
9177 An example of the sort of thing we care about (at this point; this routine
9178 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9179 fold cases do now) is discovering that
9181 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9187 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9189 This code also handles discovering that
9191 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9193 is a multiple of 8 so we don't have to worry about dealing with a
9196 Note that we *look* inside a SAVE_EXPR only to determine how it was
9197 calculated; it is not safe for fold to do much of anything else with the
9198 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9199 at run time. For example, the latter example above *cannot* be implemented
9200 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9201 evaluation time of the original SAVE_EXPR is not necessarily the same at
9202 the time the new expression is evaluated. The only optimization of this
9203 sort that would be valid is changing
9205 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9209 SAVE_EXPR (I) * SAVE_EXPR (J)
9211 (where the same SAVE_EXPR (J) is used in the original and the
9212 transformed version). */
/* Return nonzero when TOP, an expression of integer TYPE, is known to
   be an exact multiple of BOTTOM; zero when it is not or cannot easily
   be shown to be.  See the block comment above for motivation and the
   SAVE_EXPR caveats.  */
9215 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
9217 if (operand_equal_p (top, bottom, 0))
9220 if (TREE_CODE (type) != INTEGER_TYPE)
9223 switch (TREE_CODE (top))
/* Bit-or-like: it suffices that either operand is a multiple.  */
9226 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9227 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Additive: both operands must be multiples.  */
9231 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9232 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Shift by a constant: rewrite as the equivalent multiplication and
   recurse, provided the shifted constant does not overflow.  */
9235 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9239 op1 = TREE_OPERAND (top, 1);
9240 /* const_binop may not detect overflow correctly,
9241 so check for it explicitly here. */
9242 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9243 > TREE_INT_CST_LOW (op1)
9244 && TREE_INT_CST_HIGH (op1) == 0
9245 && 0 != (t1 = fold_convert (type,
9246 const_binop (LSHIFT_EXPR,
9249 && ! TREE_OVERFLOW (t1))
9250 return multiple_of_p (type, t1, bottom);
9255 /* Can't handle conversions from non-integral or wider integral type. */
9256 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9257 || (TYPE_PRECISION (type)
9258 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9261 /* .. fall through ... */
9264 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Default: fall back to a direct modulus test.  Valid only when
   BOTTOM is an INTEGER_CST and, for unsigned TYPE, when neither
   value is negative.  */
9267 if (TREE_CODE (bottom) != INTEGER_CST
9268 || (TYPE_UNSIGNED (type)
9269 && (tree_int_cst_sgn (top) < 0
9270 || tree_int_cst_sgn (bottom) < 0)))
9272 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9280 /* Return true if `t' is known to be non-negative. */
/* Return true when expression T is provably non-negative in its type;
   a false result means "not known", not "known negative".  */
9283 tree_expr_nonnegative_p (tree t)
9285 switch (TREE_CODE (t))
9291 return tree_int_cst_sgn (t) >= 0;
9294 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, nonneg + nonneg is nonneg.  */
9297 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9298 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9299 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9301 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9302 both unsigned and at least 2 bits shorter than the result. */
9303 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9304 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9305 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9307 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9308 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9309 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9310 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9312 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9313 TYPE_PRECISION (inner2)) + 1;
9314 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
9320 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9322 /* x * x for floating point x is always non-negative. */
9323 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9325 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9326 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9329 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9330 both unsigned and their combined width is smaller than the result's. */
9331 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9332 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9333 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9335 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9336 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9337 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9338 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9339 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9340 < TYPE_PRECISION (TREE_TYPE (t));
/* Divisions: non-negative when both operands are.  */
9344 case TRUNC_DIV_EXPR:
9346 case FLOOR_DIV_EXPR:
9347 case ROUND_DIV_EXPR:
9348 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9349 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: the sign follows the dividend.  */
9351 case TRUNC_MOD_EXPR:
9353 case FLOOR_MOD_EXPR:
9354 case ROUND_MOD_EXPR:
9355 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9358 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9359 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9362 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9363 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9366 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9367 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: decide from the inner and outer types.  */
9371 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9372 tree outer_type = TREE_TYPE (t);
9374 if (TREE_CODE (outer_type) == REAL_TYPE)
9376 if (TREE_CODE (inner_type) == REAL_TYPE)
9377 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9378 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9380 if (TYPE_UNSIGNED (inner_type))
9382 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9385 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9387 if (TREE_CODE (inner_type) == REAL_TYPE)
9388 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9389 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9390 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9391 && TYPE_UNSIGNED (inner_type);
/* Conditional: both arms must be non-negative.  */
9397 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9398 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9400 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9402 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9403 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9405 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9406 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9408 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9410 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9412 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9413 case NON_LVALUE_EXPR:
9414 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9416 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* TARGET_EXPR: reason about the value stored into the slot.  */
9420 tree temp = TARGET_EXPR_SLOT (t);
9421 t = TARGET_EXPR_INITIAL (t);
9423 /* If the initializer is non-void, then it's a normal expression
9424 that will be assigned to the slot. */
9425 if (!VOID_TYPE_P (t))
9426 return tree_expr_nonnegative_p (t);
9428 /* Otherwise, the initializer sets the slot in some way. One common
9429 way is an assignment statement at the end of the initializer. */
9432 if (TREE_CODE (t) == BIND_EXPR)
9433 t = expr_last (BIND_EXPR_BODY (t));
9434 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9435 || TREE_CODE (t) == TRY_CATCH_EXPR)
9436 t = expr_last (TREE_OPERAND (t, 0));
9437 else if (TREE_CODE (t) == STATEMENT_LIST)
9442 if (TREE_CODE (t) == MODIFY_EXPR
9443 && TREE_OPERAND (t, 0) == temp)
9444 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Calls: many math builtins have results of known sign.  */
9451 tree fndecl = get_callee_fndecl (t);
9452 tree arglist = TREE_OPERAND (t, 1);
9454 && DECL_BUILT_IN (fndecl)
9455 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9456 switch (DECL_FUNCTION_CODE (fndecl))
9458 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9459 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9460 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9461 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9463 CASE_BUILTIN_F (BUILT_IN_ACOS)
9464 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9465 CASE_BUILTIN_F (BUILT_IN_CABS)
9466 CASE_BUILTIN_F (BUILT_IN_COSH)
9467 CASE_BUILTIN_F (BUILT_IN_ERFC)
9468 CASE_BUILTIN_F (BUILT_IN_EXP)
9469 CASE_BUILTIN_F (BUILT_IN_EXP10)
9470 CASE_BUILTIN_F (BUILT_IN_EXP2)
9471 CASE_BUILTIN_F (BUILT_IN_FABS)
9472 CASE_BUILTIN_F (BUILT_IN_FDIM)
9473 CASE_BUILTIN_F (BUILT_IN_FREXP)
9474 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9475 CASE_BUILTIN_F (BUILT_IN_POW10)
9476 CASE_BUILTIN_I (BUILT_IN_FFS)
9477 CASE_BUILTIN_I (BUILT_IN_PARITY)
9478 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9482 CASE_BUILTIN_F (BUILT_IN_SQRT)
9483 /* sqrt(-0.0) is -0.0. */
9484 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9486 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9488 CASE_BUILTIN_F (BUILT_IN_ASINH)
9489 CASE_BUILTIN_F (BUILT_IN_ATAN)
9490 CASE_BUILTIN_F (BUILT_IN_ATANH)
9491 CASE_BUILTIN_F (BUILT_IN_CBRT)
9492 CASE_BUILTIN_F (BUILT_IN_CEIL)
9493 CASE_BUILTIN_F (BUILT_IN_ERF)
9494 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9495 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9496 CASE_BUILTIN_F (BUILT_IN_FMOD)
9497 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9498 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9499 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9500 CASE_BUILTIN_F (BUILT_IN_LRINT)
9501 CASE_BUILTIN_F (BUILT_IN_LROUND)
9502 CASE_BUILTIN_F (BUILT_IN_MODF)
9503 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9504 CASE_BUILTIN_F (BUILT_IN_POW)
9505 CASE_BUILTIN_F (BUILT_IN_RINT)
9506 CASE_BUILTIN_F (BUILT_IN_ROUND)
9507 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9508 CASE_BUILTIN_F (BUILT_IN_SINH)
9509 CASE_BUILTIN_F (BUILT_IN_TANH)
9510 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9511 /* True if the 1st argument is nonnegative. */
9512 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9514 CASE_BUILTIN_F (BUILT_IN_FMAX)
9515 /* True if the 1st OR 2nd arguments are nonnegative. */
9516 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9517 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9519 CASE_BUILTIN_F (BUILT_IN_FMIN)
9520 /* True if the 1st AND 2nd arguments are nonnegative. */
9521 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9522 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9524 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9525 /* True if the 2nd argument is nonnegative. */
9526 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9530 #undef CASE_BUILTIN_F
9531 #undef CASE_BUILTIN_I
9535 /* ... fall through ... */
9538 if (truth_value_p (TREE_CODE (t)))
9539 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9543 /* We don't know sign of `t', so be conservative and return false. */
9547 /* Return true when T is an address and is known to be nonzero.
9548 For floating point we further ensure that T is not denormal.
9549 Similar logic is present in nonzero_address in rtlanal.h */
/* Return true when T is provably nonzero; a false result means "not
   known".  Only integral- and pointer-typed expressions are analyzed
   (see the guard below).  */
9552 tree_expr_nonzero_p (tree t)
9554 tree type = TREE_TYPE (t);
9556 /* Doing something useful for floating point would need more work. */
9557 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9560 switch (TREE_CODE (t))
/* Negation of a nonzero signed value is nonzero when overflow is
   undefined (no wrap-around to zero possible).  */
9563 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9564 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9567 /* We used to test for !integer_zerop here. This does not work correctly
9568 if TREE_CONSTANT_OVERFLOW (t). */
9569 return (TREE_INT_CST_LOW (t) != 0
9570 || TREE_INT_CST_HIGH (t) != 0);
/* Addition of signed, non-wrapping values.  */
9573 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9575 /* With the presence of negative values it is hard
9576 to say something. */
9577 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9578 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9580 /* One of operands must be positive and the other non-negative. */
9581 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9582 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Product of two nonzero, non-wrapping signed values is nonzero.  */
9587 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9589 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9590 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions preserve nonzero-ness when no high bits are dropped.  */
9596 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9597 tree outer_type = TREE_TYPE (t);
9599 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9600 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Addresses: nonzero unless the underlying declaration may be weak
   (a weak symbol can resolve to NULL at link time).  */
9606 tree base = get_base_address (TREE_OPERAND (t, 0));
9611 /* Weak declarations may link to NULL. */
9613 return !DECL_WEAK (base);
9615 /* Constants are never weak. */
9616 if (TREE_CODE_CLASS (TREE_CODE (base)) == 'c')
/* Conditional: both arms must be nonzero.  */
9623 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9624 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9627 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9628 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9631 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9633 /* When both operands are nonzero, then MAX must be too. */
9634 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9637 /* MAX where operand 0 is positive is positive. */
9638 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9640 /* MAX where operand 1 is positive is positive. */
9641 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9642 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9649 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9652 case NON_LVALUE_EXPR:
9653 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Bit-or: either operand nonzero suffices.  */
9656 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9657 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9665 /* See if we are applying CODE, a relational to the highest or lowest
9666 possible integer of TYPE. If so, then the result is a compile
/* Helper for comparison folding (continuation of the comment above):
   *CODE_P, *OP0_P and *OP1_P describe the comparison and may be
   rewritten in place; a non-NULL return value is the fully folded
   result of type TYPE.  NOTE(review): the updates to *CODE_P occur in
   lines elided from this excerpt — confirm against the full source.  */
9670 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9675 enum tree_code code = *code_p;
9676 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only analyze single-word integer/pointer constants that did not
   overflow.  */
9678 if (TREE_CODE (op1) == INTEGER_CST
9679 && ! TREE_CONSTANT_OVERFLOW (op1)
9680 && width <= HOST_BITS_PER_WIDE_INT
9681 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9682 || POINTER_TYPE_P (TREE_TYPE (op1))))
9684 unsigned HOST_WIDE_INT signed_max;
9685 unsigned HOST_WIDE_INT max, min;
/* signed_max = 2**(width-1) - 1, computed in an unsigned type to
   avoid signed overflow; max/min are op1's type extremes.  */
9687 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9689 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9691 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9697 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* op1 equals the type's maximum: comparisons against it fold to a
   constant or are canonicalized.  */
9700 if (TREE_INT_CST_HIGH (op1) == 0
9701 && TREE_INT_CST_LOW (op1) == max)
9705 return omit_one_operand (type, integer_zero_node, op0);
9711 return omit_one_operand (type, integer_one_node, op0);
9717 /* The GE_EXPR and LT_EXPR cases above are not normally
9718 reached because of previous transformations. */
/* op1 == max - 1: replace op1 by op1 + 1.  */
9723 else if (TREE_INT_CST_HIGH (op1) == 0
9724 && TREE_INT_CST_LOW (op1) == max - 1)
9729 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9733 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* op1 equals the type's minimum.  */
9738 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9739 && TREE_INT_CST_LOW (op1) == min)
9743 return omit_one_operand (type, integer_zero_node, op0);
9750 return omit_one_operand (type, integer_one_node, op0);
/* op1 == min + 1: replace op1 by op1 - 1.  */
9759 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9760 && TREE_INT_CST_LOW (op1) == min + 1)
9765 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9769 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned op1 == signed_max: X <= signed_max is equivalent to a
   sign test of X in the corresponding signed type.  */
9775 else if (TREE_INT_CST_HIGH (op1) == 0
9776 && TREE_INT_CST_LOW (op1) == signed_max
9777 && TYPE_UNSIGNED (TREE_TYPE (op1))
9778 /* signed_type does not work on pointer types. */
9779 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9781 /* The following case also applies to X < signed_max+1
9782 and X >= signed_max+1 because of previous transformations. */
9783 if (code == LE_EXPR || code == GT_EXPR)
9785 tree st0, st1, exp, retval;
9786 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9787 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
/* Build the equivalent signed comparison against zero and try to
   reduce it to a constant.  */
9789 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9791 fold_convert (st0, op0),
9792 fold_convert (st1, integer_zero_node));
9795 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9797 TREE_OPERAND (exp, 0),
9798 TREE_OPERAND (exp, 1));
9800 /* If we are in gimple form, then returning EXP would create
9801 non-gimple expressions. Clearing it is safe and ensures
9802 we do not allow a non-gimple expression to escape. */
9806 return (retval ? retval : exp);
9815 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9816 attempt to fold the expression to a constant without modifying TYPE,
9819 If the expression could be simplified to a constant, then return
9820 the constant. If the expression would not be simplified to a
9821 constant, then return NULL_TREE.
9823 Note this is primarily designed to be called after gimplification
9824 of the tree structures and when at least one operand is a constant.
9825 As a result of those simplifying assumptions this routine is far
9826 simpler than the generic fold routine. */
9829 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9837 /* If this is a commutative operation, and ARG0 is a constant, move it
9838 to ARG1 to reduce the number of tests below. */
9839 if (commutative_tree_code (code)
9840 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9847 /* If either operand is a complex type, extract its real component. */
9848 if (TREE_CODE (op0) == COMPLEX_CST)
9849 subop0 = TREE_REALPART (op0);
9853 if (TREE_CODE (op1) == COMPLEX_CST)
9854 subop1 = TREE_REALPART (op1);
9858 /* Note if either argument is not a real or integer constant.
9859 With a few exceptions, simplification is limited to cases
9860 where both arguments are constants. */
9861 if ((TREE_CODE (subop0) != INTEGER_CST
9862 && TREE_CODE (subop0) != REAL_CST)
9863 || (TREE_CODE (subop1) != INTEGER_CST
9864 && TREE_CODE (subop1) != REAL_CST))
9870 /* (plus (address) (const_int)) is a constant. */
9871 if (TREE_CODE (op0) == PLUS_EXPR
9872 && TREE_CODE (op1) == INTEGER_CST
9873 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9874 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9875 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9877 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9879 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9880 const_binop (PLUS_EXPR, op1,
9881 TREE_OPERAND (op0, 1), 0));
9889 /* Both arguments are constants. Simplify. */
9890 tem = const_binop (code, op0, op1, 0);
9891 if (tem != NULL_TREE)
9893 /* The return value should always have the same type as
9894 the original expression. */
9895 if (TREE_TYPE (tem) != type)
9896 tem = fold_convert (type, tem);
9903 /* Fold &x - &x. This can happen from &x.foo - &x.
9904 This is unsafe for certain floats even in non-IEEE formats.
9905 In IEEE, it is unsafe because it does wrong for NaNs.
9906 Also note that operand_equal_p is always false if an
9907 operand is volatile. */
9908 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9909 return fold_convert (type, integer_zero_node);
9915 /* Special case multiplication or bitwise AND where one argument
9917 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9918 return omit_one_operand (type, op1, op0);
9920 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9921 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9922 && real_zerop (op1))
9923 return omit_one_operand (type, op1, op0);
9928 /* Special case when we know the result will be all ones. */
9929 if (integer_all_onesp (op1))
9930 return omit_one_operand (type, op1, op0);
9934 case TRUNC_DIV_EXPR:
9935 case ROUND_DIV_EXPR:
9936 case FLOOR_DIV_EXPR:
9938 case EXACT_DIV_EXPR:
9939 case TRUNC_MOD_EXPR:
9940 case ROUND_MOD_EXPR:
9941 case FLOOR_MOD_EXPR:
9944 /* Division by zero is undefined. */
9945 if (integer_zerop (op1))
9948 if (TREE_CODE (op1) == REAL_CST
9949 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9950 && real_zerop (op1))
9956 if (INTEGRAL_TYPE_P (type)
9957 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9958 return omit_one_operand (type, op1, op0);
9963 if (INTEGRAL_TYPE_P (type)
9964 && TYPE_MAX_VALUE (type)
9965 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9966 return omit_one_operand (type, op1, op0);
9971 /* Optimize -1 >> x for arithmetic right shifts. */
9972 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9973 return omit_one_operand (type, op0, op1);
9974 /* ... fall through ... */
9977 if (integer_zerop (op0))
9978 return omit_one_operand (type, op0, op1);
9980 /* Since negative shift count is not well-defined, don't
9981 try to compute it in the compiler. */
9982 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9989 /* -1 rotated either direction by any amount is still -1. */
9990 if (integer_all_onesp (op0))
9991 return omit_one_operand (type, op0, op1);
9993 /* 0 rotated either direction by any amount is still zero. */
9994 if (integer_zerop (op0))
9995 return omit_one_operand (type, op0, op1);
10001 return build_complex (type, op0, op1);
10010 /* If one arg is a real or integer constant, put it last. */
10011 if ((TREE_CODE (op0) == INTEGER_CST
10012 && TREE_CODE (op1) != INTEGER_CST)
10013 || (TREE_CODE (op0) == REAL_CST
10014 && TREE_CODE (op0) != REAL_CST))
10021 code = swap_tree_comparison (code);
10024 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10025 This transformation affects the cases which are handled in later
10026 optimizations involving comparisons with non-negative constants. */
10027 if (TREE_CODE (op1) == INTEGER_CST
10028 && TREE_CODE (op0) != INTEGER_CST
10029 && tree_int_cst_sgn (op1) > 0)
10035 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10040 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10048 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10052 /* Fall through. */
10055 case UNORDERED_EXPR:
10065 return fold_relational_const (code, type, op0, op1);
10068 /* This could probably be handled. */
10071 case TRUTH_AND_EXPR:
10072 /* If second arg is constant zero, result is zero, but first arg
10073 must be evaluated. */
10074 if (integer_zerop (op1))
10075 return omit_one_operand (type, op1, op0);
10076 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10077 case will be handled here. */
10078 if (integer_zerop (op0))
10079 return omit_one_operand (type, op0, op1);
10080 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10081 return constant_boolean_node (true, type);
10084 case TRUTH_OR_EXPR:
10085 /* If second arg is constant true, result is true, but we must
10086 evaluate first arg. */
10087 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10088 return omit_one_operand (type, op1, op0);
10089 /* Likewise for first arg, but note this only occurs here for
10091 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10092 return omit_one_operand (type, op0, op1);
10093 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10094 return constant_boolean_node (false, type);
10097 case TRUTH_XOR_EXPR:
10098 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10100 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10101 return constant_boolean_node (x, type);
10110 /* Given the components of a unary expression CODE, TYPE and OP0,
10111 attempt to fold the expression to a constant without modifying
10114 If the expression could be simplified to a constant, then return
10115 the constant. If the expression would not be simplified to a
10116 constant, then return NULL_TREE.
10118 Note this is primarily designed to be called after gimplification
10119 of the tree structures and when op0 is a constant. As a result
10120 of those simplifying assumptions this routine is far simpler than
10121 the generic fold routine. */
/* NOTE(review): the embedded line numbers in this listing are
   non-contiguous, so some statements (braces, breaks, the trailing
   "return NULL_TREE;") are elided here -- confirm against the
   complete fold-const.c before changing this routine.  */
10124 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10127 /* Make sure we have a suitable constant argument. */
10128 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
/* For conversions, only fold when the operand (or, for a complex
   constant, its real part) is itself an INTEGER_CST or REAL_CST.  */
10132 if (TREE_CODE (op0) == COMPLEX_CST)
10133 subop = TREE_REALPART (op0);
10137 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* All fix/float/convert codes funnel into fold_convert_const.  */
10146 case FIX_TRUNC_EXPR:
10147 case FIX_FLOOR_EXPR:
10148 case FIX_CEIL_EXPR:
10149 return fold_convert_const (code, type, op0);
/* Negation of an integer or real constant.  */
10152 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10153 return fold_negate_const (op0, type);
/* Absolute value of an integer or real constant.  */
10158 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10159 return fold_abs_const (op0, type);
/* Bitwise NOT is defined only for integer constants here.  */
10164 if (TREE_CODE (op0) == INTEGER_CST)
10165 return fold_not_const (op0, type);
10169 case REALPART_EXPR:
10170 if (TREE_CODE (op0) == COMPLEX_CST)
10171 return TREE_REALPART (op0);
10175 case IMAGPART_EXPR:
10176 if (TREE_CODE (op0) == COMPLEX_CST)
10177 return TREE_IMAGPART (op0);
/* Complex conjugate: keep the real part, negate the imaginary part.  */
10182 if (TREE_CODE (op0) == COMPLEX_CST
10183 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10184 return build_complex (type, TREE_REALPART (op0),
10185 negate_expr (TREE_IMAGPART (op0)));
10193 /* If EXP represents referencing an element in a constant string
10194 (either via pointer arithmetic or array indexing), return the
10195 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): several lines are elided from this listing (the
   declarations of "string" and "index", the ARRAY_REF branch header,
   and the closing "return NULL;") -- verify against the full file.  */
10198 fold_read_from_constant_string (tree exp)
10200 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10202 tree exp1 = TREE_OPERAND (exp, 0);
/* *p form: recover the string and the byte offset from the address
   expression.  */
10206 if (TREE_CODE (exp) == INDIRECT_REF)
10207 string = string_constant (exp1, &index);
/* a[i] form: the index is operand 1, adjusted by the array's lower
   bound below.  */
10210 tree low_bound = array_ref_low_bound (exp);
10211 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10213 /* Optimize the special-case of a zero lower bound.
10215 We convert the low_bound to sizetype to avoid some problems
10216 with constant folding. (E.g. suppose the lower bound is 1,
10217 and its mode is QI. Without the conversion, (ARRAY
10218 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10219 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10220 if (! integer_zerop (low_bound))
10221 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Only fold when the element really is a single-byte integer read
   strictly inside the STRING_CST's bounds.  */
10227 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10228 && TREE_CODE (string) == STRING_CST
10229 && TREE_CODE (index) == INTEGER_CST
10230 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10231 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10233 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10234 return fold_convert (TREE_TYPE (exp),
10235 build_int_cst (NULL_TREE,
10236 (TREE_STRING_POINTER (string)
10237 [TREE_INT_CST_LOW (index)])));
10242 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10243 an integer constant or real constant.
10245 TYPE is the type of the result. */
10248 fold_negate_const (tree arg0, tree type)
10250 tree t = NULL_TREE;
10252 switch (TREE_CODE (arg0))
/* INTEGER_CST: negate the two-word value with neg_double, then clamp
   it to TYPE.  neg_double's output arguments (&low, &high) are on an
   elided line (10260) -- presumably they fill LOW/HIGH below.  */
10256 unsigned HOST_WIDE_INT low;
10257 HOST_WIDE_INT high;
10258 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10259 TREE_INT_CST_HIGH (arg0),
10261 t = build_int_cst_wide (type, low, high);
/* Signal overflow only for signed types: unsigned negation wraps and
   is well-defined, so TYPE_UNSIGNED suppresses the overflow flag.  */
10262 t = force_fit_type (t, 1,
10263 (overflow | TREE_OVERFLOW (arg0))
10264 && !TYPE_UNSIGNED (type),
10265 TREE_CONSTANT_OVERFLOW (arg0));
/* REAL_CST: negation of a real constant never overflows.  */
10270 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Callers guarantee ARG0 is INTEGER_CST or REAL_CST.  */
10274 gcc_unreachable ();
10280 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10281 an integer constant or real constant.
10283 TYPE is the type of the result. */
10286 fold_abs_const (tree arg0, tree type)
10288 tree t = NULL_TREE;
10290 switch (TREE_CODE (arg0))
10293 /* If the value is unsigned, then the absolute value is
10294 the same as the ordinary value. */
10295 if (TYPE_UNSIGNED (type))
10297 /* Similarly, if the value is non-negative. */
/* INT_CST_LT (-1, arg0) holds exactly when ARG0 >= 0.  */
10298 else if (INT_CST_LT (integer_minus_one_node, arg0))
10300 /* If the value is negative, then the absolute value is
/* ... its negation: reuse the two-word negate and refit to TYPE.
   As in fold_negate_const, neg_double's &low/&high output arguments
   are on an elided line (10308).  */
10304 unsigned HOST_WIDE_INT low;
10305 HOST_WIDE_INT high;
10306 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10307 TREE_INT_CST_HIGH (arg0),
10309 t = build_int_cst_wide (type, low, high);
10310 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10311 TREE_CONSTANT_OVERFLOW (arg0));
/* REAL_CST: flip the sign only when the value is negative.  */
10316 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10317 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Callers guarantee ARG0 is INTEGER_CST or REAL_CST.  */
10323 gcc_unreachable ();
10329 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10330 constant. TYPE is the type of the result. */
10333 fold_not_const (tree arg0, tree type)
10335 tree t = NULL_TREE;
10337 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Bitwise complement of both halves of the two-word constant; the
   result is then truncated/sign-extended to TYPE by force_fit_type.
   Overflowable is 0: complement itself cannot introduce overflow,
   so only ARG0's existing overflow flags are propagated.  */
10339 t = build_int_cst_wide (type,
10340 ~ TREE_INT_CST_LOW (arg0),
10341 ~ TREE_INT_CST_HIGH (arg0));
10342 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10343 TREE_CONSTANT_OVERFLOW (arg0));
10348 /* Given CODE, a relational operator, the target type, TYPE and two
10349 constant operands OP0 and OP1, return the result of the
10350 relational operation. If the result is not a compile time
10351 constant, then return NULL_TREE. */
/* NOTE(review): lines are elided in this listing (the NaN-case result
   table, the invert handling after the integer compare, and the final
   "return NULL_TREE") -- confirm against the complete file.  */
10354 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10356 int result, invert;
10358 /* From here on, the only cases we handle are when the result is
10359 known to be a constant. */
10361 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10363 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10364 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10366 /* Handle the cases where either operand is a NaN. */
10367 if (real_isnan (c0) || real_isnan (c1))
10377 case UNORDERED_EXPR:
/* With -ftrapping-math, folding an ordered comparison of a NaN would
   discard a trap, so refuse to fold.  */
10391 if (flag_trapping_math)
10397 gcc_unreachable ();
10400 return constant_boolean_node (result, type);
/* Neither operand is a NaN: real_compare gives the answer directly.  */
10403 return constant_boolean_node (real_compare (code, c0, c1), type);
10406 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10408 To compute GT, swap the arguments and do LT.
10409 To compute GE, do LT and invert the result.
10410 To compute LE, swap the arguments, do LT and invert the result.
10411 To compute NE, do EQ and invert the result.
10413 Therefore, the code below must handle only EQ and LT. */
10415 if (code == LE_EXPR || code == GT_EXPR)
10420 code = swap_tree_comparison (code);
10423 /* Note that it is safe to invert for real values here because we
10424 have already handled the one case that it matters. */
10427 if (code == NE_EXPR || code == GE_EXPR)
10430 code = invert_tree_comparison (code, false);
10433 /* Compute a result for LT or EQ if args permit;
10434 Otherwise return T. */
10435 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10437 if (code == EQ_EXPR)
10438 result = tree_int_cst_equal (op0, op1);
/* LT: pick the signed or unsigned two-word comparison based on the
   operand type's signedness.  */
10439 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10440 result = INT_CST_LT_UNSIGNED (op0, op1);
10442 result = INT_CST_LT (op0, op1);
10449 return constant_boolean_node (result, type);
10452 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10453 avoid confusing the gimplify process. */
10456 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10458 /* The size of the object is not relevant when talking about its address. */
10459 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10460 t = TREE_OPERAND (t, 0)
/* &*p folds to p, with a NOP_EXPR added only if the pointer type
   differs from the requested PTRTYPE.  */
10462 if (TREE_CODE (t) == INDIRECT_REF)
10464 t = TREE_OPERAND (t, 0);
10465 if (TREE_TYPE (t) != ptrtype)
10466 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise strip component references down to the underlying object
   and mark it addressable, since its address is now taken.  (The
   declaration of BASE is on an elided line; presumably "tree base = t;"
   -- confirm against the full file.)  */
10472 while (handled_component_p (base)
10473 || TREE_CODE (base) == REALPART_EXPR
10474 || TREE_CODE (base) == IMAGPART_EXPR)
10475 base = TREE_OPERAND (base, 0);
10477 TREE_ADDRESSABLE (base) = 1;
10479 t = build1 (ADDR_EXPR, ptrtype, t);
/* Convenience wrapper: build the address of T using a freshly built
   pointer-to-TREE_TYPE(T) as the result type.  */
10486 build_fold_addr_expr (tree t)
10488 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10491 /* Builds an expression for an indirection through T, simplifying some
/* ... common cases (the rest of this comment line is elided).  */
10495 build_fold_indirect_ref (tree t)
10497 tree type = TREE_TYPE (TREE_TYPE (t));
/* (SUB is T stripped of NOPs on an elided line -- confirm.)  */
10502 if (TREE_CODE (sub) == ADDR_EXPR)
10504 tree op = TREE_OPERAND (sub, 0);
10505 tree optype = TREE_TYPE (op);
/* *&obj => obj when the types are compatible (the "return op;" for
   this branch is on an elided line).  */
10507 if (lang_hooks.types_compatible_p (type, optype))
10509 /* *(foo *)&fooarray => fooarray[0] */
10510 else if (TREE_CODE (optype) == ARRAY_TYPE
10511 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10512 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10515 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10516 subtype = TREE_TYPE (sub);
10517 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10518 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10520 sub = build_fold_indirect_ref (sub);
10521 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
/* No simplification applies: emit a plain indirection.  */
10524 return build1 (INDIRECT_REF, type, t);
10527 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10528 whose result is ignored. The type of the returned tree need not be
10529 the same as the original expression. */
/* NOTE(review): the enclosing loop construct and several "return t;"
   lines are elided from this listing -- confirm against the full file.  */
10532 fold_ignored_result (tree t)
10534 if (!TREE_SIDE_EFFECTS (t))
10535 return integer_zero_node;
10538 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary class: the side effect must be in the operand; strip the
   wrapper node.  */
10541 t = TREE_OPERAND (t, 0);
/* Binary class: keep only the operand that carries side effects.  */
10546 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10547 t = TREE_OPERAND (t, 0);
10548 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10549 t = TREE_OPERAND (t, 1);
10555 switch (TREE_CODE (t))
10557 case COMPOUND_EXPR:
/* (a, b) with side effects in b cannot be stripped further.  */
10558 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10560 t = TREE_OPERAND (t, 0);
/* COND_EXPR: only the condition may be kept, and only when both
   arms are side-effect free.  */
10564 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10565 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10567 t = TREE_OPERAND (t, 0);
10580 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10581 This can only be applied to objects of a sizetype. */
10584 round_up (tree value, int divisor)
10586 tree div = NULL_TREE;
10588 gcc_assert (divisor > 0);
10592 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10593 have to do anything. Only do this when we are not given a const,
10594 because in that case, this check is more expensive than just
/* ... doing the arithmetic (rest of comment elided).  */
10596 if (TREE_CODE (value) != INTEGER_CST)
10598 div = build_int_cst (TREE_TYPE (value), divisor);
10600 if (multiple_of_p (TREE_TYPE (value), value, div))
10604 /* If divisor is a power of two, simplify this to bit manipulation. */
/* d == (d & -d) is true exactly when d has a single bit set;
   then rounding up is (value + d-1) & -d.  */
10605 if (divisor == (divisor & -divisor))
10609 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10610 value = size_binop (PLUS_EXPR, value, t);
10611 t = build_int_cst (TREE_TYPE (value), -divisor);
10612 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide, then multiply back.  */
10617 div = build_int_cst (TREE_TYPE (value), divisor);
10618 value = size_binop (CEIL_DIV_EXPR, value, div);
10619 value = size_binop (MULT_EXPR, value, div);
10625 /* Likewise, but round down. */
10628 round_down (tree value, int divisor)
10630 tree div = NULL_TREE;
10632 gcc_assert (divisor > 0);
10636 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10637 have to do anything. Only do this when we are not given a const,
10638 because in that case, this check is more expensive than just
10640 if (TREE_CODE (value) != INTEGER_CST)
10642 div = build_int_cst (TREE_TYPE (value), divisor);
10644 if (multiple_of_p (TREE_TYPE (value), value, div))
10648 /* If divisor is a power of two, simplify this to bit manipulation. */
10649 if (divisor == (divisor & -divisor))
10653 t = build_int_cst (TREE_TYPE (value), -divisor);
10654 value = size_binop (BIT_AND_EXPR, value, t);
10659 div = build_int_cst (TREE_TYPE (value), divisor);
10660 value = size_binop (FLOOR_DIV_EXPR, value, div);
10661 value = size_binop (MULT_EXPR, value, div);