1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum tree_code invert_tree_comparison (enum tree_code, bool);
93 static enum comparison_code comparison_to_compcode (enum tree_code);
94 static enum tree_code compcode_to_comparison (enum comparison_code);
95 static tree combine_comparisons (enum tree_code, enum tree_code,
96 enum tree_code, tree, tree, tree);
97 static int truth_value_p (enum tree_code);
98 static int operand_equal_for_comparison_p (tree, tree, tree);
99 static int twoval_comparison_p (tree, tree *, tree *, int *);
100 static tree eval_subst (tree, tree, tree, tree, tree);
101 static tree pedantic_omit_one_operand (tree, tree, tree);
102 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
103 static tree make_bit_field_ref (tree, tree, int, int, int);
104 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
105 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
106 enum machine_mode *, int *, int *,
108 static int all_ones_mask_p (tree, int);
109 static tree sign_bit_p (tree, tree);
110 static int simple_operand_p (tree);
111 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
112 static tree make_range (tree, int *, tree *, tree *);
113 static tree build_range_check (tree, tree, int, tree, tree);
114 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
116 static tree fold_range_test (tree);
117 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
118 static tree unextend (tree, int, int, tree);
119 static tree fold_truthop (enum tree_code, tree, tree, tree);
120 static tree optimize_minmax_comparison (tree);
121 static tree extract_muldiv (tree, tree, enum tree_code, tree);
122 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
123 static int multiple_of_p (tree, tree, tree);
124 static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static tree fold_relational_hi_lo (enum tree_code *, const tree,
137 static bool tree_expr_nonzero_p (tree);
139 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
140 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
141 and SUM1. Then this yields nonzero if overflow occurred during the
144 Overflow occurs if A and B have the same sign, but A and SUM differ in
145 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
147 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
149 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
150 We do that by representing the two-word integer in 4 words, with only
151 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
152 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Low half-word of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* High half-word of X, shifted down into the low half.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the half-word representation: 2**(HOST_BITS_PER_WIDE_INT/2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
160 /* Unpack a two-word integer into 4 words.
161 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
162 WORDS points to the array of HOST_WIDE_INTs. */
165 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
167 words[0] = LOWPART (low);
168 words[1] = HIGHPART (low);
169 words[2] = LOWPART (hi);
170 words[3] = HIGHPART (hi);
173 /* Pack an array of 4 words into a two-word integer.
174 WORDS points to the array of words.
175 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
178 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 *low = words[0] + words[1] * BASE;
182 *hi = words[2] + words[3] * BASE;
185 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
186 in overflow of the value, when >0 we are only interested in signed
187 overflow, for <0 we are interested in any overflow. OVERFLOWED
188 indicates whether overflow has already occurred. CONST_OVERFLOWED
189 indicates whether constant overflow has already occurred. We force
190 T's value to be within range of T's type (by setting to 0 or 1 all
191 the bits outside the type's range). We set TREE_OVERFLOWED if,
192 OVERFLOWED is nonzero,
193 or OVERFLOWABLE is >0 and signed overflow occurs
194 or OVERFLOWABLE is <0 and any overflow occurs
195 We set TREE_CONSTANT_OVERFLOWED if,
196 CONST_OVERFLOWED is nonzero
197 or we set TREE_OVERFLOWED.
198 We return either the original T, or a copy. */
201 force_fit_type (tree t, int overflowable,
202 bool overflowed, bool overflowed_const)
204 unsigned HOST_WIDE_INT low;
207 int sign_extended_type;
209 gcc_assert (TREE_CODE (t) == INTEGER_CST);
211 low = TREE_INT_CST_LOW (t);
212 high = TREE_INT_CST_HIGH (t);
214 if (POINTER_TYPE_P (TREE_TYPE (t))
215 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
218 prec = TYPE_PRECISION (TREE_TYPE (t));
219 /* Size types *are* sign extended. */
220 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
221 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
222 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
224 /* First clear all bits that are beyond the type's precision. */
226 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
228 else if (prec > HOST_BITS_PER_WIDE_INT)
229 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
233 if (prec < HOST_BITS_PER_WIDE_INT)
234 low &= ~((HOST_WIDE_INT) (-1) << prec);
237 if (!sign_extended_type)
238 /* No sign extension */;
239 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
240 /* Correct width already. */;
241 else if (prec > HOST_BITS_PER_WIDE_INT)
243 /* Sign extend top half? */
244 if (high & ((unsigned HOST_WIDE_INT)1
245 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
246 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
248 else if (prec == HOST_BITS_PER_WIDE_INT)
250 if ((HOST_WIDE_INT)low < 0)
255 /* Sign extend bottom half? */
256 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
259 low |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value changed, return a new node. */
264 if (overflowed || overflowed_const
265 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
267 t = build_int_cst_wide (TREE_TYPE (t), low, high);
271 || (overflowable > 0 && sign_extended_type))
274 TREE_OVERFLOW (t) = 1;
275 TREE_CONSTANT_OVERFLOW (t) = 1;
277 else if (overflowed_const)
280 TREE_CONSTANT_OVERFLOW (t) = 1;
287 /* Add two doubleword integers with doubleword result.
288 Each argument is given as two `HOST_WIDE_INT' pieces.
289 One argument is L1 and H1; the other, L2 and H2.
290 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
293 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
294 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
295 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
297 unsigned HOST_WIDE_INT l;
301 h = h1 + h2 + (l < l1);
305 return OVERFLOW_SUM_SIGN (h1, h2, h);
308 /* Negate a doubleword integer with doubleword result.
309 Return nonzero if the operation overflows, assuming it's signed.
310 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
311 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
314 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
315 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
321 return (*hv & h1) < 0;
331 /* Multiply two doubleword integers with doubleword result.
332 Return nonzero if the operation overflows, assuming it's signed.
333 Each argument is given as two `HOST_WIDE_INT' pieces.
334 One argument is L1 and H1; the other, L2 and H2.
335 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
338 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
339 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
340 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
342 HOST_WIDE_INT arg1[4];
343 HOST_WIDE_INT arg2[4];
344 HOST_WIDE_INT prod[4 * 2];
345 unsigned HOST_WIDE_INT carry;
347 unsigned HOST_WIDE_INT toplow, neglow;
348 HOST_WIDE_INT tophigh, neghigh;
350 encode (arg1, l1, h1);
351 encode (arg2, l2, h2);
353 memset (prod, 0, sizeof prod);
355 for (i = 0; i < 4; i++)
358 for (j = 0; j < 4; j++)
361 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
362 carry += arg1[i] * arg2[j];
363 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
365 prod[k] = LOWPART (carry);
366 carry = HIGHPART (carry);
371 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
373 /* Check for overflow by calculating the top half of the answer in full;
374 it should agree with the low half's sign bit. */
375 decode (prod + 4, &toplow, &tophigh);
378 neg_double (l2, h2, &neglow, &neghigh);
379 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 neg_double (l1, h1, &neglow, &neghigh);
384 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
386 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
389 /* Shift the doubleword integer in L1, H1 left by COUNT places
390 keeping only PREC bits of result.
391 Shift right if COUNT is negative.
392 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
393 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
396 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
397 HOST_WIDE_INT count, unsigned int prec,
398 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
400 unsigned HOST_WIDE_INT signmask;
404 rshift_double (l1, h1, -count, prec, lv, hv, arith);
408 if (SHIFT_COUNT_TRUNCATED)
411 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
413 /* Shifting by the host word size is undefined according to the
414 ANSI standard, so we must handle this as a special case. */
418 else if (count >= HOST_BITS_PER_WIDE_INT)
420 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
425 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
426 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
430 /* Sign extend all bits that are beyond the precision. */
432 signmask = -((prec > HOST_BITS_PER_WIDE_INT
433 ? ((unsigned HOST_WIDE_INT) *hv
434 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
435 : (*lv >> (prec - 1))) & 1);
437 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
439 else if (prec >= HOST_BITS_PER_WIDE_INT)
441 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
442 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
447 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
448 *lv |= signmask << prec;
452 /* Shift the doubleword integer in L1, H1 right by COUNT places
453 keeping only PREC bits of result. COUNT must be positive.
454 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
455 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
458 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
459 HOST_WIDE_INT count, unsigned int prec,
460 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
463 unsigned HOST_WIDE_INT signmask;
466 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
469 if (SHIFT_COUNT_TRUNCATED)
472 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
474 /* Shifting by the host word size is undefined according to the
475 ANSI standard, so we must handle this as a special case. */
479 else if (count >= HOST_BITS_PER_WIDE_INT)
482 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
486 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
488 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
491 /* Zero / sign extend all bits that are beyond the precision. */
493 if (count >= (HOST_WIDE_INT)prec)
498 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
500 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
502 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
503 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
508 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
509 *lv |= signmask << (prec - count);
513 /* Rotate the doubleword integer in L1, H1 left by COUNT places
514 keeping only PREC bits of result.
515 Rotate right if COUNT is negative.
516 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
519 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
520 HOST_WIDE_INT count, unsigned int prec,
521 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
523 unsigned HOST_WIDE_INT s1l, s2l;
524 HOST_WIDE_INT s1h, s2h;
530 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
531 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
536 /* Rotate the doubleword integer in L1, H1 left by COUNT places
537 keeping only PREC bits of result. COUNT must be positive.
538 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
541 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
542 HOST_WIDE_INT count, unsigned int prec,
543 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
545 unsigned HOST_WIDE_INT s1l, s2l;
546 HOST_WIDE_INT s1h, s2h;
552 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
553 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
558 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
559 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
560 CODE is a tree code for a kind of division, one of
561 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
563 It controls how the quotient is rounded to an integer.
564 Return nonzero if the operation overflows.
565 UNS nonzero says do unsigned division. */
568 div_and_round_double (enum tree_code code, int uns,
569 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
570 HOST_WIDE_INT hnum_orig,
571 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
572 HOST_WIDE_INT hden_orig,
573 unsigned HOST_WIDE_INT *lquo,
574 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
578 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
579 HOST_WIDE_INT den[4], quo[4];
581 unsigned HOST_WIDE_INT work;
582 unsigned HOST_WIDE_INT carry = 0;
583 unsigned HOST_WIDE_INT lnum = lnum_orig;
584 HOST_WIDE_INT hnum = hnum_orig;
585 unsigned HOST_WIDE_INT lden = lden_orig;
586 HOST_WIDE_INT hden = hden_orig;
589 if (hden == 0 && lden == 0)
590 overflow = 1, lden = 1;
592 /* Calculate quotient sign and convert operands to unsigned. */
598 /* (minimum integer) / (-1) is the only overflow case. */
599 if (neg_double (lnum, hnum, &lnum, &hnum)
600 && ((HOST_WIDE_INT) lden & hden) == -1)
606 neg_double (lden, hden, &lden, &hden);
610 if (hnum == 0 && hden == 0)
611 { /* single precision */
613 /* This unsigned division rounds toward zero. */
619 { /* trivial case: dividend < divisor */
620 /* hden != 0 already checked. */
627 memset (quo, 0, sizeof quo);
629 memset (num, 0, sizeof num); /* to zero 9th element */
630 memset (den, 0, sizeof den);
632 encode (num, lnum, hnum);
633 encode (den, lden, hden);
635 /* Special code for when the divisor < BASE. */
636 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
638 /* hnum != 0 already checked. */
639 for (i = 4 - 1; i >= 0; i--)
641 work = num[i] + carry * BASE;
642 quo[i] = work / lden;
648 /* Full double precision division,
649 with thanks to Don Knuth's "Seminumerical Algorithms". */
650 int num_hi_sig, den_hi_sig;
651 unsigned HOST_WIDE_INT quo_est, scale;
653 /* Find the highest nonzero divisor digit. */
654 for (i = 4 - 1;; i--)
661 /* Insure that the first digit of the divisor is at least BASE/2.
662 This is required by the quotient digit estimation algorithm. */
664 scale = BASE / (den[den_hi_sig] + 1);
666 { /* scale divisor and dividend */
668 for (i = 0; i <= 4 - 1; i++)
670 work = (num[i] * scale) + carry;
671 num[i] = LOWPART (work);
672 carry = HIGHPART (work);
677 for (i = 0; i <= 4 - 1; i++)
679 work = (den[i] * scale) + carry;
680 den[i] = LOWPART (work);
681 carry = HIGHPART (work);
682 if (den[i] != 0) den_hi_sig = i;
689 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
691 /* Guess the next quotient digit, quo_est, by dividing the first
692 two remaining dividend digits by the high order quotient digit.
693 quo_est is never low and is at most 2 high. */
694 unsigned HOST_WIDE_INT tmp;
696 num_hi_sig = i + den_hi_sig + 1;
697 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
698 if (num[num_hi_sig] != den[den_hi_sig])
699 quo_est = work / den[den_hi_sig];
703 /* Refine quo_est so it's usually correct, and at most one high. */
704 tmp = work - quo_est * den[den_hi_sig];
706 && (den[den_hi_sig - 1] * quo_est
707 > (tmp * BASE + num[num_hi_sig - 2])))
710 /* Try QUO_EST as the quotient digit, by multiplying the
711 divisor by QUO_EST and subtracting from the remaining dividend.
712 Keep in mind that QUO_EST is the I - 1st digit. */
715 for (j = 0; j <= den_hi_sig; j++)
717 work = quo_est * den[j] + carry;
718 carry = HIGHPART (work);
719 work = num[i + j] - LOWPART (work);
720 num[i + j] = LOWPART (work);
721 carry += HIGHPART (work) != 0;
724 /* If quo_est was high by one, then num[i] went negative and
725 we need to correct things. */
726 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
729 carry = 0; /* add divisor back in */
730 for (j = 0; j <= den_hi_sig; j++)
732 work = num[i + j] + den[j] + carry;
733 carry = HIGHPART (work);
734 num[i + j] = LOWPART (work);
737 num [num_hi_sig] += carry;
740 /* Store the quotient digit. */
745 decode (quo, lquo, hquo);
748 /* If result is negative, make it so. */
750 neg_double (*lquo, *hquo, lquo, hquo);
752 /* Compute trial remainder: rem = num - (quo * den) */
753 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
754 neg_double (*lrem, *hrem, lrem, hrem);
755 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
760 case TRUNC_MOD_EXPR: /* round toward zero */
761 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
765 case FLOOR_MOD_EXPR: /* round toward negative infinity */
766 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
769 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
777 case CEIL_MOD_EXPR: /* round toward positive infinity */
778 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
780 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
788 case ROUND_MOD_EXPR: /* round to closest integer */
790 unsigned HOST_WIDE_INT labs_rem = *lrem;
791 HOST_WIDE_INT habs_rem = *hrem;
792 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
793 HOST_WIDE_INT habs_den = hden, htwice;
795 /* Get absolute values. */
797 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
799 neg_double (lden, hden, &labs_den, &habs_den);
801 /* If (2 * abs (lrem) >= abs (lden)) */
802 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
803 labs_rem, habs_rem, <wice, &htwice);
805 if (((unsigned HOST_WIDE_INT) habs_den
806 < (unsigned HOST_WIDE_INT) htwice)
807 || (((unsigned HOST_WIDE_INT) habs_den
808 == (unsigned HOST_WIDE_INT) htwice)
809 && (labs_den < ltwice)))
813 add_double (*lquo, *hquo,
814 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 /* Compute true remainder: rem = num - (quo * den) */
830 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
831 neg_double (*lrem, *hrem, lrem, hrem);
832 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
836 /* Return true if built-in mathematical function specified by CODE
837 preserves the sign of it argument, i.e. -f(x) == f(-x). */
/* NOTE(review): the return-type line and the entire switch body of this
   predicate (a switch over CODE listing the odd, sign-preserving builtins
   such as sin/tan/cbrt) are not visible in this chunk of the dump.
   Consult the full file before editing; do not reconstruct the builtin
   list from this view alone.  */
840 negate_mathfn_p (enum built_in_function code)
864 /* Check whether we may negate an integer constant T without causing
868 may_negate_without_overflow_p (tree t)
870 unsigned HOST_WIDE_INT val;
874 gcc_assert (TREE_CODE (t) == INTEGER_CST);
876 type = TREE_TYPE (t);
877 if (TYPE_UNSIGNED (type))
880 prec = TYPE_PRECISION (type);
881 if (prec > HOST_BITS_PER_WIDE_INT)
883 if (TREE_INT_CST_LOW (t) != 0)
885 prec -= HOST_BITS_PER_WIDE_INT;
886 val = TREE_INT_CST_HIGH (t);
889 val = TREE_INT_CST_LOW (t);
890 if (prec < HOST_BITS_PER_WIDE_INT)
891 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
892 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
895 /* Determine whether an expression T can be cheaply negated using
896 the function negate_expr. */
899 negate_expr_p (tree t)
906 type = TREE_TYPE (t);
909 switch (TREE_CODE (t))
912 if (TYPE_UNSIGNED (type) || ! flag_trapv)
915 /* Check that -CST will not overflow type. */
916 return may_negate_without_overflow_p (t);
923 return negate_expr_p (TREE_REALPART (t))
924 && negate_expr_p (TREE_IMAGPART (t));
927 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
929 /* -(A + B) -> (-B) - A. */
930 if (negate_expr_p (TREE_OPERAND (t, 1))
931 && reorder_operands_p (TREE_OPERAND (t, 0),
932 TREE_OPERAND (t, 1)))
934 /* -(A + B) -> (-A) - B. */
935 return negate_expr_p (TREE_OPERAND (t, 0));
938 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
939 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
940 && reorder_operands_p (TREE_OPERAND (t, 0),
941 TREE_OPERAND (t, 1));
944 if (TYPE_UNSIGNED (TREE_TYPE (t)))
950 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
951 return negate_expr_p (TREE_OPERAND (t, 1))
952 || negate_expr_p (TREE_OPERAND (t, 0));
956 /* Negate -((double)float) as (double)(-float). */
957 if (TREE_CODE (type) == REAL_TYPE)
959 tree tem = strip_float_extensions (t);
961 return negate_expr_p (tem);
966 /* Negate -f(x) as f(-x). */
967 if (negate_mathfn_p (builtin_mathfn_code (t)))
968 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
972 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
973 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
975 tree op1 = TREE_OPERAND (t, 1);
976 if (TREE_INT_CST_HIGH (op1) == 0
977 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
978 == TREE_INT_CST_LOW (op1))
989 /* Given T, an expression, return the negation of T. Allow for T to be
990 null, in which case return null. */
1001 type = TREE_TYPE (t);
1002 STRIP_SIGN_NOPS (t);
1004 switch (TREE_CODE (t))
1007 tem = fold_negate_const (t, type);
1008 if (! TREE_OVERFLOW (tem)
1009 || TYPE_UNSIGNED (type)
1015 tem = fold_negate_const (t, type);
1016 /* Two's complement FP formats, such as c4x, may overflow. */
1017 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1018 return fold_convert (type, tem);
1023 tree rpart = negate_expr (TREE_REALPART (t));
1024 tree ipart = negate_expr (TREE_IMAGPART (t));
1026 if ((TREE_CODE (rpart) == REAL_CST
1027 && TREE_CODE (ipart) == REAL_CST)
1028 || (TREE_CODE (rpart) == INTEGER_CST
1029 && TREE_CODE (ipart) == INTEGER_CST))
1030 return build_complex (type, rpart, ipart);
1035 return fold_convert (type, TREE_OPERAND (t, 0));
1038 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1040 /* -(A + B) -> (-B) - A. */
1041 if (negate_expr_p (TREE_OPERAND (t, 1))
1042 && reorder_operands_p (TREE_OPERAND (t, 0),
1043 TREE_OPERAND (t, 1)))
1045 tem = negate_expr (TREE_OPERAND (t, 1));
1046 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1047 tem, TREE_OPERAND (t, 0)));
1048 return fold_convert (type, tem);
1051 /* -(A + B) -> (-A) - B. */
1052 if (negate_expr_p (TREE_OPERAND (t, 0)))
1054 tem = negate_expr (TREE_OPERAND (t, 0));
1055 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1056 tem, TREE_OPERAND (t, 1)));
1057 return fold_convert (type, tem);
1063 /* - (A - B) -> B - A */
1064 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1066 return fold_convert (type,
1067 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1068 TREE_OPERAND (t, 1),
1069 TREE_OPERAND (t, 0))));
1073 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1079 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1081 tem = TREE_OPERAND (t, 1);
1082 if (negate_expr_p (tem))
1083 return fold_convert (type,
1084 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1085 TREE_OPERAND (t, 0),
1086 negate_expr (tem))));
1087 tem = TREE_OPERAND (t, 0);
1088 if (negate_expr_p (tem))
1089 return fold_convert (type,
1090 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1092 TREE_OPERAND (t, 1))));
1097 /* Convert -((double)float) into (double)(-float). */
1098 if (TREE_CODE (type) == REAL_TYPE)
1100 tem = strip_float_extensions (t);
1101 if (tem != t && negate_expr_p (tem))
1102 return fold_convert (type, negate_expr (tem));
1107 /* Negate -f(x) as f(-x). */
1108 if (negate_mathfn_p (builtin_mathfn_code (t))
1109 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1111 tree fndecl, arg, arglist;
1113 fndecl = get_callee_fndecl (t);
1114 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1115 arglist = build_tree_list (NULL_TREE, arg);
1116 return build_function_call_expr (fndecl, arglist);
1121 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1122 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1124 tree op1 = TREE_OPERAND (t, 1);
1125 if (TREE_INT_CST_HIGH (op1) == 0
1126 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1127 == TREE_INT_CST_LOW (op1))
1129 tree ntype = TYPE_UNSIGNED (type)
1130 ? lang_hooks.types.signed_type (type)
1131 : lang_hooks.types.unsigned_type (type);
1132 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1133 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1134 return fold_convert (type, temp);
1143 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1144 return fold_convert (type, tem);
1147 /* Split a tree IN into a constant, literal and variable parts that could be
1148 combined with CODE to make IN. "constant" means an expression with
1149 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1150 commutative arithmetic operation. Store the constant part into *CONP,
1151 the literal in *LITP and return the variable part. If a part isn't
1152 present, set it to null. If the tree does not decompose in this way,
1153 return the entire tree as the variable part and the other parts as null.
1155 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1156 case, we negate an operand that was subtracted. Except if it is a
1157 literal for which we use *MINUS_LITP instead.
1159 If NEGATE_P is true, we are negating all of IN, again except a literal
1160 for which we use *MINUS_LITP instead.
1162 If IN is itself a literal or constant, return it as appropriate.
1164 Note that we do not guarantee that any of the three values will be the
1165 same type as IN, but they will have the same signedness and mode. */
1168 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1169 tree *minus_litp, int negate_p)
1177 /* Strip any conversions that don't change the machine mode or signedness. */
1178 STRIP_SIGN_NOPS (in);
1180 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1182 else if (TREE_CODE (in) == code
1183 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1184 /* We can associate addition and subtraction together (even
1185 though the C standard doesn't say so) for integers because
1186 the value is not affected. For reals, the value might be
1187 affected, so we can't. */
1188 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1189 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1191 tree op0 = TREE_OPERAND (in, 0);
1192 tree op1 = TREE_OPERAND (in, 1);
1193 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1194 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1196 /* First see if either of the operands is a literal, then a constant. */
1197 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1198 *litp = op0, op0 = 0;
1199 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1200 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1202 if (op0 != 0 && TREE_CONSTANT (op0))
1203 *conp = op0, op0 = 0;
1204 else if (op1 != 0 && TREE_CONSTANT (op1))
1205 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1207 /* If we haven't dealt with either operand, this is not a case we can
1208 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1209 if (op0 != 0 && op1 != 0)
1214 var = op1, neg_var_p = neg1_p;
1216 /* Now do any needed negations. */
1218 *minus_litp = *litp, *litp = 0;
1220 *conp = negate_expr (*conp);
1222 var = negate_expr (var);
1224 else if (TREE_CONSTANT (in))
1232 *minus_litp = *litp, *litp = 0;
1233 else if (*minus_litp)
1234 *litp = *minus_litp, *minus_litp = 0;
1235 *conp = negate_expr (*conp);
1236 var = negate_expr (var);
1242 /* Re-associate trees split by the above function. T1 and T2 are either
1243 expressions to associate or null. Return the new expression, if any. If
1244 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): this excerpt elides several original lines (including the
   null checks on T1/T2 and braces); code below left byte-identical.  */
1247 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1254 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1255 try to fold this since we will have infinite recursion. But do
1256 deal with any NEGATE_EXPRs. */
1257 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1258 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1260 if (code == PLUS_EXPR)
/* Rewrite t1 + (-x) as t2 - x (and symmetrically) so the NEGATE_EXPR
   is absorbed rather than re-folded.  */
1262 if (TREE_CODE (t1) == NEGATE_EXPR)
1263 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1264 fold_convert (type, TREE_OPERAND (t1, 0)));
1265 else if (TREE_CODE (t2) == NEGATE_EXPR)
1266 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1267 fold_convert (type, TREE_OPERAND (t2, 0)));
1268 else if (integer_zerop (t2))
1269 return fold_convert (type, t1);
1271 else if (code == MINUS_EXPR)
1273 if (integer_zerop (t2))
1274 return fold_convert (type, t1);
/* Build without calling fold to avoid the infinite recursion noted above.  */
1277 return build2 (code, type, fold_convert (type, t1),
1278 fold_convert (type, t2));
/* General case: safe to fold the freshly built binary operation.  */
1281 return fold (build2 (code, type, fold_convert (type, t1),
1282 fold_convert (type, t2)));
1285 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1286 to produce a new constant.
1288 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): constants are held as a double-word (low, high) pair of
   HOST_WIDE_INTs; helpers such as add_double/mul_double operate on that
   representation.  Several original lines are elided in this excerpt.  */
1291 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1293 unsigned HOST_WIDE_INT int1l, int2l;
1294 HOST_WIDE_INT int1h, int2h;
1295 unsigned HOST_WIDE_INT low;
/* Scratch outputs for the halves of div/mod results we discard.  */
1297 unsigned HOST_WIDE_INT garbagel;
1298 HOST_WIDE_INT garbageh;
1300 tree type = TREE_TYPE (arg1);
1301 int uns = TYPE_UNSIGNED (type);
1303 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1305 int no_overflow = 0;
/* Split each operand into its low/high words.  */
1307 int1l = TREE_INT_CST_LOW (arg1);
1308 int1h = TREE_INT_CST_HIGH (arg1);
1309 int2l = TREE_INT_CST_LOW (arg2);
1310 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops work independently on each word.  */
1315 low = int1l | int2l, hi = int1h | int2h;
1319 low = int1l ^ int2l, hi = int1h ^ int2h;
1323 low = int1l & int2l, hi = int1h & int2h;
1329 /* It's unclear from the C standard whether shifts can overflow.
1330 The following code ignores overflow; perhaps a C standard
1331 interpretation ruling is needed. */
1332 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1340 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1345 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation; the overflow
   test looks at the sign relationship of the three high words.  */
1349 neg_double (int2l, int2h, &low, &hi);
1350 add_double (int1l, int1h, low, hi, &low, &hi);
1351 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1355 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1358 case TRUNC_DIV_EXPR:
1359 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1360 case EXACT_DIV_EXPR:
1361 /* This is a shortcut for a common special case. */
/* Fast path: both operands fit in one non-negative word.  */
1362 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1363 && ! TREE_CONSTANT_OVERFLOW (arg1)
1364 && ! TREE_CONSTANT_OVERFLOW (arg2)
1365 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1367 if (code == CEIL_DIV_EXPR)
1370 low = int1l / int2l, hi = 0;
1374 /* ... fall through ... */
1376 case ROUND_DIV_EXPR:
/* Division by one: result is the dividend unchanged.  */
1377 if (int2h == 0 && int2l == 1)
1379 low = int1l, hi = int1h;
/* Equal nonzero operands divide to exactly one.  */
1382 if (int1l == int2l && int1h == int2h
1383 && ! (int1l == 0 && int1h == 0))
1388 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1389 &low, &hi, &garbagel, &garbageh);
1392 case TRUNC_MOD_EXPR:
1393 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1394 /* This is a shortcut for a common special case. */
1395 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1396 && ! TREE_CONSTANT_OVERFLOW (arg1)
1397 && ! TREE_CONSTANT_OVERFLOW (arg2)
1398 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1400 if (code == CEIL_MOD_EXPR)
1402 low = int1l % int2l, hi = 0;
1406 /* ... fall through ... */
1408 case ROUND_MOD_EXPR:
/* For modulus, keep the remainder halves and discard the quotient.  */
1409 overflow = div_and_round_double (code, uns,
1410 int1l, int1h, int2l, int2h,
1411 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare double-word values, unsigned or signed per UNS.  */
1417 low = (((unsigned HOST_WIDE_INT) int1h
1418 < (unsigned HOST_WIDE_INT) int2h)
1419 || (((unsigned HOST_WIDE_INT) int1h
1420 == (unsigned HOST_WIDE_INT) int2h)
1423 low = (int1h < int2h
1424 || (int1h == int2h && int1l < int2l));
/* LOW now holds the "arg1 < arg2" predicate; pick the right operand.  */
1426 if (low == (code == MIN_EXPR))
1427 low = int1l, hi = int1h;
1429 low = int2l, hi = int2h;
1436 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1440 /* Propagate overflow flags ourselves. */
/* Overflow only matters for signed types and for sizetypes.  */
1441 if (((!uns || is_sizetype) && overflow)
1442 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1445 TREE_OVERFLOW (t) = 1;
1446 TREE_CONSTANT_OVERFLOW (t) = 1;
1448 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1451 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Otherwise let force_fit_type truncate and set the flags.  */
1455 t = force_fit_type (t, 1,
1456 ((!uns || is_sizetype) && overflow)
1457 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1458 TREE_CONSTANT_OVERFLOW (arg1)
1459 | TREE_CONSTANT_OVERFLOW (arg2));
1464 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1465 constant. We assume ARG1 and ARG2 have the same data type, or at least
1466 are the same kind of constant and the same machine mode.
1468 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): dispatches on constant kind — integer, real, complex.
   Some original lines are elided in this excerpt.  */
1471 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants: delegate to the double-word integer folder.  */
1476 if (TREE_CODE (arg1) == INTEGER_CST)
1477 return int_const_binop (code, arg1, arg2, notrunc);
1479 if (TREE_CODE (arg1) == REAL_CST)
1481 enum machine_mode mode;
1484 REAL_VALUE_TYPE value;
1485 REAL_VALUE_TYPE result;
1489 d1 = TREE_REAL_CST (arg1);
1490 d2 = TREE_REAL_CST (arg2);
1492 type = TREE_TYPE (arg1);
1493 mode = TYPE_MODE (type);
1495 /* Don't perform operation if we honor signaling NaNs and
1496 either operand is a NaN. */
1497 if (HONOR_SNANS (mode)
1498 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1501 /* Don't perform operation if it would raise a division
1502 by zero exception. */
1503 if (code == RDIV_EXPR
1504 && REAL_VALUES_EQUAL (d2, dconst0)
1505 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1508 /* If either operand is a NaN, just return it. Otherwise, set up
1509 for floating-point trap; we return an overflow. */
1510 if (REAL_VALUE_ISNAN (d1))
1512 else if (REAL_VALUE_ISNAN (d2))
/* Compute in GCC's software FP, then round to the target mode.  */
1515 inexact = real_arithmetic (&value, code, &d1, &d2);
1516 real_convert (&result, mode, &value);
1518 /* Don't constant fold this floating point operation if the
1519 result may dependent upon the run-time rounding mode and
1520 flag_rounding_math is set, or if GCC's software emulation
1521 is unable to accurately represent the result. */
1523 if ((flag_rounding_math
1524 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1525 && !flag_unsafe_math_optimizations))
1526 && (inexact || !real_identical (&result, &value)))
1529 t = build_real (type, result);
/* Propagate both overflow flags from the operands.  */
1531 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1532 TREE_CONSTANT_OVERFLOW (t)
1534 | TREE_CONSTANT_OVERFLOW (arg1)
1535 | TREE_CONSTANT_OVERFLOW (arg2);
/* Complex constants: fold the real and imaginary parts recursively.  */
1538 if (TREE_CODE (arg1) == COMPLEX_CST)
1540 tree type = TREE_TYPE (arg1);
1541 tree r1 = TREE_REALPART (arg1);
1542 tree i1 = TREE_IMAGPART (arg1);
1543 tree r2 = TREE_REALPART (arg2);
1544 tree i2 = TREE_IMAGPART (arg2);
1550 t = build_complex (type,
1551 const_binop (PLUS_EXPR, r1, r2, notrunc),
1552 const_binop (PLUS_EXPR, i1, i2, notrunc));
1556 t = build_complex (type,
1557 const_binop (MINUS_EXPR, r1, r2, notrunc),
1558 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.  */
1562 t = build_complex (type,
1563 const_binop (MINUS_EXPR,
1564 const_binop (MULT_EXPR,
1566 const_binop (MULT_EXPR,
1569 const_binop (PLUS_EXPR,
1570 const_binop (MULT_EXPR,
1572 const_binop (MULT_EXPR,
/* Complex divide: scale by |arg2|^2 = r2*r2 + i2*i2.  */
1580 = const_binop (PLUS_EXPR,
1581 const_binop (MULT_EXPR, r2, r2, notrunc),
1582 const_binop (MULT_EXPR, i2, i2, notrunc),
1585 t = build_complex (type,
1587 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1588 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1589 const_binop (PLUS_EXPR,
1590 const_binop (MULT_EXPR, r1, r2,
1592 const_binop (MULT_EXPR, i1, i2,
1595 magsquared, notrunc),
1597 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1598 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1599 const_binop (MINUS_EXPR,
1600 const_binop (MULT_EXPR, i1, r2,
1602 const_binop (MULT_EXPR, r1, i2,
1605 magsquared, notrunc));
1617 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1618 indicates which particular sizetype to create. */
/* NOTE(review): KIND indexes sizetype_tab to pick e.g. sizetype vs.
   bitsizetype; the return-type line is elided in this excerpt.  */
1621 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1623 return build_int_cst (sizetype_tab[(int) kind], number);
1626 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1627 is a tree code. The type of the result is taken from the operands.
1628 Both must be the same type integer type and it must be a size type.
1629 If the operands are constant, so is the result. */
1632 size_binop (enum tree_code code, tree arg0, tree arg1)
1634 tree type = TREE_TYPE (arg0);
/* Both operands must share the same sizetype; enforced here.  */
1636 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1637 && type == TREE_TYPE (arg1));
1639 /* Handle the special case of two integer constants faster. */
1640 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1642 /* And some specific cases even faster than that. */
/* Identities: 0 + x, x +/- 0, 1 * x need no arithmetic at all.  */
1643 if (code == PLUS_EXPR && integer_zerop (arg0))
1645 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1646 && integer_zerop (arg1))
1648 else if (code == MULT_EXPR && integer_onep (arg0))
1651 /* Handle general case of two integer constants. */
1652 return int_const_binop (code, arg0, arg1, 0);
1655 if (arg0 == error_mark_node || arg1 == error_mark_node)
1656 return error_mark_node;
/* Non-constant operands: build the expression and fold it.  */
1658 return fold (build2 (code, type, arg0, arg1));
1661 /* Given two values, either both of sizetype or both of bitsizetype,
1662 compute the difference between the two values. Return the value
1663 in signed type corresponding to the type of the operands. */
1666 size_diffop (tree arg0, tree arg1)
1668 tree type = TREE_TYPE (arg0);
1671 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1672 && type == TREE_TYPE (arg1));
1674 /* If the type is already signed, just do the simple thing. */
1675 if (!TYPE_UNSIGNED (type))
1676 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) operand type.  */
1678 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1680 /* If either operand is not a constant, do the conversions to the signed
1681 type and subtract. The hardware will do the right thing with any
1682 overflow in the subtraction. */
1683 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1684 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1685 fold_convert (ctype, arg1));
1687 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1688 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1689 overflow) and negate (which can't either). Special-case a result
1690 of zero while we're here. */
1691 if (tree_int_cst_equal (arg0, arg1))
1692 return fold_convert (ctype, integer_zero_node);
1693 else if (tree_int_cst_lt (arg1, arg0))
1694 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute 0 - (arg1 - arg0) in the signed type.  */
1696 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1697 fold_convert (ctype, size_binop (MINUS_EXPR,
1701 /* A subroutine of fold_convert_const handling conversions of an
1702 INTEGER_CST to another integer type. */
1705 fold_convert_const_int_from_int (tree type, tree arg1)
1709 /* Given an integer constant, make new constant with new type,
1710 appropriately sign-extended or truncated. */
1711 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1712 TREE_INT_CST_HIGH (arg1));
/* force_fit_type truncates/extends to TYPE and computes the overflow
   flags; a negative source widened to a "more unsigned" target counts
   as an overflow.  */
1714 t = force_fit_type (t,
1715 /* Don't set the overflow when
1716 converting a pointer */
1717 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1718 (TREE_INT_CST_HIGH (arg1) < 0
1719 && (TYPE_UNSIGNED (type)
1720 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1721 | TREE_OVERFLOW (arg1),
1722 TREE_CONSTANT_OVERFLOW (arg1));
1727 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1728 to an integer type. */
1731 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1736 /* The following code implements the floating point to integer
1737 conversion rules required by the Java Language Specification,
1738 that IEEE NaNs are mapped to zero and values that overflow
1739 the target precision saturate, i.e. values greater than
1740 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1741 are mapped to INT_MIN. These semantics are allowed by the
1742 C and C++ standards that simply state that the behavior of
1743 FP-to-integer conversion is unspecified upon overflow. */
1745 HOST_WIDE_INT high, low;
1747 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Select the rounding direction from the conversion tree code.  */
1751 case FIX_TRUNC_EXPR:
1752 real_trunc (&r, VOIDmode, &x);
1756 real_ceil (&r, VOIDmode, &x);
1759 case FIX_FLOOR_EXPR:
1760 real_floor (&r, VOIDmode, &x);
1763 case FIX_ROUND_EXPR:
1764 real_round (&r, VOIDmode, &x);
1771 /* If R is NaN, return zero and show we have an overflow. */
1772 if (REAL_VALUE_ISNAN (r))
1779 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE when below range.  */
1784 tree lt = TYPE_MIN_VALUE (type);
1785 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1786 if (REAL_VALUES_LESS (r, l))
1789 high = TREE_INT_CST_HIGH (lt);
1790 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE_MAX_VALUE when above range.  */
1796 tree ut = TYPE_MAX_VALUE (type);
1799 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1800 if (REAL_VALUES_LESS (u, r))
1803 high = TREE_INT_CST_HIGH (ut);
1804 low = TREE_INT_CST_LOW (ut);
/* In range: convert exactly to a double-word integer.  */
1810 REAL_VALUE_TO_INT (&low, &high, r);
1812 t = build_int_cst_wide (type, low, high);
1814 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1815 TREE_CONSTANT_OVERFLOW (arg1));
1819 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1820 to another floating point type. */
1823 fold_convert_const_real_from_real (tree type, tree arg1)
1825 REAL_VALUE_TYPE value;
/* Round the source value to the target mode, then wrap it in a new
   REAL_CST, carrying over the operand's overflow flags.  */
1828 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1829 t = build_real (type, value);
1831 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1832 TREE_CONSTANT_OVERFLOW (t)
1833 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1837 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1838 type TYPE. If no simplification can be done return NULL_TREE. */
1841 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Identical types need no conversion at all.  */
1843 if (TREE_TYPE (arg1) == type)
/* Integer/pointer targets: dispatch on the kind of source constant.  */
1846 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1848 if (TREE_CODE (arg1) == INTEGER_CST)
1849 return fold_convert_const_int_from_int (type, arg1);
1850 else if (TREE_CODE (arg1) == REAL_CST)
1851 return fold_convert_const_int_from_real (code, type, arg1);
/* Floating-point targets.  */
1853 else if (TREE_CODE (type) == REAL_TYPE)
1855 if (TREE_CODE (arg1) == INTEGER_CST)
1856 return build_real_from_int_cst (type, arg1);
1857 if (TREE_CODE (arg1) == REAL_CST)
1858 return fold_convert_const_real_from_real (type, arg1);
1863 /* Construct a vector of zero elements of vector type TYPE. */
1866 build_zero_vector (tree type)
/* Build one zero of the element type, then replicate it once per lane
   into a TREE_LIST consumed by build_vector.  */
1871 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1872 units = TYPE_VECTOR_SUBPARTS (type);
1875 for (i = 0; i < units; i++)
1876 list = tree_cons (NULL_TREE, elem, list);
1877 return build_vector (type, list);
1880 /* Convert expression ARG to type TYPE. Used by the middle-end for
1881 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): switches first on the target type, then on the source
   type; several original lines (braces, some case labels) are elided in
   this excerpt.  */
1884 fold_convert (tree type, tree arg)
1886 tree orig = TREE_TYPE (arg);
/* Any ERROR_MARK involved poisons the conversion.  */
1892 if (TREE_CODE (arg) == ERROR_MARK
1893 || TREE_CODE (type) == ERROR_MARK
1894 || TREE_CODE (orig) == ERROR_MARK)
1895 return error_mark_node;
/* Compatible types: a plain NOP conversion suffices.  */
1897 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1898 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1899 TYPE_MAIN_VARIANT (orig)))
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 switch (TREE_CODE (type))
1904 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1905 case POINTER_TYPE: case REFERENCE_TYPE:
/* Integer constants may fold to a constant of the new type.  */
1907 if (TREE_CODE (arg) == INTEGER_CST)
1909 tem = fold_convert_const (NOP_EXPR, type, arg);
1910 if (tem != NULL_TREE)
1913 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1914 || TREE_CODE (orig) == OFFSET_TYPE)
1915 return fold (build1 (NOP_EXPR, type, arg));
/* Complex source: convert its real part only.  */
1916 if (TREE_CODE (orig) == COMPLEX_TYPE)
1918 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1919 return fold_convert (type, tem);
1921 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1922 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1923 return fold (build1 (NOP_EXPR, type, arg));
/* Real target: constants convert directly when possible.  */
1926 if (TREE_CODE (arg) == INTEGER_CST)
1928 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1929 if (tem != NULL_TREE)
1932 else if (TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1939 switch (TREE_CODE (orig))
1941 case INTEGER_TYPE: case CHAR_TYPE:
1942 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1943 case POINTER_TYPE: case REFERENCE_TYPE:
1944 return fold (build1 (FLOAT_EXPR, type, arg));
/* -ffloat-store forces an explicit CONVERT between real types.  */
1947 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1951 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1952 return fold_convert (type, tem);
/* Complex target.  */
1959 switch (TREE_CODE (orig))
1961 case INTEGER_TYPE: case CHAR_TYPE:
1962 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1963 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar -> complex: pair the converted value with a zero imaginary.  */
1965 return build2 (COMPLEX_EXPR, type,
1966 fold_convert (TREE_TYPE (type), arg),
1967 fold_convert (TREE_TYPE (type), integer_zero_node));
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1975 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1976 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* General complex: save ARG so both parts evaluate it only once.  */
1979 arg = save_expr (arg);
1980 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1981 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1982 rpart = fold_convert (TREE_TYPE (type), rpart);
1983 ipart = fold_convert (TREE_TYPE (type), ipart);
1984 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* Vector target: zero folds specially, otherwise sizes must match.  */
1992 if (integer_zerop (arg))
1993 return build_zero_vector (type);
1994 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1995 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1996 || TREE_CODE (orig) == VECTOR_TYPE);
1997 return fold (build1 (NOP_EXPR, type, arg));
/* VOID target (presumably — TODO confirm): drop the value.  */
2000 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
2012 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* NOTE(review): the function signature and several case labels are
   elided in this excerpt; code below left byte-identical.  */
2017 /* We only need to wrap lvalue tree codes. */
2018 switch (TREE_CODE (x))
2029 case ALIGN_INDIRECT_REF:
2030 case MISALIGNED_INDIRECT_REF:
2032 case ARRAY_RANGE_REF:
2038 case PREINCREMENT_EXPR:
2039 case PREDECREMENT_EXPR:
2041 case TRY_CATCH_EXPR:
2042 case WITH_CLEANUP_EXPR:
2053 /* Assume the worst for front-end tree codes. */
2054 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap lvalue-capable codes so callers cannot assign through X.  */
2058 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2061 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2062 Zero means allow extended lvalues. */
2064 int pedantic_lvalues;
2066 /* When pedantic, return an expr equal to X but certainly not valid as a
2067 pedantic lvalue. Otherwise, return X. */
2070 pedantic_non_lvalue (tree x)
/* Only wrap in NON_LVALUE_EXPR when pedantic mode is in effect.  */
2072 if (pedantic_lvalues)
2073 return non_lvalue (x);
2078 /* Given a tree comparison code, return the code that is the logical inverse
2079 of the given code. It is not safe to do this for floating-point
2080 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2081 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2083 static enum tree_code
2084 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inversion could change trap behavior.  */
2086 if (honor_nans && flag_trapping_math)
/* When NaNs are honored, the inverse of an ordered comparison is the
   corresponding unordered one (e.g. !(a > b) is a UNLE b).  */
2096 return honor_nans ? UNLE_EXPR : LE_EXPR;
2098 return honor_nans ? UNLT_EXPR : LT_EXPR;
2100 return honor_nans ? UNGE_EXPR : GE_EXPR;
2102 return honor_nans ? UNGT_EXPR : GT_EXPR;
2116 return UNORDERED_EXPR;
2117 case UNORDERED_EXPR:
2118 return ORDERED_EXPR;
2124 /* Similar, but return the comparison that results if the operands are
2125 swapped. This is safe for floating-point. */
/* NOTE(review): the body of this function is elided in this excerpt.  */
2128 swap_tree_comparison (enum tree_code code)
2149 /* Convert a comparison tree code from an enum tree_code representation
2150 into a compcode bit-based encoding. This function is the inverse of
2151 compcode_to_comparison. */
2153 static enum comparison_code
2154 comparison_to_compcode (enum tree_code code)
/* NOTE(review): the leading switch cases (LT/EQ/LE/GT/NE/GE) are elided
   in this excerpt; the unordered variants are below.  */
2171 return COMPCODE_ORD;
2172 case UNORDERED_EXPR:
2173 return COMPCODE_UNORD;
2175 return COMPCODE_UNLT;
2177 return COMPCODE_UNEQ;
2179 return COMPCODE_UNLE;
2181 return COMPCODE_UNGT;
2183 return COMPCODE_LTGT;
2185 return COMPCODE_UNGE;
2191 /* Convert a compcode bit-based encoding of a comparison operator back
2192 to GCC's enum tree_code representation. This function is the
2193 inverse of comparison_to_compcode. */
2195 static enum tree_code
2196 compcode_to_comparison (enum comparison_code code)
/* NOTE(review): most switch cases are elided in this excerpt.  */
2213 return ORDERED_EXPR;
2214 case COMPCODE_UNORD:
2215 return UNORDERED_EXPR;
2233 /* Return a tree for the comparison which is the combination of
2234 doing the AND or OR (depending on CODE) of the two operations LCODE
2235 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2236 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2237 if this makes the transformation invalid. */
2240 combine_comparisons (enum tree_code code, enum tree_code lcode,
2241 enum tree_code rcode, tree truth_type,
2242 tree ll_arg, tree lr_arg)
2244 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
/* Translate both comparisons into the bit-based encoding so AND/OR
   become plain bitwise operations on the codes.  */
2245 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2246 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2247 enum comparison_code compcode;
2251 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2252 compcode = lcompcode & rcompcode;
2255 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2256 compcode = lcompcode | rcompcode;
2265 /* Eliminate unordered comparisons, as well as LTGT and ORD
2266 which are not used unless the mode has NaNs. */
2267 compcode &= ~COMPCODE_UNORD;
2268 if (compcode == COMPCODE_LTGT)
2269 compcode = COMPCODE_NE;
2270 else if (compcode == COMPCODE_ORD)
2271 compcode = COMPCODE_TRUE;
2273 else if (flag_trapping_math)
2275 /* Check that the original operation and the optimized ones will trap
2276 under the same condition. */
2277 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2278 && (lcompcode != COMPCODE_EQ)
2279 && (lcompcode != COMPCODE_ORD);
2280 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2281 && (rcompcode != COMPCODE_EQ)
2282 && (rcompcode != COMPCODE_ORD);
2283 bool trap = (compcode & COMPCODE_UNORD) == 0
2284 && (compcode != COMPCODE_EQ)
2285 && (compcode != COMPCODE_ORD);
2287 /* In a short-circuited boolean expression the LHS might be
2288 such that the RHS, if evaluated, will never trap. For
2289 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2290 if neither x nor y is NaN. (This is a mixed blessing: for
2291 example, the expression above will never trap, hence
2292 optimizing it to x < y would be invalid). */
2293 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2294 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2297 /* If the comparison was short-circuited, and only the RHS
2298 trapped, we may now generate a spurious trap. */
2300 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2303 /* If we changed the conditions that cause a trap, we lose. */
2304 if ((ltrap || rtrap) != trap)
/* Degenerate combined codes fold to a boolean constant.  */
2308 if (compcode == COMPCODE_TRUE)
2309 return constant_boolean_node (true, truth_type);
2310 else if (compcode == COMPCODE_FALSE)
2311 return constant_boolean_node (false, truth_type);
/* Otherwise translate back to a tree code and fold the comparison.  */
2313 return fold (build2 (compcode_to_comparison (compcode),
2314 truth_type, ll_arg, lr_arg));
2317 /* Return nonzero if CODE is a tree code that represents a truth value. */
2320 truth_value_p (enum tree_code code)
/* Any comparison, or any of the logical TRUTH_* codes, yields 0/1.  */
2322 return (TREE_CODE_CLASS (code) == tcc_comparison
2323 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2324 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2325 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2328 /* Return nonzero if two operands (typically of the same tree node)
2329 are necessarily equal. If either argument has side-effects this
2330 function returns zero. FLAGS modifies behavior as follows:
2332 If OEP_ONLY_CONST is set, only return nonzero for constants.
2333 This function tests whether the operands are indistinguishable;
2334 it does not test whether they are equal using C's == operation.
2335 The distinction is important for IEEE floating point, because
2336 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2337 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2339 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2340 even though it may hold multiple values during a function.
2341 This is because a GCC tree node guarantees that nothing else is
2342 executed between the evaluation of its "operands" (which may often
2343 be evaluated in arbitrary order). Hence if the operands themselves
2344 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2345 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2346 unset means assuming isochronic (or instantaneous) tree equivalence.
2347 Unless comparing arbitrary expression trees, such as from different
2348 statements, this flag can usually be left unset.
2350 If OEP_PURE_SAME is set, then pure functions with identical arguments
2351 are considered the same. It is used when the caller has other ways
2352 to ensure that global memory is unchanged in between. */
/* NOTE(review): several original lines (some case labels, braces and
   early returns) are elided in this excerpt; code left byte-identical.  */
2355 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2357 /* If either is ERROR_MARK, they aren't equal. */
2358 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2361 /* If both types don't have the same signedness, then we can't consider
2362 them equal. We must check this before the STRIP_NOPS calls
2363 because they may change the signedness of the arguments. */
2364 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2370 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2371 /* This is needed for conversions and for COMPONENT_REF.
2372 Might as well play it safe and always test this. */
2373 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2374 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2375 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2378 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2379 We don't care about side effects in that case because the SAVE_EXPR
2380 takes care of that for us. In all other cases, two expressions are
2381 equal if they have no side effects. If we have two identical
2382 expressions with side effects that should be treated the same due
2383 to the only side effects being identical SAVE_EXPR's, that will
2384 be detected in the recursive calls below. */
2385 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2386 && (TREE_CODE (arg0) == SAVE_EXPR
2387 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2390 /* Next handle constant cases, those for which we can return 1 even
2391 if ONLY_CONST is set. */
2392 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2393 switch (TREE_CODE (arg0))
/* Overflowed constants never compare equal to anything.  */
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && tree_int_cst_equal (arg0, arg1));
2401 return (! TREE_CONSTANT_OVERFLOW (arg0)
2402 && ! TREE_CONSTANT_OVERFLOW (arg1)
2403 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2404 TREE_REAL_CST (arg1)));
2410 if (TREE_CONSTANT_OVERFLOW (arg0)
2411 || TREE_CONSTANT_OVERFLOW (arg1))
/* Vector constants: walk both element lists in lock-step.  */
2414 v1 = TREE_VECTOR_CST_ELTS (arg0);
2415 v2 = TREE_VECTOR_CST_ELTS (arg1);
2418 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2421 v1 = TREE_CHAIN (v1);
2422 v2 = TREE_CHAIN (v2);
2429 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2431 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2435 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2436 && ! memcmp (TREE_STRING_POINTER (arg0),
2437 TREE_STRING_POINTER (arg1),
2438 TREE_STRING_LENGTH (arg0)));
2441 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2447 if (flags & OEP_ONLY_CONST)
2450 /* Define macros to test an operand from arg0 and arg1 for equality and a
2451 variant that allows null and views null as being different from any
2452 non-null value. In the latter case, if either is null, the both
2453 must be; otherwise, do the normal comparison. */
2454 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2455 TREE_OPERAND (arg1, N), flags)
2457 #define OP_SAME_WITH_NULL(N) \
2458 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2459 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2461 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2464 /* Two conversions are equal only if signedness and modes match. */
2465 switch (TREE_CODE (arg0))
2470 case FIX_TRUNC_EXPR:
2471 case FIX_FLOOR_EXPR:
2472 case FIX_ROUND_EXPR:
2473 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2474 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2484 case tcc_comparison:
2486 if (OP_SAME (0) && OP_SAME (1))
2489 /* For commutative ops, allow the other order. */
2490 return (commutative_tree_code (TREE_CODE (arg0))
2491 && operand_equal_p (TREE_OPERAND (arg0, 0),
2492 TREE_OPERAND (arg1, 1), flags)
2493 && operand_equal_p (TREE_OPERAND (arg0, 1),
2494 TREE_OPERAND (arg1, 0), flags));
2497 /* If either of the pointer (or reference) expressions we are
2498 dereferencing contain a side effect, these cannot be equal. */
2499 if (TREE_SIDE_EFFECTS (arg0)
2500 || TREE_SIDE_EFFECTS (arg1))
2503 switch (TREE_CODE (arg0))
2506 case ALIGN_INDIRECT_REF:
2507 case MISALIGNED_INDIRECT_REF:
2513 case ARRAY_RANGE_REF:
2514 /* Operands 2 and 3 may be null. */
2517 && OP_SAME_WITH_NULL (2)
2518 && OP_SAME_WITH_NULL (3));
2521 /* Handle operand 2 the same as for ARRAY_REF. */
2522 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2525 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2531 case tcc_expression:
2532 switch (TREE_CODE (arg0))
2535 case TRUTH_NOT_EXPR:
2538 case TRUTH_ANDIF_EXPR:
2539 case TRUTH_ORIF_EXPR:
2540 return OP_SAME (0) && OP_SAME (1);
2542 case TRUTH_AND_EXPR:
2544 case TRUTH_XOR_EXPR:
2545 if (OP_SAME (0) && OP_SAME (1))
2548 /* Otherwise take into account this is a commutative operation. */
2549 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2550 TREE_OPERAND (arg1, 1), flags)
2551 && operand_equal_p (TREE_OPERAND (arg0, 1),
2552 TREE_OPERAND (arg1, 0), flags));
2555 /* If the CALL_EXPRs call different functions, then they
2556 clearly can not be equal. */
/* Calls can only be equal when free of side effects (const/pure).  */
2561 unsigned int cef = call_expr_flags (arg0);
2562 if (flags & OEP_PURE_SAME)
2563 cef &= ECF_CONST | ECF_PURE;
2570 /* Now see if all the arguments are the same. operand_equal_p
2571 does not handle TREE_LIST, so we walk the operands here
2572 feeding them to operand_equal_p. */
2573 arg0 = TREE_OPERAND (arg0, 1);
2574 arg1 = TREE_OPERAND (arg1, 1);
2575 while (arg0 && arg1)
2577 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2581 arg0 = TREE_CHAIN (arg0);
2582 arg1 = TREE_CHAIN (arg1);
2585 /* If we get here and both argument lists are exhausted
2586 then the CALL_EXPRs are equal. */
2587 return ! (arg0 || arg1);
2593 case tcc_declaration:
2594 /* Consider __builtin_sqrt equal to sqrt. */
2595 return (TREE_CODE (arg0) == FUNCTION_DECL
2596 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2597 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2598 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2605 #undef OP_SAME_WITH_NULL
2608 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2609 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2611 When in doubt, return 0. */
2614 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2616 int unsignedp1, unsignedpo;
2617 tree primarg0, primarg1, primother;
2618 unsigned int correct_width;
/* Trivially equal operands need no shortening analysis.  */
2620 if (operand_equal_p (arg0, arg1, 0))
2623 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2624 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2627 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2628 and see if the inner values are the same. This removes any
2629 signedness comparison, which doesn't matter here. */
2630 primarg0 = arg0, primarg1 = arg1;
2631 STRIP_NOPS (primarg0);
2632 STRIP_NOPS (primarg1);
2633 if (operand_equal_p (primarg0, primarg1, 0))
2636 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2637 actual comparison operand, ARG0.
2639 First throw away any conversions to wider types
2640 already present in the operands. */
2642 primarg1 = get_narrower (arg1, &unsignedp1);
2643 primother = get_narrower (other, &unsignedpo);
2645 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2646 if (unsignedp1 == unsignedpo
2647 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2648 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width
2650 tree type = TREE_TYPE (arg0);
2652 /* Make sure shorter operand is extended the right way
2653 to match the longer operand. */
2654 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2655 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
/* If re-widening the narrowed ARG1 reproduces ARG0, they match.  */
2657 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2664 /* See if ARG is an expression that is either a comparison or is performing
2665 arithmetic on comparisons. The comparisons must only be comparing
2666 two different values, which will be stored in *CVAL1 and *CVAL2; if
2667 they are nonzero it means that some operands have already been found.
2668 No variables may be used anywhere else in the expression except in the
2669 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2670 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2672 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): sparse listing — some original lines (braces, returns,
   case labels) are missing between the numbered lines below.  */
/* Decides whether ARG is built solely from comparisons of at most two
   distinct values; the values found are recorded through CVAL1/CVAL2
   and *SAVE_P is set when a side-effect-free SAVE_EXPR was stripped
   (see the comment block immediately above this function).  */
2675 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2677 enum tree_code code = TREE_CODE (arg);
2678 enum tree_code_class class = TREE_CODE_CLASS (code);
2680 /* We can handle some of the tcc_expression cases here. */
/* These reclassifications (the assignments are omitted from this
   listing) let TRUTH_NOT / short-circuit ops reuse the unary/binary
   handling below.  */
2681 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2683 else if (class == tcc_expression
2684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2685 || code == COMPOUND_EXPR))
2688 else if (class == tcc_expression && code == SAVE_EXPR
2689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2691 /* If we've already found a CVAL1 or CVAL2, this expression is
2692 two complex to handle. */
2693 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
2703 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the property.  */
2706 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2707 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2708 cval1, cval2, save_p));
2713 case tcc_expression:
2714 if (code == COND_EXPR)
2715 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2716 cval1, cval2, save_p)
2717 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2718 cval1, cval2, save_p)
2719 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2720 cval1, cval2, save_p));
2723 case tcc_comparison:
2724 /* First see if we can handle the first operand, then the second. For
2725 the second operand, we know *CVAL1 can't be zero. It must be that
2726 one side of the comparison is each of the values; test for the
2727 case where this isn't true by failing if the two operands
/* Reject degenerate comparisons of an operand against itself.  */
2730 if (operand_equal_p (TREE_OPERAND (arg, 0),
2731 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 into whichever of *CVAL1/*CVAL2 is free, or
   accept a match against a value already recorded.  */
2735 *cval1 = TREE_OPERAND (arg, 0);
2736 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2738 else if (*cval2 == 0)
2739 *cval2 = TREE_OPERAND (arg, 0);
2740 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1; *CVAL1 is known nonzero by now.  */
2745 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2747 else if (*cval2 == 0)
2748 *cval2 = TREE_OPERAND (arg, 1);
2749 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2761 /* ARG is a tree that is known to contain just arithmetic operations and
2762 comparisons. Evaluate the operations in the tree substituting NEW0 for
2763 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): sparse listing — some original lines (braces, case
   labels, assignments) are missing between the numbered lines below.  */
/* Rebuilds ARG with NEW0 substituted for OLD0 and NEW1 for OLD1
   wherever they appear as comparison operands (see the comment block
   immediately above this function).  Returns the rebuilt, folded tree.  */
2767 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2769 tree type = TREE_TYPE (arg);
2770 enum tree_code code = TREE_CODE (arg);
2771 enum tree_code_class class = TREE_CODE_CLASS (code);
2773 /* We can handle some of the tcc_expression cases here. */
/* Reclassify TRUTH_NOT / short-circuit ops (the assignments are
   omitted from this listing) so the unary/binary recursion applies.  */
2774 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2776 else if (class == tcc_expression
2777 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: substitute within the single operand and refold.  */
2783 return fold (build1 (code, type,
2784 eval_subst (TREE_OPERAND (arg, 0),
2785 old0, new0, old1, new1)));
/* Binary: substitute within both operands and refold.  */
2788 return fold (build2 (code, type,
2789 eval_subst (TREE_OPERAND (arg, 0),
2790 old0, new0, old1, new1),
2791 eval_subst (TREE_OPERAND (arg, 1),
2792 old0, new0, old1, new1)));
2794 case tcc_expression:
/* The case labels selecting operand 0 vs operand 1 (presumably
   SAVE_EXPR and COMPOUND_EXPR) are missing from this listing.  */
2798 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2801 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (presumably COND_EXPR): substitute in all three operands.  */
2804 return fold (build3 (code, type,
2805 eval_subst (TREE_OPERAND (arg, 0),
2806 old0, new0, old1, new1),
2807 eval_subst (TREE_OPERAND (arg, 1),
2808 old0, new0, old1, new1),
2809 eval_subst (TREE_OPERAND (arg, 2),
2810 old0, new0, old1, new1)));
2814 /* Fall through - ??? */
2816 case tcc_comparison:
2818 tree arg0 = TREE_OPERAND (arg, 0);
2819 tree arg1 = TREE_OPERAND (arg, 1);
2821 /* We need to check both for exact equality and tree equality. The
2822 former will be true if the operand has a side-effect. In that
2823 case, we know the operand occurred exactly once. */
/* The replacement assignments (arg0 = new0; etc.) fall on lines
   omitted from this listing.  */
2825 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2827 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2830 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2832 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2835 return fold (build2 (code, type, arg0, arg1));
2843 /* Return a tree for the case when the result of an expression is RESULT
2844 converted to TYPE and OMITTED was previously an operand of the expression
2845 but is now not needed (e.g., we folded OMITTED * 0).
2847 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2848 the conversion of RESULT to TYPE. */
/* NOTE(review): sparse listing — the surrounding braces and blank lines
   are missing from the numbered lines below.  */
/* Converts RESULT to TYPE while preserving any side effects of the
   dropped operand OMITTED via a COMPOUND_EXPR (contract documented in
   the comment block immediately above this function).  */
2851 omit_one_operand (tree type, tree result, tree omitted)
2853 tree t = fold_convert (type, result);
/* Side effects of OMITTED must still be evaluated, so sequence them
   before T; fold_ignored_result strips the unused value.  */
2855 if (TREE_SIDE_EFFECTS (omitted))
2856 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* Otherwise just make sure the result is not usable as an lvalue.  */
2858 return non_lvalue (t);
2861 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
/* NOTE(review): sparse listing — the surrounding braces and blank lines
   are missing from the numbered lines below.  */
/* Identical to omit_one_operand except the final wrapper is
   pedantic_non_lvalue, as stated in the comment above.  */
2864 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2866 tree t = fold_convert (type, result);
/* Keep OMITTED's side effects, sequenced before the converted result.  */
2868 if (TREE_SIDE_EFFECTS (omitted))
2869 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2871 return pedantic_non_lvalue (t);
2874 /* Return a tree for the case when the result of an expression is RESULT
2875 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2876 of the expression but are now not needed.
2878 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2879 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2880 evaluated before OMITTED2. Otherwise, if neither has side effects,
2881 just do the conversion of RESULT to TYPE. */
/* NOTE(review): sparse listing — the surrounding braces and blank lines
   are missing from the numbered lines below.  */
/* Two-operand analogue of omit_one_operand: OMITTED1's side effects are
   evaluated before OMITTED2's, both before RESULT (see the comment
   block immediately above this function).  */
2884 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2886 tree t = fold_convert (type, result);
/* Wrap innermost first so OMITTED1 ends up outermost and thus
   evaluated before OMITTED2.  */
2888 if (TREE_SIDE_EFFECTS (omitted2))
2889 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2890 if (TREE_SIDE_EFFECTS (omitted1))
2891 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only strip lvalue-ness when no COMPOUND_EXPR wrapper was added.  */
2893 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2897 /* Return a simplified tree node for the truth-negation of ARG. This
2898 never alters ARG itself. We assume that ARG is an operation that
2899 returns a truth value (0 or 1).
2901 FIXME: one would think we would fold the result, but it causes
2902 problems with the dominator optimizer. */
/* NOTE(review): sparse listing — some original lines (braces, case
   labels, the switch header, returns) are missing between the numbered
   lines below.  */
/* Builds the logical negation of truth value ARG without modifying ARG
   (contract in the comment block immediately above; the result is
   deliberately not folded — see the FIXME there).  */
2904 invert_truthvalue (tree arg)
2906 tree type = TREE_TYPE (arg);
2907 enum tree_code code = TREE_CODE (arg);
2909 if (code == ERROR_MARK)
2912 /* If this is a comparison, we can simply invert it, except for
2913 floating-point non-equality comparisons, in which case we just
2914 enclose a TRUTH_NOT_EXPR around what we have. */
2916 if (TREE_CODE_CLASS (code) == tcc_comparison)
2918 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With trapping math, inverting an ordered FP comparison could change
   which inputs trap, so keep an explicit TRUTH_NOT_EXPR instead.  */
2919 if (FLOAT_TYPE_P (op_type)
2920 && flag_trapping_math
2921 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2922 && code != NE_EXPR && code != EQ_EXPR)
2923 return build1 (TRUTH_NOT_EXPR, type, arg);
2926 code = invert_tree_comparison (code,
2927 HONOR_NANS (TYPE_MODE (op_type)));
/* ERROR_MARK here means the comparison has no invertible form.  */
2928 if (code == ERROR_MARK)
2929 return build1 (TRUTH_NOT_EXPR, type, arg);
2931 return build2 (code, type,
2932 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Presumably the INTEGER_CST case — the case label is missing from
   this listing: a constant inverts to 1-iff-it-was-zero.  */
2939 return fold_convert (type,
2940 build_int_cst (NULL_TREE, integer_zerop (arg)));
2942 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
2943 return build2 (TRUTH_OR_EXPR, type,
2944 invert_truthvalue (TREE_OPERAND (arg, 0)),
2945 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan for the (missing) TRUTH_OR_EXPR label: !(a || b).  */
2948 return build2 (TRUTH_AND_EXPR, type,
2949 invert_truthvalue (TREE_OPERAND (arg, 0)),
2950 invert_truthvalue (TREE_OPERAND (arg, 1)));
2952 case TRUTH_XOR_EXPR:
2953 /* Here we can invert either operand. We invert the first operand
2954 unless the second operand is a TRUTH_NOT_EXPR in which case our
2955 result is the XOR of the first operand with the inside of the
2956 negation of the second operand. */
2958 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2959 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2960 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2962 return build2 (TRUTH_XOR_EXPR, type,
2963 invert_truthvalue (TREE_OPERAND (arg, 0)),
2964 TREE_OPERAND (arg, 1));
2966 case TRUTH_ANDIF_EXPR:
/* Short-circuit De Morgan; preserves evaluation order.  */
2967 return build2 (TRUTH_ORIF_EXPR, type,
2968 invert_truthvalue (TREE_OPERAND (arg, 0)),
2969 invert_truthvalue (TREE_OPERAND (arg, 1)));
2971 case TRUTH_ORIF_EXPR:
2972 return build2 (TRUTH_ANDIF_EXPR, type,
2973 invert_truthvalue (TREE_OPERAND (arg, 0)),
2974 invert_truthvalue (TREE_OPERAND (arg, 1)));
2976 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2977 return TREE_OPERAND (arg, 0);
/* Presumably the COND_EXPR label (missing): invert both arms.  */
2980 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2981 invert_truthvalue (TREE_OPERAND (arg, 1)),
2982 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* Presumably the COMPOUND_EXPR label (missing): invert the value part.  */
2985 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2986 invert_truthvalue (TREE_OPERAND (arg, 1)));
2988 case NON_LVALUE_EXPR:
2989 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversion case (label missing): only push the inversion through a
   conversion when the operand is genuinely boolean.  */
2992 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2997 return build1 (TREE_CODE (arg), type,
2998 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Presumably a BIT_AND_EXPR-with-1 case (label missing): x & 1
   inverts to x == 0.  */
3001 if (!integer_onep (TREE_OPERAND (arg, 1)))
3003 return build2 (EQ_EXPR, type, arg,
3004 fold_convert (type, integer_zero_node));
3007 return build1 (TRUTH_NOT_EXPR, type, arg);
3009 case CLEANUP_POINT_EXPR:
3010 return build1 (CLEANUP_POINT_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: anything else must already be boolean-typed.  */
3016 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3017 return build1 (TRUTH_NOT_EXPR, type, arg);
3020 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3021 operands are another bit-wise operation with a common input. If so,
3022 distribute the bit operations to save an operation and possibly two if
3023 constants are involved. For example, convert
3024 (A | B) & (A | C) into A | (B & C)
3025 Further simplification will occur if B and C are constants.
3027 If this optimization cannot be done, 0 will be returned. */
/* NOTE(review): sparse listing — some original lines (braces, the local
   declarations, the failing return) are missing between the numbered
   lines below.  */
/* Factors a common operand out of two bit-wise operations, e.g.
   (A | B) & (A | C) -> A | (B & C); returns 0 when the shapes do not
   match (see the comment block immediately above this function).  */
3030 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same bit-op, different from CODE, and one
   of BIT_AND/BIT_IOR; otherwise the transform does not apply.  */
3035 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3036 || TREE_CODE (arg0) == code
3037 || (TREE_CODE (arg0) != BIT_AND_EXPR
3038 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try all four pairings to locate the shared operand COMMON and the
   two residual operands LEFT/RIGHT.  */
3041 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3043 common = TREE_OPERAND (arg0, 0);
3044 left = TREE_OPERAND (arg0, 1);
3045 right = TREE_OPERAND (arg1, 1);
3047 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3049 common = TREE_OPERAND (arg0, 0);
3050 left = TREE_OPERAND (arg0, 1);
3051 right = TREE_OPERAND (arg1, 0);
3053 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3055 common = TREE_OPERAND (arg0, 1);
3056 left = TREE_OPERAND (arg0, 0);
3057 right = TREE_OPERAND (arg1, 1);
3059 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3061 common = TREE_OPERAND (arg0, 1);
3062 left = TREE_OPERAND (arg0, 0);
3063 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON <op0> (LEFT <code> RIGHT) and fold so constant
   LEFT/RIGHT collapse further.  */
3068 return fold (build2 (TREE_CODE (arg0), type, common,
3069 fold (build2 (code, type, left, right))));
3072 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3073 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
/* NOTE(review): sparse listing — some original lines (remaining
   parameters, braces, the final return) are missing between the
   numbered lines below.  */
/* Builds a BIT_FIELD_REF of TYPE covering BITSIZE bits of INNER at
   BITPOS (see the comment immediately above this function).  */
3076 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Fast path: when the requested field covers the whole of an integral
   or pointer INNER, a plain conversion suffices — no extraction.  */
3083 tree size = TYPE_SIZE (TREE_TYPE (inner));
3084 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3085 || POINTER_TYPE_P (TREE_TYPE (inner)))
3086 && host_integerp (size, 0)
3087 && tree_low_cst (size, 0) == bitsize)
3088 return fold_convert (type, inner);
3091 result = build3 (BIT_FIELD_REF, type, inner,
3092 size_int (bitsize), bitsize_int (bitpos));
/* Record the requested signedness on the reference node.  */
3094 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3099 /* Optimize a bit-field compare.
3101 There are two cases: First is a compare against a constant and the
3102 second is a comparison of two items where the fields are at the same
3103 bit position relative to the start of a chunk (byte, halfword, word)
3104 large enough to contain it. In these cases we can avoid the shift
3105 implicit in bitfield extractions.
3107 For constants, we emit a compare of the shifted constant with the
3108 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3109 compared. For two fields at the same position, we do the ANDs with the
3110 similar mask and compare the result of the ANDs.
3112 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3113 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3114 are the left and right operands of the comparison, respectively.
3116 If the optimization described above can be done, we return the resulting
3117 tree. Otherwise we return zero. */
/* NOTE(review): sparse listing — some original lines (braces, early
   returns, several declarations) are missing between the numbered
   lines below.  */
/* Tries to rewrite a bit-field comparison (LHS CODE RHS) without the
   implicit extraction shift; full contract is in the comment block
   immediately above this function.  Returns 0 when not applicable.  */
3120 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3123 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3124 tree type = TREE_TYPE (lhs);
3125 tree signed_type, unsigned_type;
3126 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3127 enum machine_mode lmode, rmode, nmode;
3128 int lunsignedp, runsignedp;
3129 int lvolatilep = 0, rvolatilep = 0;
3130 tree linner, rinner = NULL_TREE;
3134 /* Get all the information about the extractions being done. If the bit size
3135 if the same as the size of the underlying object, we aren't doing an
3136 extraction at all and so can do nothing. We also don't want to
3137 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3138 then will no longer be able to replace it. */
3139 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3140 &lunsignedp, &lvolatilep, false);
3141 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3142 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3147 /* If this is not a constant, we can only do something if bit positions,
3148 sizes, and signedness are the same. */
3149 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3150 &runsignedp, &rvolatilep, false);
3152 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3153 || lunsignedp != runsignedp || offset != 0
3154 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3158 /* See if we can find a mode to refer to this field. We should be able to,
3159 but fail if we can't. */
3160 nmode = get_best_mode (lbitsize, lbitpos,
3161 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3162 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3163 TYPE_ALIGN (TREE_TYPE (rinner))),
3164 word_mode, lvolatilep || rvolatilep);
3165 if (nmode == VOIDmode)
3168 /* Set signed and unsigned types of the precision of this mode for the
3170 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3171 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3173 /* Compute the bit position and size for the new reference and our offset
3174 within it. If the new reference is the same size as the original, we
3175 won't optimize anything, so return zero. */
3176 nbitsize = GET_MODE_BITSIZE (nmode);
3177 nbitpos = lbitpos & ~ (nbitsize - 1);
3179 if (nbitsize == lbitsize)
/* Bit positions in the wider unit are endian-relative.  */
3182 if (BYTES_BIG_ENDIAN)
3183 lbitpos = nbitsize - lbitsize - lbitpos;
3185 /* Make the mask to be used against the extracted field. */
/* Build an all-ones value of the mode, then shift it so only the
   LBITSIZE bits at LBITPOS remain set.  */
3186 mask = build_int_cst (unsigned_type, -1);
3187 mask = force_fit_type (mask, 0, false, false);
3188 mask = fold_convert (unsigned_type, mask);
3189 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3190 mask = const_binop (RSHIFT_EXPR, mask,
3191 size_int (nbitsize - lbitsize - lbitpos), 0);
3194 /* If not comparing with constant, just rework the comparison
/* Two-field case: AND each side's containing unit with MASK and
   compare the results.  */
3196 return build2 (code, compare_type,
3197 build2 (BIT_AND_EXPR, unsigned_type,
3198 make_bit_field_ref (linner, unsigned_type,
3199 nbitsize, nbitpos, 1),
3201 build2 (BIT_AND_EXPR, unsigned_type,
3202 make_bit_field_ref (rinner, unsigned_type,
3203 nbitsize, nbitpos, 1),
3206 /* Otherwise, we are handling the constant case. See if the constant is too
3207 big for the field. Warn and return a tree of for 0 (false) if so. We do
3208 this not only for its own sake, but to avoid having to test for this
3209 error case below. If we didn't, we might generate wrong code.
3211 For unsigned fields, the constant shifted right by the field length should
3212 be all zero. For signed fields, the high-order bits should agree with
/* Unsigned field: any bits above LBITSIZE make the compare constant.  */
3217 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3218 fold_convert (unsigned_type, rhs),
3219 size_int (lbitsize), 0)))
3221 warning ("comparison is always %d due to width of bit-field",
3223 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: the high-order bits must be all zeros or all ones.  */
3228 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3229 size_int (lbitsize - 1), 0);
3230 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3232 warning ("comparison is always %d due to width of bit-field",
3234 return constant_boolean_node (code == NE_EXPR, compare_type);
3238 /* Single-bit compares should always be against zero. */
3239 if (lbitsize == 1 && ! integer_zerop (rhs))
3241 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3242 rhs = fold_convert (type, integer_zero_node);
3245 /* Make a new bitfield reference, shift the constant over the
3246 appropriate number of bits and mask it with the computed mask
3247 (in case this was a signed field). If we changed it, make a new one. */
3248 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Volatility/side-effect propagation (condition line omitted from
   this listing).  */
3251 TREE_SIDE_EFFECTS (lhs) = 1;
3252 TREE_THIS_VOLATILE (lhs) = 1;
3255 rhs = fold (const_binop (BIT_AND_EXPR,
3256 const_binop (LSHIFT_EXPR,
3257 fold_convert (unsigned_type, rhs),
3258 size_int (lbitpos), 0),
/* Final form: (unit & MASK) CODE shifted-masked-constant.  */
3261 return build2 (code, compare_type,
3262 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3266 /* Subroutine for fold_truthop: decode a field reference.
3268 If EXP is a comparison reference, we return the innermost reference.
3270 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3271 set to the starting bit number.
3273 If the innermost field can be completely contained in a mode-sized
3274 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3276 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3277 otherwise it is not changed.
3279 *PUNSIGNEDP is set to the signedness of the field.
3281 *PMASK is set to the mask used. This is either contained in a
3282 BIT_AND_EXPR or derived from the width of the field.
3284 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3286 Return 0 if this is not a component reference or is one that we can't
3287 do anything with. */
/* NOTE(review): sparse listing — some original lines (braces, early
   returns, some declarations) are missing between the numbered lines
   below.  */
/* Decodes EXP as a field reference for fold_truthop; outputs are
   documented in the comment block immediately above this function.  */
3290 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3291 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3292 int *punsignedp, int *pvolatilep,
3293 tree *pmask, tree *pand_mask)
3295 tree outer_type = 0;
3297 tree mask, inner, offset;
3299 unsigned int precision;
3301 /* All the optimizations using this function assume integer fields.
3302 There are problems with FP fields since the type_for_size call
3303 below can fail for, e.g., XFmode. */
3304 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3307 /* We are interested in the bare arrangement of bits, so strip everything
3308 that doesn't affect the machine mode. However, record the type of the
3309 outermost expression if it may matter below. */
3310 if (TREE_CODE (exp) == NOP_EXPR
3311 || TREE_CODE (exp) == CONVERT_EXPR
3312 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3313 outer_type = TREE_TYPE (exp);
/* Peel off an explicit AND mask, if present, before looking inside.  */
3316 if (TREE_CODE (exp) == BIT_AND_EXPR)
3318 and_mask = TREE_OPERAND (exp, 1);
3319 exp = TREE_OPERAND (exp, 0);
3320 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3321 if (TREE_CODE (and_mask) != INTEGER_CST)
3325 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3326 punsignedp, pvolatilep, false);
/* Fail when no real extraction happened (unless an AND mask was
   found), the size is negative, there is a variable offset, or the
   base is a PLACEHOLDER_EXPR.  */
3327 if ((inner == exp && and_mask == 0)
3328 || *pbitsize < 0 || offset != 0
3329 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3332 /* If the number of bits in the reference is the same as the bitsize of
3333 the outer type, then the outer type gives the signedness. Otherwise
3334 (in case of a small bitfield) the signedness is unchanged. */
3335 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3336 *punsignedp = TYPE_UNSIGNED (outer_type);
3338 /* Compute the mask to access the bitfield. */
3339 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3340 precision = TYPE_PRECISION (unsigned_type);
/* All-ones constant of the field's unsigned type, then shift away the
   excess high bits so exactly *PBITSIZE low bits remain set.  */
3342 mask = build_int_cst (unsigned_type, -1);
3343 mask = force_fit_type (mask, 0, false, false);
3345 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3346 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3348 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3350 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3351 fold_convert (unsigned_type, and_mask), mask));
/* *PMASK assignment falls on a line omitted from this listing.  */
3354 *pand_mask = and_mask;
3358 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* NOTE(review): sparse listing — some original lines (braces, the
   return-expression head) are missing between the numbered lines
   below.  */
/* Tests whether MASK is exactly SIZE low-order one bits in its type
   (see the comment immediately above this function).  */
3362 all_ones_mask_p (tree mask, int size)
3364 tree type = TREE_TYPE (mask);
3365 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed variant of TYPE.  */
3368 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3369 tmask = force_fit_type (tmask, 0, false, false);
/* Shift all-ones left then (arithmetically) right by PRECISION-SIZE to
   leave SIZE low one bits, and compare against MASK.  */
3372 tree_int_cst_equal (mask,
3373 const_binop (RSHIFT_EXPR,
3374 const_binop (LSHIFT_EXPR, tmask,
3375 size_int (precision - size),
3377 size_int (precision - size), 0));
3380 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3381 represents the sign bit of EXP's type. If EXP represents a sign
3382 or zero extension, also test VAL against the unextended type.
3383 The return value is the (sub)expression whose sign bit is VAL,
3384 or NULL_TREE otherwise. */
/* NOTE(review): sparse listing — some original lines (braces, returns,
   the zero-assignments for the unused half of the mask) are missing
   between the numbered lines below.  */
/* Determines whether VAL is exactly the sign bit of EXP's type,
   recursing through extensions; contract is in the comment block
   immediately above this function.  */
3387 sign_bit_p (tree exp, tree val)
3389 unsigned HOST_WIDE_INT mask_lo, lo;
3390 HOST_WIDE_INT mask_hi, hi;
3394 /* Tree EXP must have an integral type. */
3395 t = TREE_TYPE (exp);
3396 if (! INTEGRAL_TYPE_P (t))
3399 /* Tree VAL must be an integer constant. */
3400 if (TREE_CODE (val) != INTEGER_CST
3401 || TREE_CONSTANT_OVERFLOW (val))
3404 width = TYPE_PRECISION (t);
/* Wide case: the sign bit lives in the high HOST_WIDE_INT word.  */
3405 if (width > HOST_BITS_PER_WIDE_INT)
3407 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3410 mask_hi = ((unsigned HOST_WIDE_INT) -1
3411 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case: sign bit and mask fit in the low word.  */
3417 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3420 mask_lo = ((unsigned HOST_WIDE_INT) -1
3421 >> (HOST_BITS_PER_WIDE_INT - width));
3424 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3425 treat VAL as if it were unsigned. */
3426 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3427 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3430 /* Handle extension from a narrower type. */
3431 if (TREE_CODE (exp) == NOP_EXPR
3432 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3433 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3438 /* Subroutine for fold_truthop: determine if an operand is simple enough
3439 to be evaluated unconditionally. */
/* NOTE(review): sparse listing — some original lines (braces, the
   STRIP_NOPS call, the DECL_P test opening the big conjunction) are
   missing between the numbered lines below.  */
/* Tests whether EXP is cheap and safe to evaluate unconditionally
   (used by fold_truthop; see the comment immediately above).  */
3442 simple_operand_p (tree exp)
3444 /* Strip any conversions that don't change the machine mode. */
/* Constants and SSA names are always simple; declarations qualify
   only under the restrictions listed below.  */
3447 return (CONSTANT_CLASS_P (exp)
3448 || TREE_CODE (exp) == SSA_NAME
3450 && ! TREE_ADDRESSABLE (exp)
3451 && ! TREE_THIS_VOLATILE (exp)
3452 && ! DECL_NONLOCAL (exp)
3453 /* Don't regard global variables as simple. They may be
3454 allocated in ways unknown to the compiler (shared memory,
3455 #pragma weak, etc). */
3456 && ! TREE_PUBLIC (exp)
3457 && ! DECL_EXTERNAL (exp)
3458 /* Loading a static variable is unduly expensive, but global
3459 registers aren't expensive. */
3460 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3463 /* The following functions are subroutines to fold_range_test and allow it to
3464 try to change a logical combination of comparisons into a range test.
3467 X == 2 || X == 3 || X == 4 || X == 5
3471 (unsigned) (X - 2) <= 3
3473 We describe each set of comparisons as being either inside or outside
3474 a range, using a variable named like IN_P, and then describe the
3475 range with a lower and upper bound. If one of the bounds is omitted,
3476 it represents either the highest or lowest value of the type.
3478 In the comments below, we represent a range by two numbers in brackets
3479 preceded by a "+" to designate being inside that range, or a "-" to
3480 designate being outside that range, so the condition can be inverted by
3481 flipping the prefix. An omitted bound is represented by a "-". For
3482 example, "- [-, 10]" means being outside the range starting at the lowest
3483 possible value and ending at 10, in other words, being greater than 10.
3484 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3487 We set up things so that the missing bounds are handled in a consistent
3488 manner so neither a missing bound nor "true" and "false" need to be
3489 handled using a special case. */
3491 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3492 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3493 and UPPER1_P are nonzero if the respective argument is an upper bound
3494 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3495 must be specified for a comparison. ARG1 will be converted to ARG0's
3496 type if both are specified. */
/* NOTE(review): sparse listing — some original lines (braces, the local
   declarations, the switch header, break statements, the 0-returns)
   are missing between the numbered lines below.  */
/* Applies CODE to ARG0/ARG1 treating a null argument as an omitted
   (infinite) range bound; contract is in the comment block immediately
   above this function.  */
3499 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3500 tree arg1, int upper1_p)
3506 /* If neither arg represents infinity, do the normal operation.
3507 Else, if not a comparison, return infinity. Else handle the special
3508 comparison rules. Note that most of the cases below won't occur, but
3509 are handled for consistency. */
3511 if (arg0 != 0 && arg1 != 0)
3513 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3514 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
/* Only a constant fold result is useful to range analysis.  */
3516 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3519 if (TREE_CODE_CLASS (code) != tcc_comparison)
3522 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3523 for neither. In real maths, we cannot assume open ended ranges are
3524 the same. But, this is computer arithmetic, where numbers are finite.
3525 We can therefore make the transformation of any unbounded range with
3526 the value Z, Z being greater than any representable number. This permits
3527 us to treat unbounded ranges as equal. */
3528 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3529 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the surrogate signs; the case labels for each comparison
   code fall on lines omitted from this listing.  */
3533 result = sgn0 == sgn1;
3536 result = sgn0 != sgn1;
3539 result = sgn0 < sgn1;
3542 result = sgn0 <= sgn1;
3545 result = sgn0 > sgn1;
3548 result = sgn0 >= sgn1;
3554 return constant_boolean_node (result, type);
3557 /* Given EXP, a logical expression, set the range it is testing into
3558 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3559 actually being tested. *PLOW and *PHIGH will be made of the same type
3560 as the returned expression. If EXP is not a comparison, we will most
3561 likely not be returning a useful value and range. */
3564 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3566 enum tree_code code;
3567 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3568 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3570 tree low, high, n_low, n_high;
3572 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3573 and see if we can refine the range. Some of the cases below may not
3574 happen, but it doesn't seem worth worrying about this. We "continue"
3575 the outer loop when we've changed something; otherwise we "break"
3576 the switch, which will "break" the while. */
3579 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3583 code = TREE_CODE (exp);
3584 exp_type = TREE_TYPE (exp);
3586 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3588 if (TREE_CODE_LENGTH (code) > 0)
3589 arg0 = TREE_OPERAND (exp, 0);
3590 if (TREE_CODE_CLASS (code) == tcc_comparison
3591 || TREE_CODE_CLASS (code) == tcc_unary
3592 || TREE_CODE_CLASS (code) == tcc_binary)
3593 arg0_type = TREE_TYPE (arg0);
3594 if (TREE_CODE_CLASS (code) == tcc_binary
3595 || TREE_CODE_CLASS (code) == tcc_comparison
3596 || (TREE_CODE_CLASS (code) == tcc_expression
3597 && TREE_CODE_LENGTH (code) > 1))
3598 arg1 = TREE_OPERAND (exp, 1);
3603 case TRUTH_NOT_EXPR:
3604 in_p = ! in_p, exp = arg0;
3607 case EQ_EXPR: case NE_EXPR:
3608 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3609 /* We can only do something if the range is testing for zero
3610 and if the second operand is an integer constant. Note that
3611 saying something is "in" the range we make is done by
3612 complementing IN_P since it will set in the initial case of
3613 being not equal to zero; "out" is leaving it alone. */
3614 if (low == 0 || high == 0
3615 || ! integer_zerop (low) || ! integer_zerop (high)
3616 || TREE_CODE (arg1) != INTEGER_CST)
3621 case NE_EXPR: /* - [c, c] */
3624 case EQ_EXPR: /* + [c, c] */
3625 in_p = ! in_p, low = high = arg1;
3627 case GT_EXPR: /* - [-, c] */
3628 low = 0, high = arg1;
3630 case GE_EXPR: /* + [c, -] */
3631 in_p = ! in_p, low = arg1, high = 0;
3633 case LT_EXPR: /* - [c, -] */
3634 low = arg1, high = 0;
3636 case LE_EXPR: /* + [-, c] */
3637 in_p = ! in_p, low = 0, high = arg1;
3643 /* If this is an unsigned comparison, we also know that EXP is
3644 greater than or equal to zero. We base the range tests we make
3645 on that fact, so we record it here so we can parse existing
3646 range tests. We test arg0_type since often the return type
3647 of, e.g. EQ_EXPR, is boolean. */
3648 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3650 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3652 fold_convert (arg0_type, integer_zero_node),
3656 in_p = n_in_p, low = n_low, high = n_high;
3658 /* If the high bound is missing, but we have a nonzero low
3659 bound, reverse the range so it goes from zero to the low bound
3661 if (high == 0 && low && ! integer_zerop (low))
3664 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3665 integer_one_node, 0);
3666 low = fold_convert (arg0_type, integer_zero_node);
3674 /* (-x) IN [a,b] -> x in [-b, -a] */
3675 n_low = range_binop (MINUS_EXPR, exp_type,
3676 fold_convert (exp_type, integer_zero_node),
3678 n_high = range_binop (MINUS_EXPR, exp_type,
3679 fold_convert (exp_type, integer_zero_node),
3681 low = n_low, high = n_high;
3687 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3688 fold_convert (exp_type, integer_one_node));
3691 case PLUS_EXPR: case MINUS_EXPR:
3692 if (TREE_CODE (arg1) != INTEGER_CST)
3695 /* If EXP is signed, any overflow in the computation is undefined,
3696 so we don't worry about it so long as our computations on
3697 the bounds don't overflow. For unsigned, overflow is defined
3698 and this is exactly the right thing. */
3699 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3700 arg0_type, low, 0, arg1, 0);
3701 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3702 arg0_type, high, 1, arg1, 0);
3703 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3704 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3707 /* Check for an unsigned range which has wrapped around the maximum
3708 value thus making n_high < n_low, and normalize it. */
3709 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3711 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3712 integer_one_node, 0);
3713 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3714 integer_one_node, 0);
3716 /* If the range is of the form +/- [ x+1, x ], we won't
3717 be able to normalize it. But then, it represents the
3718 whole range or the empty set, so make it
3720 if (tree_int_cst_equal (n_low, low)
3721 && tree_int_cst_equal (n_high, high))
3727 low = n_low, high = n_high;
3732 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3733 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3736 if (! INTEGRAL_TYPE_P (arg0_type)
3737 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3738 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3741 n_low = low, n_high = high;
3744 n_low = fold_convert (arg0_type, n_low);
3747 n_high = fold_convert (arg0_type, n_high);
3750 /* If we're converting arg0 from an unsigned type, to exp,
3751 a signed type, we will be doing the comparison as unsigned.
3752 The tests above have already verified that LOW and HIGH
3755 So we have to ensure that we will handle large unsigned
3756 values the same way that the current signed bounds treat
3759 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3762 tree equiv_type = lang_hooks.types.type_for_mode
3763 (TYPE_MODE (arg0_type), 1);
3765 /* A range without an upper bound is, naturally, unbounded.
3766 Since convert would have cropped a very large value, use
3767 the max value for the destination type. */
3769 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3770 : TYPE_MAX_VALUE (arg0_type);
3772 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3773 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3774 fold_convert (arg0_type,
3776 fold_convert (arg0_type,
3777 integer_one_node)));
3779 /* If the low bound is specified, "and" the range with the
3780 range for which the original unsigned value will be
3784 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3785 1, n_low, n_high, 1,
3786 fold_convert (arg0_type,
3791 in_p = (n_in_p == in_p);
3795 /* Otherwise, "or" the range with the range of the input
3796 that will be interpreted as negative. */
3797 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3798 0, n_low, n_high, 1,
3799 fold_convert (arg0_type,
3804 in_p = (in_p != n_in_p);
3809 low = n_low, high = n_high;
3819 /* If EXP is a constant, we can evaluate whether this is true or false. */
3820 if (TREE_CODE (exp) == INTEGER_CST)
3822 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3824 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3830 *pin_p = in_p, *plow = low, *phigh = high;
3834 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3835 type, TYPE, return an expression to test if EXP is in (or out of, depending
3836 on IN_P) the range. Return 0 if the test couldn't be created. */
3839 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3841 tree etype = TREE_TYPE (exp);
/* An "out of range" test is built as the inversion of the corresponding
   "in range" test.  */
3846 value = build_range_check (type, exp, 1, low, high);
3848 return invert_truthvalue (value);
/* With neither bound present the range is unbounded, so the test is
   trivially true.  */
3853 if (low == 0 && high == 0)
3854 return fold_convert (type, integer_one_node);
/* One-sided ranges reduce to a single comparison against the present
   bound.  */
3857 return fold (build2 (LE_EXPR, type, exp, high));
3860 return fold (build2 (GE_EXPR, type, exp, low));
/* A degenerate range [x, x] is just an equality test.  */
3862 if (operand_equal_p (low, high, 0))
3863 return fold (build2 (EQ_EXPR, type, exp, low));
/* [0, HIGH]: redo the check in the corresponding unsigned type, where
   0 <= exp is implicit and only the upper comparison remains.  */
3865 if (integer_zerop (low))
3867 if (! TYPE_UNSIGNED (etype))
3869 etype = lang_hooks.types.unsigned_type (etype);
3870 high = fold_convert (etype, high);
3871 exp = fold_convert (etype, exp);
3873 return build_range_check (type, exp, 1, 0, high);
3876 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3877 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3879 unsigned HOST_WIDE_INT lo;
/* Build the signed maximum of ETYPE's precision as a two-word
   (HI/LO HOST_WIDE_INT) constant, depending on whether the precision
   fits in one host word.  */
3883 prec = TYPE_PRECISION (etype);
3884 if (prec <= HOST_BITS_PER_WIDE_INT)
3887 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3891 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3892 lo = (unsigned HOST_WIDE_INT) -1;
/* If HIGH is exactly that signed maximum, [1, max] is equivalent to
   "(signed) exp > 0".  */
3895 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3897 if (TYPE_UNSIGNED (etype))
3899 etype = lang_hooks.types.signed_type (etype);
3900 exp = fold_convert (etype, exp);
3902 return fold (build2 (GT_EXPR, type, exp,
3903 fold_convert (etype, integer_zero_node)));
/* General case: rewrite LOW <= exp <= HIGH as
   (exp - LOW) in [0, HIGH - LOW].  First compute the range width.  */
3907 value = const_binop (MINUS_EXPR, high, low, 0);
/* HIGH - LOW overflowed in the signed type; check whether the
   equivalent unsigned type wraps modularly (max + 1 == min) so the
   subtraction can be redone there without overflow.
   NOTE(review): lines are elided from this view between the switch
   below and its use; presumably ETYPE is replaced by the unsigned
   type on an elided line — confirm against the full source.  */
3908 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3910 tree utype, minv, maxv;
3912 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3913 for the type in question, as we rely on this here. */
3914 switch (TREE_CODE (etype))
3919 utype = lang_hooks.types.unsigned_type (etype);
3920 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3921 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3922 integer_one_node, 1);
3923 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
/* integer_zerop of the NE_EXPR means maxv == minv, i.e. the type
   wraps as required.  */
3924 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3928 high = fold_convert (etype, high);
3929 low = fold_convert (etype, low);
3930 exp = fold_convert (etype, exp);
3931 value = const_binop (MINUS_EXPR, high, low, 0);
/* If the width computation did not overflow, recurse to build the
   check on the shifted expression exp - LOW over [0, value].  */
3939 if (value != 0 && ! TREE_OVERFLOW (value))
3940 return build_range_check (type,
3941 fold (build2 (MINUS_EXPR, etype, exp, low)),
3942 1, fold_convert (etype, integer_zero_node),
3948 /* Given two ranges, see if we can merge them into one. Return 1 if we
3949 can, 0 if we can't. Set the output range into the specified parameters. */
3952 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3953 tree high0, int in1_p, tree low1, tree high1)
/* A null bound stands for "unbounded" on that side; two null bounds on
   the same side compare equal without calling range_binop.  */
3961 int lowequal = ((low0 == 0 && low1 == 0)
3962 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3963 low0, 0, low1, 0)));
3964 int highequal = ((high0 == 0 && high1 == 0)
3965 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3966 high0, 1, high1, 1)));
3968 /* Make range 0 be the range that starts first, or ends last if they
3969 start at the same value. Swap them if it isn't. */
3970 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3973 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3974 high1, 1, high0, 1))))
3976 temp = in0_p, in0_p = in1_p, in1_p = temp;
3977 tem = low0, low0 = low1, low1 = tem;
3978 tem = high0, high0 = high1, high1 = tem;
3981 /* Now flag two cases, whether the ranges are disjoint or whether the
3982 second range is totally subsumed in the first. Note that the tests
3983 below are simplified by the ones above. */
3984 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3985 high0, 1, low1, 0));
3986 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3987 high1, 1, high0, 1));
3989 /* We now have four cases, depending on whether we are including or
3990 excluding the two ranges. */
/* Case 1 (elided guard presumably: in0_p && in1_p) — intersection of
   two included ranges.  */
3993 /* If they don't overlap, the result is false. If the second range
3994 is a subset it is the result. Otherwise, the range is from the start
3995 of the second to the end of the first. */
3997 in_p = 0, low = high = 0;
3999 in_p = 1, low = low1, high = high1;
4001 in_p = 1, low = low1, high = high0;
/* Case 2 — first range included, second excluded.  */
4004 else if (in0_p && ! in1_p)
4006 /* If they don't overlap, the result is the first range. If they are
4007 equal, the result is false. If the second range is a subset of the
4008 first, and the ranges begin at the same place, we go from just after
4009 the end of the first range to the end of the second. If the second
4010 range is not a subset of the first, or if it is a subset and both
4011 ranges end at the same place, the range starts at the start of the
4012 first range and ends just before the second range.
4013 Otherwise, we can't describe this as a single range. */
4015 in_p = 1, low = low0, high = high0;
4016 else if (lowequal && highequal)
4017 in_p = 0, low = high = 0;
4018 else if (subset && lowequal)
4020 in_p = 1, high = high0;
4021 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4022 integer_one_node, 0);
4024 else if (! subset || highequal)
4026 in_p = 1, low = low0;
4027 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4028 integer_one_node, 0);
/* Case 3 — first range excluded, second included.  */
4034 else if (! in0_p && in1_p)
4036 /* If they don't overlap, the result is the second range. If the second
4037 is a subset of the first, the result is false. Otherwise,
4038 the range starts just after the first range and ends at the
4039 end of the second. */
4041 in_p = 1, low = low1, high = high1;
4042 else if (subset || highequal)
4043 in_p = 0, low = high = 0;
4046 in_p = 1, high = high1;
4047 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4048 integer_one_node, 0);
/* Case 4 — both ranges excluded.  */
4054 /* The case where we are excluding both ranges. Here the complex case
4055 is if they don't overlap. In that case, the only time we have a
4056 range is if they are adjacent. If the second is a subset of the
4057 first, the result is the first. Otherwise, the range to exclude
4058 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: exclude the union if high0 + 1 == low1.  */
4062 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4063 range_binop (PLUS_EXPR, NULL_TREE,
4065 integer_one_node, 1),
4067 in_p = 0, low = low0, high = high1;
4070 /* Canonicalize - [min, x] into - [-, x]. */
4071 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4072 switch (TREE_CODE (TREE_TYPE (low0)))
/* NOTE(review): the case labels of this switch (and the one below)
   are elided from this view; each arm tests whether LOW0 is the
   minimum value representable in its type.  */
4075 if (TYPE_PRECISION (TREE_TYPE (low0))
4076 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4081 if (tree_int_cst_equal (low0,
4082 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4086 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4087 && integer_zerop (low0))
4094 /* Canonicalize - [x, max] into - [x, -]. */
4095 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4096 switch (TREE_CODE (TREE_TYPE (high1)))
4099 if (TYPE_PRECISION (TREE_TYPE (high1))
4100 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4105 if (tree_int_cst_equal (high1,
4106 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* Unsigned max test: HIGH1 + 1 wraps to zero.  */
4110 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4111 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4113 integer_one_node, 1)))
4120 /* The ranges might be also adjacent between the maximum and
4121 minimum values of the given type. For
4122 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4123 return + [x + 1, y - 1]. */
4124 if (low0 == 0 && high1 == 0)
4126 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4127 integer_one_node, 1);
4128 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4129 integer_one_node, 0);
4130 if (low == 0 || high == 0)
4140 in_p = 0, low = low0, high = high0;
4142 in_p = 0, low = low0, high = high1;
/* Write the merged range back through the output parameters.  */
4145 *pin_p = in_p, *plow = low, *phigh = high;
4150 /* Subroutine of fold, looking inside expressions of the form
4151 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4152 of the COND_EXPR. This function is being used also to optimize
4153 A op B ? C : A, by reversing the comparison first.
4155 Return a folded expression whose code is not a COND_EXPR
4156 anymore, or NULL_TREE if no folding opportunity is found. */
4159 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison A op B; ARG00/ARG01 are its two operands.  */
4161 enum tree_code comp_code = TREE_CODE (arg0);
4162 tree arg00 = TREE_OPERAND (arg0, 0);
4163 tree arg01 = TREE_OPERAND (arg0, 1);
4164 tree arg1_type = TREE_TYPE (arg1);
4170 /* If we have A op 0 ? A : -A, consider applying the following
4173 A == 0? A : -A same as -A
4174 A != 0? A : -A same as A
4175 A >= 0? A : -A same as abs (A)
4176 A > 0? A : -A same as abs (A)
4177 A <= 0? A : -A same as -abs (A)
4178 A < 0? A : -A same as -abs (A)
4180 None of these transformations work for modes with signed
4181 zeros. If A is +/-0, the first two transformations will
4182 change the sign of the result (from +0 to -0, or vice
4183 versa). The last four will fix the sign of the result,
4184 even though the original expressions could be positive or
4185 negative, depending on the sign of A.
4187 Note that all these transformations are correct if A is
4188 NaN, since the two alternatives (A and -A) are also NaNs. */
4189 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4190 ? real_zerop (arg01)
4191 : integer_zerop (arg01))
4192 && TREE_CODE (arg2) == NEGATE_EXPR
4193 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* NOTE(review): the switch on COMP_CODE and its case labels are
   elided from this view; the arms below correspond to the table in
   the comment above (EQ -> -A, NE -> A, GE/GT -> abs, LE/LT -> -abs).  */
4198 tem = fold_convert (arg1_type, arg1);
4199 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4202 return pedantic_non_lvalue (fold_convert (type, arg1));
/* abs() may trap for the most-negative value when -ftrapping-math;
   bail out rather than introduce a trap.  */
4205 if (flag_trapping_math)
/* ABS_EXPR wants a signed operand; convert an unsigned A first.  */
4210 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4211 arg1 = fold_convert (lang_hooks.types.signed_type
4212 (TREE_TYPE (arg1)), arg1);
4213 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4214 return pedantic_non_lvalue (fold_convert (type, tem));
4217 if (flag_trapping_math)
4221 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4222 arg1 = fold_convert (lang_hooks.types.signed_type
4223 (TREE_TYPE (arg1)), arg1);
4224 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4225 return negate_expr (fold_convert (type, tem));
/* Any other code must still be a comparison of some kind.  */
4227 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4231 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4232 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4233 both transformations are correct when A is NaN: A != 0
4234 is then true, and A == 0 is false. */
4236 if (integer_zerop (arg01) && integer_zerop (arg2))
4238 if (comp_code == NE_EXPR)
4239 return pedantic_non_lvalue (fold_convert (type, arg1));
4240 else if (comp_code == EQ_EXPR)
4241 return fold_convert (type, integer_zero_node);
4244 /* Try some transformations of A op B ? A : B.
4246 A == B? A : B same as B
4247 A != B? A : B same as A
4248 A >= B? A : B same as max (A, B)
4249 A > B? A : B same as max (B, A)
4250 A <= B? A : B same as min (A, B)
4251 A < B? A : B same as min (B, A)
4253 As above, these transformations don't work in the presence
4254 of signed zeros. For example, if A and B are zeros of
4255 opposite sign, the first two transformations will change
4256 the sign of the result. In the last four, the original
4257 expressions give different results for (A=+0, B=-0) and
4258 (A=-0, B=+0), but the transformed expressions do not.
4260 The first two transformations are correct if either A or B
4261 is a NaN. In the first transformation, the condition will
4262 be false, and B will indeed be chosen. In the case of the
4263 second transformation, the condition A != B will be true,
4264 and A will be chosen.
4266 The conversions to max() and min() are not correct if B is
4267 a number and A is not. The conditions in the original
4268 expressions will be false, so all four give B. The min()
4269 and max() versions would give a NaN instead. */
4270 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4272 tree comp_op0 = arg00;
4273 tree comp_op1 = arg01;
4274 tree comp_type = TREE_TYPE (comp_op0);
4276 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4277 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* NOTE(review): another elided switch on COMP_CODE follows; the
   returns below are its EQ/NE, LE/LT, GE/GT, UNEQ/etc. arms.  */
4287 return pedantic_non_lvalue (fold_convert (type, arg2));
4289 return pedantic_non_lvalue (fold_convert (type, arg1));
4294 /* In C++ a ?: expression can be an lvalue, so put the
4295 operand which will be used if they are equal first
4296 so that we can convert this back to the
4297 corresponding COND_EXPR. */
4298 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4300 comp_op0 = fold_convert (comp_type, comp_op0);
4301 comp_op1 = fold_convert (comp_type, comp_op1);
4302 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4303 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4304 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4305 return pedantic_non_lvalue (fold_convert (type, tem));
4312 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4314 comp_op0 = fold_convert (comp_type, comp_op0);
4315 comp_op1 = fold_convert (comp_type, comp_op1);
4316 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4317 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4318 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4319 return pedantic_non_lvalue (fold_convert (type, tem));
4323 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4324 return pedantic_non_lvalue (fold_convert (type, arg2));
4327 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4328 return pedantic_non_lvalue (fold_convert (type, arg1));
4331 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4336 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4337 we might still be able to simplify this. For example,
4338 if C1 is one less or one more than C2, this might have started
4339 out as a MIN or MAX and been transformed by this function.
4340 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4342 if (INTEGRAL_TYPE_P (type)
4343 && TREE_CODE (arg01) == INTEGER_CST
4344 && TREE_CODE (arg2) == INTEGER_CST)
/* NOTE(review): the switch arms (EQ, LT/LE, GT/GE) for this final
   section are elided from this view.  */
4348 /* We can replace A with C1 in this case. */
4349 arg1 = fold_convert (type, arg01);
4350 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4353 /* If C1 is C2 + 1, this is min(A, C2). */
4354 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4356 && operand_equal_p (arg01,
4357 const_binop (PLUS_EXPR, arg2,
4358 integer_one_node, 0),
4360 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4361 type, arg1, arg2)));
4365 /* If C1 is C2 - 1, this is min(A, C2). */
4366 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4368 && operand_equal_p (arg01,
4369 const_binop (MINUS_EXPR, arg2,
4370 integer_one_node, 0),
4372 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4373 type, arg1, arg2)));
4377 /* If C1 is C2 - 1, this is max(A, C2). */
4378 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4380 && operand_equal_p (arg01,
4381 const_binop (MINUS_EXPR, arg2,
4382 integer_one_node, 0),
4384 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4385 type, arg1, arg2)));
4389 /* If C1 is C2 + 1, this is max(A, C2). */
4390 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4392 && operand_equal_p (arg01,
4393 const_binop (PLUS_EXPR, arg2,
4394 integer_one_node, 0),
4396 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4397 type, arg1, arg2)));
4410 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4411 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4414 /* EXP is some logical combination of boolean tests. See if we can
4415 merge it into some range test. Return the new tree if so. */
4418 fold_range_test (tree exp)
/* Whether EXP is a (possibly short-circuit) OR; ANDs are the other
   callers' case.  */
4420 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4421 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4422 int in0_p, in1_p, in_p;
4423 tree low0, low1, low, high0, high1, high;
/* Decompose each side into a range test: EXP-operand is in/out of
   [low, high].  A null result means that side is not a range test.  */
4424 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4425 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4428 /* If this is an OR operation, invert both sides; we will invert
4429 again at the end. */
4431 in0_p = ! in0_p, in1_p = ! in1_p;
4433 /* If both expressions are the same, if we can merge the ranges, and we
4434 can build the range test, return it or it inverted. If one of the
4435 ranges is always true or always false, consider it to be the same
4436 expression as the other. */
4437 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4438 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4440 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4442 : rhs != 0 ? rhs : integer_zero_node,
4444 return or_op ? invert_truthvalue (tem) : tem;
4446 /* On machines where the branch cost is expensive, if this is a
4447 short-circuited branch and the underlying object on both sides
4448 is the same, make a non-short-circuit operation. */
4449 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4450 && lhs != 0 && rhs != 0
4451 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4452 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4453 && operand_equal_p (lhs, rhs, 0))
4455 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4456 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4457 which cases we can't do this. */
4458 if (simple_operand_p (lhs))
4459 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4460 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4461 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4462 TREE_OPERAND (exp, 1));
4464 else if (lang_hooks.decls.global_bindings_p () == 0
4465 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand once via SAVE_EXPR, then rebuild each
   side as a range check over that saved value.  */
4467 tree common = save_expr (lhs);
4469 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4470 or_op ? ! in0_p : in0_p,
4472 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4473 or_op ? ! in1_p : in1_p,
4475 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4476 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4477 TREE_TYPE (exp), lhs, rhs);
4484 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4485 bit value. Arrange things so the extra bits will be set to zero if and
4486 only if C is signed-extended to its full width. If MASK is nonzero,
4487 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4490 unextend (tree c, int p, int unsignedp, tree mask)
4492 tree type = TREE_TYPE (c);
4493 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* A full-width or unsigned value already has the extension bits it
   needs; return it unchanged (the return itself is on an elided line).  */
4496 if (p == modesize || unsignedp)
4499 /* We work by getting just the sign bit into the low-order bit, then
4500 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit field) as 0 or 1.  */
4502 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4503 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4505 /* We must use a signed type in order to get an arithmetic right shift.
4506 However, we must also avoid introducing accidental overflows, so that
4507 a subsequent call to integer_zerop will work. Hence we must
4508 do the type conversion here. At this point, the constant is either
4509 zero or one, and the conversion to a signed type can never overflow.
4510 We could get an overflow if this conversion is done anywhere else. */
4511 if (TYPE_UNSIGNED (type))
4512 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Shift the bit to the top, then arithmetic-shift back down so the
   bits above position P-1 replicate the sign bit.  */
4514 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4515 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the flipped bits to MASK if one was supplied (the guard
   testing MASK for non-null is on an elided line).  */
4517 temp = const_binop (BIT_AND_EXPR, temp,
4518 fold_convert (TREE_TYPE (c), mask), 0);
4519 /* If necessary, convert the type back to match the type of C. */
4520 if (TYPE_UNSIGNED (type))
4521 temp = fold_convert (type, temp);
/* XOR flips the extension bits exactly when the sign bit is set.  */
4523 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4526 /* Find ways of folding logical expressions of LHS and RHS:
4527 Try to merge two comparisons to the same innermost item.
4528 Look for range tests like "ch >= '0' && ch <= '9'".
4529 Look for combinations of simple terms on machines with expensive branches
4530 and evaluate the RHS unconditionally.
4532 For example, if we have p->a == 2 && p->b == 4 and we can make an
4533 object large enough to span both A and B, we can do this with a comparison
4534 against the object ANDed with the a mask.
4536 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4537 operations to do this with one comparison.
4539 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4540 function and the one above.
4542 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4543 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4545 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4548 We return the simplified tree or 0 if no optimization is possible. */
4551 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4553 /* If this is the "or" of two comparisons, we can do something if
4554 the comparisons are NE_EXPR. If this is the "and", we can do something
4555 if the comparisons are EQ_EXPR. I.e.,
4556 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4558 WANTED_CODE is this operation code. For single bit fields, we can
4559 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4560 comparison for one-bit fields. */
4562 enum tree_code wanted_code;
4563 enum tree_code lcode, rcode;
4564 tree ll_arg, lr_arg, rl_arg, rr_arg;
4565 tree ll_inner, lr_inner, rl_inner, rr_inner;
4566 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4567 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4568 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4569 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4570 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4571 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4572 enum machine_mode lnmode, rnmode;
4573 tree ll_mask, lr_mask, rl_mask, rr_mask;
4574 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4575 tree l_const, r_const;
4576 tree lntype, rntype, result;
4577 int first_bit, end_bit;
4580 /* Start by getting the comparison codes. Fail if anything is volatile.
4581 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4582 it were surrounded with a NE_EXPR. */
4584 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4587 lcode = TREE_CODE (lhs);
4588 rcode = TREE_CODE (rhs);
4590 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4592 lhs = build2 (NE_EXPR, truth_type, lhs,
4593 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4597 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4599 rhs = build2 (NE_EXPR, truth_type, rhs,
4600 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4604 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4605 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4608 ll_arg = TREE_OPERAND (lhs, 0);
4609 lr_arg = TREE_OPERAND (lhs, 1);
4610 rl_arg = TREE_OPERAND (rhs, 0);
4611 rr_arg = TREE_OPERAND (rhs, 1);
4613 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4614 if (simple_operand_p (ll_arg)
4615 && simple_operand_p (lr_arg))
4618 if (operand_equal_p (ll_arg, rl_arg, 0)
4619 && operand_equal_p (lr_arg, rr_arg, 0))
4621 result = combine_comparisons (code, lcode, rcode,
4622 truth_type, ll_arg, lr_arg);
4626 else if (operand_equal_p (ll_arg, rr_arg, 0)
4627 && operand_equal_p (lr_arg, rl_arg, 0))
4629 result = combine_comparisons (code, lcode,
4630 swap_tree_comparison (rcode),
4631 truth_type, ll_arg, lr_arg);
4637 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4638 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4640 /* If the RHS can be evaluated unconditionally and its operands are
4641 simple, it wins to evaluate the RHS unconditionally on machines
4642 with expensive branches. In this case, this isn't a comparison
4643 that can be merged. Avoid doing this if the RHS is a floating-point
4644 comparison since those can trap. */
4646 if (BRANCH_COST >= 2
4647 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4648 && simple_operand_p (rl_arg)
4649 && simple_operand_p (rr_arg))
4651 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4652 if (code == TRUTH_OR_EXPR
4653 && lcode == NE_EXPR && integer_zerop (lr_arg)
4654 && rcode == NE_EXPR && integer_zerop (rr_arg)
4655 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4656 return build2 (NE_EXPR, truth_type,
4657 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4659 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4661 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4662 if (code == TRUTH_AND_EXPR
4663 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4664 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4665 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4666 return build2 (EQ_EXPR, truth_type,
4667 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4669 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4671 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4672 return build2 (code, truth_type, lhs, rhs);
4675 /* See if the comparisons can be merged. Then get all the parameters for
4678 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4679 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4683 ll_inner = decode_field_reference (ll_arg,
4684 &ll_bitsize, &ll_bitpos, &ll_mode,
4685 &ll_unsignedp, &volatilep, &ll_mask,
4687 lr_inner = decode_field_reference (lr_arg,
4688 &lr_bitsize, &lr_bitpos, &lr_mode,
4689 &lr_unsignedp, &volatilep, &lr_mask,
4691 rl_inner = decode_field_reference (rl_arg,
4692 &rl_bitsize, &rl_bitpos, &rl_mode,
4693 &rl_unsignedp, &volatilep, &rl_mask,
4695 rr_inner = decode_field_reference (rr_arg,
4696 &rr_bitsize, &rr_bitpos, &rr_mode,
4697 &rr_unsignedp, &volatilep, &rr_mask,
4700 /* It must be true that the inner operation on the lhs of each
4701 comparison must be the same if we are to be able to do anything.
4702 Then see if we have constants. If not, the same must be true for
4704 if (volatilep || ll_inner == 0 || rl_inner == 0
4705 || ! operand_equal_p (ll_inner, rl_inner, 0))
4708 if (TREE_CODE (lr_arg) == INTEGER_CST
4709 && TREE_CODE (rr_arg) == INTEGER_CST)
4710 l_const = lr_arg, r_const = rr_arg;
4711 else if (lr_inner == 0 || rr_inner == 0
4712 || ! operand_equal_p (lr_inner, rr_inner, 0))
4715 l_const = r_const = 0;
4717 /* If either comparison code is not correct for our logical operation,
4718 fail. However, we can convert a one-bit comparison against zero into
4719 the opposite comparison against that bit being set in the field. */
4721 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4722 if (lcode != wanted_code)
4724 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4726 /* Make the left operand unsigned, since we are only interested
4727 in the value of one bit. Otherwise we are doing the wrong
4736 /* This is analogous to the code for l_const above. */
4737 if (rcode != wanted_code)
4739 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4748 /* After this point all optimizations will generate bit-field
4749 references, which we might not want. */
4750 if (! lang_hooks.can_use_bit_fields_p ())
4753 /* See if we can find a mode that contains both fields being compared on
4754 the left. If we can't, fail. Otherwise, update all constants and masks
4755 to be relative to a field of that size. */
4756 first_bit = MIN (ll_bitpos, rl_bitpos);
4757 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4758 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4759 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4761 if (lnmode == VOIDmode)
4764 lnbitsize = GET_MODE_BITSIZE (lnmode);
4765 lnbitpos = first_bit & ~ (lnbitsize - 1);
4766 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4767 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4769 if (BYTES_BIG_ENDIAN)
4771 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4772 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4775 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4776 size_int (xll_bitpos), 0);
4777 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4778 size_int (xrl_bitpos), 0);
4782 l_const = fold_convert (lntype, l_const);
4783 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4784 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4785 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4786 fold (build1 (BIT_NOT_EXPR,
4790 warning ("comparison is always %d", wanted_code == NE_EXPR);
4792 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4797 r_const = fold_convert (lntype, r_const);
4798 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4799 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4800 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4801 fold (build1 (BIT_NOT_EXPR,
4805 warning ("comparison is always %d", wanted_code == NE_EXPR);
4807 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4811 /* If the right sides are not constant, do the same for it. Also,
4812 disallow this optimization if a size or signedness mismatch occurs
4813 between the left and right sides. */
4816 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4817 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4818 /* Make sure the two fields on the right
4819 correspond to the left without being swapped. */
4820 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4823 first_bit = MIN (lr_bitpos, rr_bitpos);
4824 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4825 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4826 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4828 if (rnmode == VOIDmode)
4831 rnbitsize = GET_MODE_BITSIZE (rnmode);
4832 rnbitpos = first_bit & ~ (rnbitsize - 1);
4833 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4834 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4836 if (BYTES_BIG_ENDIAN)
4838 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4839 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4842 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4843 size_int (xlr_bitpos), 0);
4844 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4845 size_int (xrr_bitpos), 0);
4847 /* Make a mask that corresponds to both fields being compared.
4848 Do this for both items being compared. If the operands are the
4849 same size and the bits being compared are in the same position
4850 then we can do this by masking both and comparing the masked
4852 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4853 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4854 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4856 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4857 ll_unsignedp || rl_unsignedp);
4858 if (! all_ones_mask_p (ll_mask, lnbitsize))
4859 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4861 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4862 lr_unsignedp || rr_unsignedp);
4863 if (! all_ones_mask_p (lr_mask, rnbitsize))
4864 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4866 return build2 (wanted_code, truth_type, lhs, rhs);
4869 /* There is still another way we can do something: If both pairs of
4870 fields being compared are adjacent, we may be able to make a wider
4871 field containing them both.
4873 Note that we still must mask the lhs/rhs expressions. Furthermore,
4874 the mask must be shifted to account for the shift done by
4875 make_bit_field_ref. */
4876 if ((ll_bitsize + ll_bitpos == rl_bitpos
4877 && lr_bitsize + lr_bitpos == rr_bitpos)
4878 || (ll_bitpos == rl_bitpos + rl_bitsize
4879 && lr_bitpos == rr_bitpos + rr_bitsize))
4883 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4884 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4885 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4886 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4888 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4889 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4890 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4891 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4893 /* Convert to the smaller type before masking out unwanted bits. */
4895 if (lntype != rntype)
4897 if (lnbitsize > rnbitsize)
4899 lhs = fold_convert (rntype, lhs);
4900 ll_mask = fold_convert (rntype, ll_mask);
4903 else if (lnbitsize < rnbitsize)
4905 rhs = fold_convert (lntype, rhs);
4906 lr_mask = fold_convert (lntype, lr_mask);
4911 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4912 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4914 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4915 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4917 return build2 (wanted_code, truth_type, lhs, rhs);
4923 /* Handle the case of comparisons with constants. If there is something in
4924 common between the masks, those bits of the constants must be the same.
4925 If not, the condition is always false. Test for this to avoid generating
4926 incorrect code below. */
4927 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4928 if (! integer_zerop (result)
4929 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4930 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4932 if (wanted_code == NE_EXPR)
4934 warning ("%<or%> of unmatched not-equal tests is always 1");
4935 return constant_boolean_node (true, truth_type);
4939 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4940 return constant_boolean_node (false, truth_type);
4944 /* Construct the expression we will return. First get the component
4945 reference we will make. Unless the mask is all ones the width of
4946 that field, perform the mask operation. Then compare with the
4948 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4949 ll_unsignedp || rl_unsignedp);
4951 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4952 if (! all_ones_mask_p (ll_mask, lnbitsize))
4953 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4955 return build2 (wanted_code, truth_type, result,
4956 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4959 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  EQ_EXPR and GT_EXPR are handled directly; the remaining
   comparison codes are reduced to those two with recursive calls using
   logical identities (inversion, or OR of EQ and GT).  Returns the
   simplified tree, or the original T when no optimization applies.
   NOTE(review): several source lines are elided in this excerpt
   (braces, some case labels, local declarations for MINMAX_CONST and
   INNER) -- confirm against the full file before editing.  */
4963 optimize_minmax_comparison (tree t)
4965 tree type = TREE_TYPE (t);
4966 tree arg0 = TREE_OPERAND (t, 0);
4967 enum tree_code op_code;
4968 tree comp_const = TREE_OPERAND (t, 1);
4970 int consts_equal, consts_lt;
/* Strip conversions that do not change value or signedness so the
   MIN_EXPR/MAX_EXPR node itself becomes visible.  */
4973 STRIP_SIGN_NOPS (arg0);
4975 op_code = TREE_CODE (arg0);
4976 minmax_const = TREE_OPERAND (arg0, 1);
/* Precompute the orderings of the MIN/MAX constant vs. the comparison
   constant; the case analysis below is phrased in terms of these.  */
4977 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4978 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4979 inner = TREE_OPERAND (arg0, 0);
4981 /* If something does not permit us to optimize, return the original tree. */
4982 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4983 || TREE_CODE (comp_const) != INTEGER_CST
4984 || TREE_CONSTANT_OVERFLOW (comp_const)
4985 || TREE_CODE (minmax_const) != INTEGER_CST
4986 || TREE_CONSTANT_OVERFLOW (minmax_const))
4989 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4990 and GT_EXPR, doing the rest with recursive calls using logical
4992 switch (TREE_CODE (t))
4994 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE: invert, optimize the inverted comparison, invert back.  */
4996 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* Presumably the GE_EXPR case (its label is not visible here):
   x >= c is rewritten as (x == c) || (x > c).  */
5000 fold (build2 (TRUTH_ORIF_EXPR, type,
5001 optimize_minmax_comparison
5002 (build2 (EQ_EXPR, type, arg0, comp_const)),
5003 optimize_minmax_comparison
5004 (build2 (GT_EXPR, type, arg0, comp_const))));
/* EQ_EXPR case: the examples use 0 as the MIN/MAX constant and compare
   against 0, 5 or -1 to illustrate the three orderings.  */
5007 if (op_code == MAX_EXPR && consts_equal)
5008 /* MAX (X, 0) == 0 -> X <= 0 */
5009 return fold (build2 (LE_EXPR, type, inner, comp_const));
5011 else if (op_code == MAX_EXPR && consts_lt)
5012 /* MAX (X, 0) == 5 -> X == 5 */
5013 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5015 else if (op_code == MAX_EXPR)
5016 /* MAX (X, 0) == -1 -> false */
5017 return omit_one_operand (type, integer_zero_node, inner);
5019 else if (consts_equal)
5020 /* MIN (X, 0) == 0 -> X >= 0 */
5021 return fold (build2 (GE_EXPR, type, inner, comp_const));
5024 /* MIN (X, 0) == 5 -> false */
5025 return omit_one_operand (type, integer_zero_node, inner);
5028 /* MIN (X, 0) == -1 -> X == -1 */
5029 return fold (build2 (EQ_EXPR, type, inner, comp_const));
/* GT_EXPR case.  */
5032 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5033 /* MAX (X, 0) > 0 -> X > 0
5034 MAX (X, 0) > 5 -> X > 5 */
5035 return fold (build2 (GT_EXPR, type, inner, comp_const));
5037 else if (op_code == MAX_EXPR)
5038 /* MAX (X, 0) > -1 -> true */
5039 return omit_one_operand (type, integer_one_node, inner);
5041 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5042 /* MIN (X, 0) > 0 -> false
5043 MIN (X, 0) > 5 -> false */
5044 return omit_one_operand (type, integer_zero_node, inner);
5047 /* MIN (X, 0) > -1 -> X > -1 */
5048 return fold (build2 (GT_EXPR, type, inner, comp_const));
5055 /* T is an integer expression that is being multiplied, divided, or taken a
5056 modulus (CODE says which and what kind of divide or modulus) by a
5057 constant C. See if we can eliminate that operation by folding it with
5058 other operations already in T. WIDE_TYPE, if non-null, is a type that
5059 should be used for the computation if wider than our type.
5061 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5062 (X * 2) + (Y * 4). We must, however, be assured that either the original
5063 expression would not overflow or that overflow is undefined for the type
5064 in the language in question.
5066 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5067 the machine has a multiply-accumulate insn or that this is part of an
5068 addressing calculation.
5070 If we return a non-null expression, it is an equivalent form of the
5071 original computation, but need not be in the original type. */
/* Depth-limited wrapper: the actual rewriting is done by
   extract_muldiv_1 below; this function only bounds the recursion.  */
5074 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5076 /* To avoid exponential search depth, refuse to allow recursion past
5077 three levels. Beyond that (1) it's highly unlikely that we'll find
5078 something interesting and (2) we've probably processed it before
5079 when we built the inner expression. */
/* Delegate to the worker.  (The depth-counter increment/decrement
   around this call is not visible in this excerpt -- see full file.)  */
5088 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv.  Attempts to distribute the multiply /
   divide / modulus (CODE) by constant C into the sub-expressions of T,
   dispatching on TREE_CODE (T).  Returns an equivalent expression with
   the operation absorbed, or a null value when no rewrite is possible.
   NOTE(review): many lines (braces, some case labels, early returns)
   are elided in this excerpt; the comments below only describe what is
   visible.  */
5095 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5097 tree type = TREE_TYPE (t);
5098 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is the computation type: WIDE_TYPE when it is strictly wider
   than T's type, otherwise T's own type.  */
5099 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5100 > GET_MODE_SIZE (TYPE_MODE (type)))
5101 ? wide_type : type);
5103 int same_p = tcode == code;
5104 tree op0 = NULL_TREE, op1 = NULL_TREE;
5106 /* Don't deal with constants of zero here; they confuse the code below. */
5107 if (integer_zerop (c))
/* Cache the operands of T for the case analysis below.  */
5110 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5111 op0 = TREE_OPERAND (t, 0);
5113 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5114 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5116 /* Note that we need not handle conditional operations here since fold
5117 already handles those cases. So just do arithmetic here. */
5121 /* For a constant, we can always simplify if we are a multiply
5122 or (for divide and modulus) if it is a multiple of our constant. */
5123 if (code == MULT_EXPR
5124 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5125 return const_binop (code, fold_convert (ctype, t),
5126 fold_convert (ctype, c), 0);
5129 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5130 /* If op0 is an expression ... */
5131 if ((COMPARISON_CLASS_P (op0)
5132 || UNARY_CLASS_P (op0)
5133 || BINARY_CLASS_P (op0)
5134 || EXPRESSION_CLASS_P (op0))
5135 /* ... and is unsigned, and its type is smaller than ctype,
5136 then we cannot pass through as widening. */
5137 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5138 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5139 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5140 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5141 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5142 /* ... or this is a truncation (t is narrower than op0),
5143 then we cannot pass through this narrowing. */
5144 || (GET_MODE_SIZE (TYPE_MODE (type))
5145 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5146 /* ... or signedness changes for division or modulus,
5147 then we cannot pass through this conversion. */
5148 || (code != MULT_EXPR
5149 && (TYPE_UNSIGNED (ctype)
5150 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5153 /* Pass the constant down and see if we can make a simplification. If
5154 we can, replace this expression with the inner simplification for
5155 possible later conversion to our or some other type. */
5156 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5157 && TREE_CODE (t2) == INTEGER_CST
5158 && ! TREE_CONSTANT_OVERFLOW (t2)
5159 && (0 != (t1 = extract_muldiv (op0, t2, code,
5161 ? ctype : NULL_TREE))))
/* Presumably the ABS_EXPR / NEGATE_EXPR case (label not visible).  */
5166 /* If widening the type changes it from signed to unsigned, then we
5167 must avoid building ABS_EXPR itself as unsigned. */
5168 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5170 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5171 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5173 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5174 return fold_convert (ctype, t1);
/* Otherwise simply push the operation into the unary operand.  */
5180 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5181 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5184 case MIN_EXPR: case MAX_EXPR:
5185 /* If widening the type changes the signedness, then we can't perform
5186 this optimization as that changes the result. */
5187 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5190 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5191 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5192 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying/dividing by a negative constant flips MIN <-> MAX.  */
5194 if (tree_int_cst_sgn (c) < 0)
5195 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5197 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5198 fold_convert (ctype, t2)));
5202 case LSHIFT_EXPR: case RSHIFT_EXPR:
5203 /* If the second operand is constant, this is a multiplication
5204 or floor division, by a power of two, so we can treat it that
5205 way unless the multiplier or divisor overflows. Signed
5206 left-shift overflow is implementation-defined rather than
5207 undefined in C90, so do not convert signed left shift into
5209 if (TREE_CODE (op1) == INTEGER_CST
5210 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5211 /* const_binop may not detect overflow correctly,
5212 so check for it explicitly here. */
5213 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5214 && TREE_INT_CST_HIGH (op1) == 0
5215 && 0 != (t1 = fold_convert (ctype,
5216 const_binop (LSHIFT_EXPR,
5219 && ! TREE_OVERFLOW (t1))
/* Recurse on the equivalent MULT_EXPR / FLOOR_DIV_EXPR form.  */
5220 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5221 ? MULT_EXPR : FLOOR_DIV_EXPR,
5222 ctype, fold_convert (ctype, op0), t1),
5223 c, code, wide_type);
5226 case PLUS_EXPR: case MINUS_EXPR:
5227 /* See if we can eliminate the operation on both sides. If we can, we
5228 can return a new PLUS or MINUS. If we can't, the only remaining
5229 cases where we can do anything are if the second operand is a
5231 t1 = extract_muldiv (op0, c, code, wide_type);
5232 t2 = extract_muldiv (op1, c, code, wide_type);
5233 if (t1 != 0 && t2 != 0
5234 && (code == MULT_EXPR
5235 /* If not multiplication, we can only do this if both operands
5236 are divisible by c. */
5237 || (multiple_of_p (ctype, op0, c)
5238 && multiple_of_p (ctype, op1, c))))
5239 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5240 fold_convert (ctype, t2)));
5242 /* If this was a subtraction, negate OP1 and set it to be an addition.
5243 This simplifies the logic below. */
5244 if (tcode == MINUS_EXPR)
5245 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5247 if (TREE_CODE (op1) != INTEGER_CST)
5250 /* If either OP1 or C are negative, this optimization is not safe for
5251 some of the division and remainder types while for others we need
5252 to change the code. */
5253 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5255 if (code == CEIL_DIV_EXPR)
5256 code = FLOOR_DIV_EXPR;
5257 else if (code == FLOOR_DIV_EXPR)
5258 code = CEIL_DIV_EXPR;
5259 else if (code != MULT_EXPR
5260 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5264 /* If it's a multiply or a division/modulus operation of a multiple
5265 of our constant, do the operation and verify it doesn't overflow. */
5266 if (code == MULT_EXPR
5267 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5269 op1 = const_binop (code, fold_convert (ctype, op1),
5270 fold_convert (ctype, c), 0);
5271 /* We allow the constant to overflow with wrapping semantics. */
5273 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5279 /* If we have an unsigned type is not a sizetype, we cannot widen
5280 the operation since it will change the result if the original
5281 computation overflowed. */
5282 if (TYPE_UNSIGNED (ctype)
5283 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5287 /* If we were able to eliminate our operation from the first side,
5288 apply our operation to the second side and reform the PLUS. */
5289 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5290 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5292 /* The last case is if we are a multiply. In that case, we can
5293 apply the distributive law to commute the multiply and addition
5294 if the multiplication of the constants doesn't overflow. */
5295 if (code == MULT_EXPR)
5296 return fold (build2 (tcode, ctype,
5297 fold (build2 (code, ctype,
5298 fold_convert (ctype, op0),
5299 fold_convert (ctype, c))),
/* Presumably the MULT_EXPR case begins near here (label elided).  */
5305 /* We have a special case here if we are doing something like
5306 (C * 8) % 4 since we know that's zero. */
5307 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5308 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5309 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5310 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5311 return omit_one_operand (type, integer_zero_node, op0);
5313 /* ... fall through ... */
5315 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5316 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5317 /* If we can extract our operation from the LHS, do so and return a
5318 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5319 do something only if the second operand is a constant. */
5321 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5322 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5323 fold_convert (ctype, op1)));
5324 else if (tcode == MULT_EXPR && code == MULT_EXPR
5325 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5326 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5327 fold_convert (ctype, t1)));
5328 else if (TREE_CODE (op1) != INTEGER_CST)
5331 /* If these are the same operation types, we can associate them
5332 assuming no overflow. */
5334 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5335 fold_convert (ctype, c), 0))
5336 && ! TREE_OVERFLOW (t1))
5337 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5339 /* If these operations "cancel" each other, we have the main
5340 optimizations of this pass, which occur when either constant is a
5341 multiple of the other, in which case we replace this with either an
5342 operation or CODE or TCODE.
5344 If we have an unsigned type that is not a sizetype, we cannot do
5345 this since it will change the result if the original computation
5347 if ((! TYPE_UNSIGNED (ctype)
5348 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5350 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5351 || (tcode == MULT_EXPR
5352 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5353 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* OP1 a multiple of C: keep TCODE with the quotient OP1/C.  */
5355 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5356 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5357 fold_convert (ctype,
5358 const_binop (TRUNC_DIV_EXPR,
/* C a multiple of OP1: keep CODE with the quotient C/OP1.  */
5360 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5361 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5362 fold_convert (ctype,
5363 const_binop (TRUNC_DIV_EXPR,
5375 /* Return a node which has the indicated constant VALUE (either 0 or
5376 1), and is of the indicated TYPE.  Uses the shared singleton nodes
   for the C int and boolean types, asks the language hook to build a
   truth value for other BOOLEAN_TYPEs, and falls back to building an
   integer constant of TYPE otherwise. */
5379 constant_boolean_node (int value, tree type)
5381 if (type == integer_type_node)
5382 return value ? integer_one_node : integer_zero_node;
5383 else if (type == boolean_type_node)
5384 return value ? boolean_true_node : boolean_false_node;
5385 else if (TREE_CODE (type) == BOOLEAN_TYPE)
/* Front-end-specific boolean: let the language convert 0/1 itself.  */
5386 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5387 : integer_zero_node);
5389 return build_int_cst (type, value);
5393 /* Return true if expr looks like an ARRAY_REF and set base and
5394 offset to the appropriate trees. If there is no offset,
5395 offset is set to NULL_TREE.
   Recognizes two canonical shapes: `&array + offset' (a PLUS_EXPR
   whose first operand is an ADDR_EXPR) and `&array[index]' (an
   ADDR_EXPR of an ARRAY_REF).  NOTE(review): the `return' statements
   and closing braces are elided in this excerpt.  */
5398 extract_array_ref (tree expr, tree *base, tree *offset)
5400 /* We have to be careful with stripping nops as with the
5401 base type the meaning of the offset can change. */
5402 tree inner_expr = expr;
5403 STRIP_NOPS (inner_expr);
5404 /* One canonical form is a PLUS_EXPR with the first
5405 argument being an ADDR_EXPR with a possible NOP_EXPR
5407 if (TREE_CODE (expr) == PLUS_EXPR)
5409 tree op0 = TREE_OPERAND (expr, 0);
5411 if (TREE_CODE (op0) == ADDR_EXPR)
/* Base is the address operand, offset is the addend.  */
5413 *base = TREE_OPERAND (expr, 0);
5414 *offset = TREE_OPERAND (expr, 1);
5418 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5419 which we transform into an ADDR_EXPR with appropriate
5420 offset. For other arguments to the ADDR_EXPR we assume
5421 zero offset and as such do not care about the ADDR_EXPR
5422 type and strip possible nops from it. */
5423 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5425 tree op0 = TREE_OPERAND (inner_expr, 0);
5426 if (TREE_CODE (op0) == ARRAY_REF)
/* Rebuild &array as the base and use the index as the offset.  */
5428 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5429 *offset = TREE_OPERAND (op0, 1);
/* Plain address: whole expression is the base, no offset.  */
5434 *offset = NULL_TREE;
5443 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5444 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5445 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5446 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5447 COND is the first argument to CODE; otherwise (as in the example
5448 given here), it is the second argument. TYPE is the type of the
5449 original expression. Return NULL_TREE if no simplification is
   possible. */
5453 fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
5454 tree arg, int cond_first_p)
5456 const tree type = TREE_TYPE (t);
/* Types of the conditional operand and of the other operand of T,
   selected according to which side COND occupies.  */
5457 tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
5458 : TREE_TYPE (TREE_OPERAND (t, 1));
5459 tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
5460 : TREE_TYPE (TREE_OPERAND (t, 0));
5461 tree test, true_value, false_value;
5462 tree lhs = NULL_TREE;
5463 tree rhs = NULL_TREE;
5465 /* This transformation is only worthwhile if we don't have to wrap
5466 arg in a SAVE_EXPR, and the operation can be simplified on at least
5467 one of the branches once its pushed inside the COND_EXPR. */
5468 if (!TREE_CONSTANT (arg))
5471 if (TREE_CODE (cond) == COND_EXPR)
5473 test = TREE_OPERAND (cond, 0);
5474 true_value = TREE_OPERAND (cond, 1);
5475 false_value = TREE_OPERAND (cond, 2);
5476 /* If this operand throws an expression, then it does not make
5477 sense to try to perform a logical or arithmetic operation
5479 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5481 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison: treat it as (cond ? true : false).  */
5486 tree testtype = TREE_TYPE (cond);
5488 true_value = constant_boolean_node (true, testtype);
5489 false_value = constant_boolean_node (false, testtype);
5492 arg = fold_convert (arg_type, arg);
/* Apply CODE to ARG and each arm, preserving operand order.  */
5495 true_value = fold_convert (cond_type, true_value);
5496 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5497 : build2 (code, type, arg, true_value));
5501 false_value = fold_convert (cond_type, false_value);
5502 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5503 : build2 (code, type, arg, false_value));
5506 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5507 return fold_convert (type, test);
5511 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5513 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5514 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5515 ADDEND is the same as X.
5517 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5518 and finite. The problematic cases are when X is zero, and its mode
5519 has signed zeros. In the case of rounding towards -infinity,
5520 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5521 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5524 fold_real_zero_addition_p (tree type, tree addend, int negate)
5526 if (!real_zerop (addend))
5529 /* Don't allow the fold with -fsignaling-nans. */
5530 if (HONOR_SNANS (TYPE_MODE (type)))
5533 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5534 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5537 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5538 if (TREE_CODE (addend) == REAL_CST
5539 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5542 /* The mode has signed zeros, and we have to honor their sign.
5543 In this situation, there is only one case we can return true for.
5544 X - 0 is the same as X unless rounding towards -infinity is
   in effect (in which case 0 - 0 yields -0). */
5546 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5549 /* Subroutine of fold() that checks comparisons of built-in math
5550 functions against real constants.
5552 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5553 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5554 is the type of the result and ARG0 and ARG1 are the operands of the
5555 comparison. ARG1 must be a TREE_REAL_CST.
5557 The function returns the constant folded tree if a simplification
5558 can be made, and NULL_TREE otherwise.
   Only the sqrt() handling is visible in this excerpt; the rewrites
   rely on sqrt being monotone and nonnegative, so sqrt(x) op c can be
   restated in terms of x and c*c.  */
5561 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5562 tree type, tree arg0, tree arg1)
5566 if (BUILTIN_SQRT_P (fcode))
/* ARG is the argument of the sqrt call; MODE its floating mode.  */
5568 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5569 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5571 c = TREE_REAL_CST (arg1);
5572 if (REAL_VALUE_NEGATIVE (c))
5574 /* sqrt(x) < y is always false, if y is negative. */
5575 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5576 return omit_one_operand (type, integer_zero_node, arg);
5578 /* sqrt(x) > y is always true, if y is negative and we
5579 don't care about NaNs, i.e. negative values of x. */
5580 if (code == NE_EXPR || !HONOR_NANS (mode))
5581 return omit_one_operand (type, integer_one_node, arg);
5583 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5584 return fold (build2 (GE_EXPR, type, arg,
5585 build_real (TREE_TYPE (arg), dconst0)));
5587 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the constant in the target's format; C2 = c*c.  */
5591 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5592 real_convert (&c2, mode, &c2);
5594 if (REAL_VALUE_ISINF (c2))
5596 /* sqrt(x) > y is x == +Inf, when y is very large. */
5597 if (HONOR_INFINITIES (mode))
5598 return fold (build2 (EQ_EXPR, type, arg,
5599 build_real (TREE_TYPE (arg), c2)));
5601 /* sqrt(x) > y is always false, when y is very large
5602 and we don't care about infinities. */
5603 return omit_one_operand (type, integer_zero_node, arg);
5606 /* sqrt(x) > c is the same as x > c*c. */
5607 return fold (build2 (code, type, arg,
5608 build_real (TREE_TYPE (arg), c2)));
5610 else if (code == LT_EXPR || code == LE_EXPR)
5614 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5615 real_convert (&c2, mode, &c2);
5617 if (REAL_VALUE_ISINF (c2))
5619 /* sqrt(x) < y is always true, when y is a very large
5620 value and we don't care about NaNs or Infinities. */
5621 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5622 return omit_one_operand (type, integer_one_node, arg);
5624 /* sqrt(x) < y is x != +Inf when y is very large and we
5625 don't care about NaNs. */
5626 if (! HONOR_NANS (mode))
5627 return fold (build2 (NE_EXPR, type, arg,
5628 build_real (TREE_TYPE (arg), c2)));
5630 /* sqrt(x) < y is x >= 0 when y is very large and we
5631 don't care about Infinities. */
5632 if (! HONOR_INFINITIES (mode))
5633 return fold (build2 (GE_EXPR, type, arg,
5634 build_real (TREE_TYPE (arg), dconst0)));
5636 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5637 if (lang_hooks.decls.global_bindings_p () != 0
5638 || CONTAINS_PLACEHOLDER_P (arg))
/* ARG is evaluated twice below, so wrap it in a SAVE_EXPR.  */
5641 arg = save_expr (arg);
5642 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5643 fold (build2 (GE_EXPR, type, arg,
5644 build_real (TREE_TYPE (arg),
5646 fold (build2 (NE_EXPR, type, arg,
5647 build_real (TREE_TYPE (arg),
5651 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5652 if (! HONOR_NANS (mode))
5653 return fold (build2 (code, type, arg,
5654 build_real (TREE_TYPE (arg), c2)));
5656 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5657 if (lang_hooks.decls.global_bindings_p () == 0
5658 && ! CONTAINS_PLACEHOLDER_P (arg))
5660 arg = save_expr (arg);
5661 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5662 fold (build2 (GE_EXPR, type, arg,
5663 build_real (TREE_TYPE (arg),
5665 fold (build2 (code, type, arg,
5666 build_real (TREE_TYPE (arg),
5675 /* Subroutine of fold() that optimizes comparisons against Infinities,
5676 either +Inf or -Inf.
5678 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5679 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5680 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5682 The function returns the constant folded tree if a simplification
5683 can be made, and NULL_TREE otherwise. */
5686 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5688 enum machine_mode mode;
5689 REAL_VALUE_TYPE max;
5693 mode = TYPE_MODE (TREE_TYPE (arg0));
5695 /* For negative infinity swap the sense of the comparison. */
5696 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5698 code = swap_tree_comparison (code);
/* The switch on CODE begins here; its `switch' line is elided.  */
5703 /* x > +Inf is always false, if we ignore sNaNs. */
5704 if (HONOR_SNANS (mode))
5706 return omit_one_operand (type, integer_zero_node, arg0);
5709 /* x <= +Inf is always true, if we don't care about NaNs. */
5710 if (! HONOR_NANS (mode))
5711 return omit_one_operand (type, integer_one_node, arg0);
5713 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5714 if (lang_hooks.decls.global_bindings_p () == 0
5715 && ! CONTAINS_PLACEHOLDER_P (arg0))
5717 arg0 = save_expr (arg0);
5718 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5724 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5725 real_maxval (&max, neg, mode);
5726 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5727 arg0, build_real (TREE_TYPE (arg0), max)));
5730 /* x < +Inf is always equal to x <= DBL_MAX. */
5731 real_maxval (&max, neg, mode);
5732 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5733 arg0, build_real (TREE_TYPE (arg0), max)));
5736 /* x != +Inf is always equal to !(x > DBL_MAX). */
5737 real_maxval (&max, neg, mode);
5738 if (! HONOR_NANS (mode))
5739 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5740 arg0, build_real (TREE_TYPE (arg0), max)));
5742 /* The transformation below creates non-gimple code and thus is
5743 not appropriate if we are in gimple form. */
5747 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5748 arg0, build_real (TREE_TYPE (arg0), max)));
5749 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5758 /* Subroutine of fold() that optimizes comparisons of a division by
5759 a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.
5762 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5763 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5764 are the operands of the comparison. ARG1 must be an INTEGER_CST
   (the code below reads its TREE_INT_CST_LOW/HIGH fields; the original
   comment's "TREE_REAL_CST" was a copy-paste error).
5766 The function returns the constant folded tree if a simplification
5767 can be made, and NULL_TREE otherwise.
   Strategy: compute the half-open range [LO, HI] of values of X for
   which X/C1 equals C2, then express the comparison as a range check
   on X, watching for overflow at each step.  */
5770 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5772 tree prod, tmp, hi, lo;
5773 tree arg00 = TREE_OPERAND (arg0, 0);
5774 tree arg01 = TREE_OPERAND (arg0, 1);
5775 unsigned HOST_WIDE_INT lpart;
5776 HOST_WIDE_INT hpart;
5779 /* We have to do this the hard way to detect unsigned overflow.
5780 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5781 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5782 TREE_INT_CST_HIGH (arg01),
5783 TREE_INT_CST_LOW (arg1),
5784 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5785 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5786 prod = force_fit_type (prod, -1, overflow, false);
5788 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
/* Unsigned case: LO = C1*C2, HI = LO + (C1 - 1).  */
5790 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5793 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5794 overflow = add_double (TREE_INT_CST_LOW (prod),
5795 TREE_INT_CST_HIGH (prod),
5796 TREE_INT_CST_LOW (tmp),
5797 TREE_INT_CST_HIGH (tmp),
5799 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5800 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5801 TREE_CONSTANT_OVERFLOW (prod));
5803 else if (tree_int_cst_sgn (arg01) >= 0)
/* Signed, positive divisor: bounds depend on the sign of C2.  */
5805 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5806 switch (tree_int_cst_sgn (arg1))
5809 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5814 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5819 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5829 /* A negative divisor reverses the relational operators. */
5830 code = swap_tree_comparison (code);
5832 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5833 switch (tree_int_cst_sgn (arg1))
5836 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5841 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5846 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Rewrite CODE as a range check on ARG00 against [LO, HI]; an
   overflowed bound means that side of the range is unbounded.
   (The `switch (code)' line and its case labels are elided here.)  */
5858 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5859 return omit_one_operand (type, integer_zero_node, arg00);
5860 if (TREE_OVERFLOW (hi))
5861 return fold (build2 (GE_EXPR, type, arg00, lo));
5862 if (TREE_OVERFLOW (lo))
5863 return fold (build2 (LE_EXPR, type, arg00, hi));
5864 return build_range_check (type, arg00, 1, lo, hi);
5867 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5868 return omit_one_operand (type, integer_one_node, arg00);
5869 if (TREE_OVERFLOW (hi))
5870 return fold (build2 (LT_EXPR, type, arg00, lo));
5871 if (TREE_OVERFLOW (lo))
5872 return fold (build2 (GT_EXPR, type, arg00, hi));
5873 return build_range_check (type, arg00, 0, lo, hi);
5876 if (TREE_OVERFLOW (lo))
5877 return omit_one_operand (type, integer_zero_node, arg00);
5878 return fold (build2 (LT_EXPR, type, arg00, lo));
5881 if (TREE_OVERFLOW (hi))
5882 return omit_one_operand (type, integer_one_node, arg00);
5883 return fold (build2 (LE_EXPR, type, arg00, hi));
5886 if (TREE_OVERFLOW (hi))
5887 return omit_one_operand (type, integer_zero_node, arg00);
5888 return fold (build2 (GT_EXPR, type, arg00, hi));
5891 if (TREE_OVERFLOW (lo))
5892 return omit_one_operand (type, integer_one_node, arg00);
5893 return fold (build2 (GE_EXPR, type, arg00, lo));
5903 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5904 equality/inequality test, then return a simplified form of
5905 the test using shifts and logical operations. Otherwise return
5906 NULL. TYPE is the desired result type. */
/* Simplifies (A & C) ==/!= 0 where C is a power of two.
   NOTE(review): interior lines are elided in this extract (original
   numbering jumps), so braces and the #else arm of the conditional
   below are missing; added comments are hedged accordingly.  */
5909 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5912 /* If this is testing a single bit, we can optimize the test. */
5913 if ((code == NE_EXPR || code == EQ_EXPR)
5914 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5915 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5917 tree inner = TREE_OPERAND (arg0, 0);
5918 tree type = TREE_TYPE (arg0);
/* BITNUM is the index of the single set bit in the mask.  */
5919 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5920 enum machine_mode operand_mode = TYPE_MODE (type);
5922 tree signed_type, unsigned_type, intermediate_type;
5925 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5926 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5927 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5928 if (arg00 != NULL_TREE
5929 /* This is only a win if casting to a signed type is cheap,
5930 i.e. when arg00's type is not a partial mode. */
5931 && TYPE_PRECISION (TREE_TYPE (arg00))
5932 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5934 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5935 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5936 result_type, fold_convert (stype, arg00),
5937 fold_convert (stype, integer_zero_node)));
5940 /* Otherwise we have (A & C) != 0 where C is a single bit,
5941 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5942 Similarly for (A & C) == 0. */
5944 /* If INNER is a right shift of a constant and it plus BITNUM does
5945 not overflow, adjust BITNUM and INNER. */
5946 if (TREE_CODE (inner) == RSHIFT_EXPR
5947 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5948 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5949 && bitnum < TYPE_PRECISION (type)
5950 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5951 bitnum - TYPE_PRECISION (type)))
/* Fold the shift into the bit number so only one shift is emitted.  */
5953 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5954 inner = TREE_OPERAND (inner, 0);
5957 /* If we are going to be able to omit the AND below, we must do our
5958 operations as unsigned. If we must use the AND, we have a choice.
5959 Normally unsigned is faster, but for some machines signed is. */
5960 #ifdef LOAD_EXTEND_OP
5961 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5962 && !flag_syntax_only) ? 0 : 1;
/* NOTE(review): the #else/#endif arm setting ops_unsigned when
   LOAD_EXTEND_OP is undefined was elided from this extract.  */
5967 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5968 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5969 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5970 inner = fold_convert (intermediate_type, inner);
/* Build ((A >> bitnum) & 1), XOR'ed with 1 for the == 0 form.  */
5973 inner = build2 (RSHIFT_EXPR, intermediate_type,
5974 inner, size_int (bitnum));
5976 if (code == EQ_EXPR)
5977 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5978 inner, integer_one_node));
5980 /* Put the AND last so it can combine with more things. */
5981 inner = build2 (BIT_AND_EXPR, intermediate_type,
5982 inner, integer_one_node);
5984 /* Make sure to return the proper type. */
5985 inner = fold_convert (result_type, inner);
5992 /* Check whether we are allowed to reorder operands arg0 and arg1,
5993 such that the evaluation of arg1 occurs before arg0. */
/* Return nonzero iff it is safe to evaluate ARG1 before ARG0.
   NOTE(review): the bodies of the two guard `if`s (presumably
   "return 1;") are elided in this extract — confirm against the full
   source.  */
5996 reorder_operands_p (tree arg0, tree arg1)
/* With no mandated evaluation order, or when either operand is a
   constant, reordering looks unconditionally allowed.  */
5998 if (! flag_evaluation_order)
6000 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise only reorder when neither operand has side effects.  */
6002 return ! TREE_SIDE_EFFECTS (arg0)
6003 && ! TREE_SIDE_EFFECTS (arg1);
6006 /* Test whether it is preferable two swap two operands, ARG0 and
6007 ARG1, for example because ARG0 is an integer constant and ARG1
6008 isn't. If REORDER is true, only recommend swapping if we can
6009 evaluate the operands in reverse order. */
/* Decide whether ARG0 and ARG1 should be swapped so the "more constant"
   operand ends up second (canonical form).
   NOTE(review): the `return 0;` / `return 1;` bodies of the tests below
   are elided in this extract; only the test conditions are visible.  */
6012 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6014 STRIP_SIGN_NOPS (arg0);
6015 STRIP_SIGN_NOPS (arg1);
/* Constants rank INTEGER_CST, then REAL_CST, then COMPLEX_CST, then any
   other TREE_CONSTANT; each pair of tests presumably answers "keep" for
   arg1 and "swap" for arg0 — TODO confirm in the full source.  */
6017 if (TREE_CODE (arg1) == INTEGER_CST)
6019 if (TREE_CODE (arg0) == INTEGER_CST)
6022 if (TREE_CODE (arg1) == REAL_CST)
6024 if (TREE_CODE (arg0) == REAL_CST)
6027 if (TREE_CODE (arg1) == COMPLEX_CST)
6029 if (TREE_CODE (arg0) == COMPLEX_CST)
6032 if (TREE_CONSTANT (arg1))
6034 if (TREE_CONSTANT (arg0))
/* When REORDER is requested and evaluation order matters, refuse to
   swap operands that have side effects.  */
6040 if (reorder && flag_evaluation_order
6041 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6049 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6050 for commutative and comparison operators. Ensuring a canonical
6051 form allows the optimizers to find additional redundancies without
6052 having to explicitly check for both orderings. */
6053 if (TREE_CODE (arg0) == SSA_NAME
6054 && TREE_CODE (arg1) == SSA_NAME
6055 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6061 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6062 ARG0 is extended to a wider type. */
/* Fold a comparison whose first operand was widened from a narrower
   type: either redo the comparison in the narrower type, or fold it to
   a constant when ARG1 lies outside the narrower type's range.
   NOTE(review): early-return bodies, the switch on CODE and its case
   labels are elided in this extract; added comments are hedged.  */
6065 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6067 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6069 tree shorter_type, outer_type;
/* Nothing to do if ARG0 was not actually widened.  */
6073 if (arg0_unw == arg0)
6075 shorter_type = TREE_TYPE (arg0_unw);
6077 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6080 arg1_unw = get_unwidened (arg1, shorter_type);
6084 /* If possible, express the comparison in the shorter mode. */
6085 if ((code == EQ_EXPR || code == NE_EXPR
6086 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6087 && (TREE_TYPE (arg1_unw) == shorter_type
6088 || (TREE_CODE (arg1_unw) == INTEGER_CST
6089 && TREE_CODE (shorter_type) == INTEGER_TYPE
6090 && int_fits_type_p (arg1_unw, shorter_type))))
6091 return fold (build (code, type, arg0_unw,
6092 fold_convert (shorter_type, arg1_unw)));
6094 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6097 /* If we are comparing with the integer that does not fit into the range
6098 of the shorter type, the result is known. */
6099 outer_type = TREE_TYPE (arg1_unw);
6100 min = lower_bound_in_type (outer_type, shorter_type);
6101 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE/BELOW record whether ARG1 lies above MAX or below MIN.  */
6103 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6105 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* The constant results below presumably belong to EQ/NE/LT-LE/GT-GE
   cases of a switch on CODE (labels elided) — TODO confirm.  */
6112 return omit_one_operand (type, integer_zero_node, arg0);
6117 return omit_one_operand (type, integer_one_node, arg0);
6123 return omit_one_operand (type, integer_one_node, arg0);
6125 return omit_one_operand (type, integer_zero_node, arg0);
6130 return omit_one_operand (type, integer_zero_node, arg0);
6132 return omit_one_operand (type, integer_one_node, arg0);
6141 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6142 ARG0 just the signedness is changed. */
/* Fold a comparison where ARG0 is a NOP_EXPR that only changes
   signedness: strip the conversion and redo the comparison on the
   inner operand, converting ARG1 to the inner type.
   NOTE(review): early-return bodies and part of the condition at
   original line 6166 are elided in this extract.  */
6145 fold_sign_changed_comparison (enum tree_code code, tree type,
6146 tree arg0, tree arg1)
6148 tree arg0_inner, tmp;
6149 tree inner_type, outer_type;
6151 if (TREE_CODE (arg0) != NOP_EXPR)
6154 outer_type = TREE_TYPE (arg0);
6155 arg0_inner = TREE_OPERAND (arg0, 0);
6156 inner_type = TREE_TYPE (arg0_inner);
/* Only a pure sign change qualifies: the precision must match.  */
6158 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6161 if (TREE_CODE (arg1) != INTEGER_CST
6162 && !(TREE_CODE (arg1) == NOP_EXPR
6163 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
/* If signedness actually differs, presumably only EQ/NE are safe —
   the rest of this condition is elided; confirm in the full source.  */
6166 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-interpret the constant in the inner type, preserving overflow
   flags.  */
6171 if (TREE_CODE (arg1) == INTEGER_CST)
6173 tmp = build_int_cst_wide (inner_type,
6174 TREE_INT_CST_LOW (arg1),
6175 TREE_INT_CST_HIGH (arg1));
6176 arg1 = force_fit_type (tmp, 0,
6177 TREE_OVERFLOW (arg1),
6178 TREE_CONSTANT_OVERFLOW (arg1));
6181 arg1 = fold_convert (inner_type, arg1);
6183 return fold (build (code, type, arg0_inner, arg1));
6186 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6187 step of the array. ADDR is the address. MULT is the multiplicative expression.
6188 If the function succeeds, the new address expression is returned. Otherwise
6189 NULL_TREE is returned. */
/* Try to replace &a[idx] CODE s * delta with &a[idx CODE delta] when S
   equals the array's element step.  Returns the new ADDR_EXPR or (in
   elided code) NULL_TREE on failure.
   NOTE(review): several statement bodies, the inner copy loop and
   early returns are elided in this extract; comments are hedged.  */
6192 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6194 tree s, delta, step;
6195 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6196 tree ref = TREE_OPERAND (addr, 0), pref;
/* Split MULT into a constant step S and a variable DELTA; the
   assignments to s/delta are elided here.  */
6203 if (TREE_CODE (arg0) == INTEGER_CST)
6208 else if (TREE_CODE (arg1) == INTEGER_CST)
/* Walk down the component references looking for an ARRAY_REF whose
   element size matches S.  */
6216 for (;; ref = TREE_OPERAND (ref, 0))
6218 if (TREE_CODE (ref) == ARRAY_REF)
6220 step = array_ref_element_size (ref);
6222 if (TREE_CODE (step) != INTEGER_CST)
6225 itype = TREE_TYPE (step);
6227 /* If the type sizes do not match, we might run into problems
6228 when one of them would overflow. */
6229 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6232 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6235 delta = fold_convert (itype, delta);
6239 if (!handled_component_p (ref))
6243 /* We found the suitable array reference. So copy everything up to it,
6244 and replace the index. */
6246 pref = TREE_OPERAND (addr, 0);
6247 ret = copy_node (pref);
/* Copy each intermediate component node so the original tree is not
   mutated (loop header elided in this extract).  */
6252 pref = TREE_OPERAND (pref, 0);
6253 TREE_OPERAND (pos, 0) = copy_node (pref);
6254 pos = TREE_OPERAND (pos, 0);
/* Apply CODE (PLUS/MINUS) to the array index with DELTA.  */
6257 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6258 TREE_OPERAND (pos, 1),
6261 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6265 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6266 means A >= Y && A != MAX, but in this case we know that
6267 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* Given BOUND = (A < X) and INEQ = (A + 1 > Y), fold INEQ to the
   non-sharp form A >= Y, valid because BOUND guarantees A < X <= MAX.
   NOTE(review): the else-branches returning NULL_TREE on mismatch are
   elided in this extract.  */
6270 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6272 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from BOUND, which must be A < X or X > A.  */
6274 if (TREE_CODE (bound) == LT_EXPR)
6275 a = TREE_OPERAND (bound, 0);
6276 else if (TREE_CODE (bound) == GT_EXPR)
6277 a = TREE_OPERAND (bound, 1);
6281 typea = TREE_TYPE (a);
6282 if (!INTEGRAL_TYPE_P (typea)
6283 && !POINTER_TYPE_P (typea))
/* Extract A1 (the A+1 side) and Y from INEQ, orientation-sensitive.  */
6286 if (TREE_CODE (ineq) == LT_EXPR)
6288 a1 = TREE_OPERAND (ineq, 1);
6289 y = TREE_OPERAND (ineq, 0);
6291 else if (TREE_CODE (ineq) == GT_EXPR)
6293 a1 = TREE_OPERAND (ineq, 0);
6294 y = TREE_OPERAND (ineq, 1);
6299 if (TREE_TYPE (a1) != typea)
/* The transformation only applies when A1 - A folds to exactly 1.  */
6302 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6303 if (!integer_onep (diff))
6306 return fold (build2 (GE_EXPR, type, a, y));
6309 /* Perform constant folding and related simplification of EXPR.
6310 The related simplifications include x*1 => x, x*0 => 0, etc.,
6311 and application of the associative law.
6312 NOP_EXPR conversions may be removed freely (as long as we
6313 are careful not to change the type of the overall expression).
6314 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6315 but we can constant-fold them if they have constant operands. */
6317 #ifdef ENABLE_FOLD_CHECKING
6318 # define fold(x) fold_1 (x)
6319 static tree fold_1 (tree);
6325 const tree t = expr;
6326 const tree type = TREE_TYPE (expr);
6327 tree t1 = NULL_TREE;
6329 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6330 enum tree_code code = TREE_CODE (t);
6331 enum tree_code_class kind = TREE_CODE_CLASS (code);
6333 /* WINS will be nonzero when the switch is done
6334 if all operands are constant. */
6337 /* Return right away if a constant. */
6338 if (kind == tcc_constant)
6341 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6345 /* Special case for conversion ops that can have fixed point args. */
6346 arg0 = TREE_OPERAND (t, 0);
6348 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6350 STRIP_SIGN_NOPS (arg0);
6352 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6353 subop = TREE_REALPART (arg0);
6357 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6358 && TREE_CODE (subop) != REAL_CST)
6359 /* Note that TREE_CONSTANT isn't enough:
6360 static var addresses are constant but we can't
6361 do arithmetic on them. */
6364 else if (IS_EXPR_CODE_CLASS (kind))
6366 int len = TREE_CODE_LENGTH (code);
6368 for (i = 0; i < len; i++)
6370 tree op = TREE_OPERAND (t, i);
6374 continue; /* Valid for CALL_EXPR, at least. */
6376 /* Strip any conversions that don't change the mode. This is
6377 safe for every expression, except for a comparison expression
6378 because its signedness is derived from its operands. So, in
6379 the latter case, only strip conversions that don't change the
6382 Note that this is done as an internal manipulation within the
6383 constant folder, in order to find the simplest representation
6384 of the arguments so that their form can be studied. In any
6385 cases, the appropriate type conversions should be put back in
6386 the tree that will get out of the constant folder. */
6387 if (kind == tcc_comparison)
6388 STRIP_SIGN_NOPS (op);
6392 if (TREE_CODE (op) == COMPLEX_CST)
6393 subop = TREE_REALPART (op);
6397 if (TREE_CODE (subop) != INTEGER_CST
6398 && TREE_CODE (subop) != REAL_CST)
6399 /* Note that TREE_CONSTANT isn't enough:
6400 static var addresses are constant but we can't
6401 do arithmetic on them. */
6411 /* If this is a commutative operation, and ARG0 is a constant, move it
6412 to ARG1 to reduce the number of tests below. */
6413 if (commutative_tree_code (code)
6414 && tree_swap_operands_p (arg0, arg1, true))
6415 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6416 TREE_OPERAND (t, 0)));
6418 /* Now WINS is set as described above,
6419 ARG0 is the first operand of EXPR,
6420 and ARG1 is the second operand (if it has more than one operand).
6422 First check for cases where an arithmetic operation is applied to a
6423 compound, conditional, or comparison operation. Push the arithmetic
6424 operation inside the compound or conditional to see if any folding
6425 can then be done. Convert comparison to conditional for this purpose.
6426 This also optimizes non-constant cases that used to be done in
6429 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6430 one of the operands is a comparison and the other is a comparison, a
6431 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6432 code below would make the expression more complex. Change it to a
6433 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6434 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6436 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6437 || code == EQ_EXPR || code == NE_EXPR)
6438 && ((truth_value_p (TREE_CODE (arg0))
6439 && (truth_value_p (TREE_CODE (arg1))
6440 || (TREE_CODE (arg1) == BIT_AND_EXPR
6441 && integer_onep (TREE_OPERAND (arg1, 1)))))
6442 || (truth_value_p (TREE_CODE (arg1))
6443 && (truth_value_p (TREE_CODE (arg0))
6444 || (TREE_CODE (arg0) == BIT_AND_EXPR
6445 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6447 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6448 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6450 type, fold_convert (boolean_type_node, arg0),
6451 fold_convert (boolean_type_node, arg1)));
6453 if (code == EQ_EXPR)
6454 tem = invert_truthvalue (tem);
6459 if (TREE_CODE_CLASS (code) == tcc_unary)
6461 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6462 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6463 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6464 else if (TREE_CODE (arg0) == COND_EXPR)
6466 tree arg01 = TREE_OPERAND (arg0, 1);
6467 tree arg02 = TREE_OPERAND (arg0, 2);
6468 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6469 arg01 = fold (build1 (code, type, arg01));
6470 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6471 arg02 = fold (build1 (code, type, arg02));
6472 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6475 /* If this was a conversion, and all we did was to move into
6476 inside the COND_EXPR, bring it back out. But leave it if
6477 it is a conversion from integer to integer and the
6478 result precision is no wider than a word since such a
6479 conversion is cheap and may be optimized away by combine,
6480 while it couldn't if it were outside the COND_EXPR. Then return
6481 so we don't get into an infinite recursion loop taking the
6482 conversion out and then back in. */
6484 if ((code == NOP_EXPR || code == CONVERT_EXPR
6485 || code == NON_LVALUE_EXPR)
6486 && TREE_CODE (tem) == COND_EXPR
6487 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6488 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6489 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6490 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6491 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6492 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6493 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6495 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6496 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6497 || flag_syntax_only))
6498 tem = build1 (code, type,
6500 TREE_TYPE (TREE_OPERAND
6501 (TREE_OPERAND (tem, 1), 0)),
6502 TREE_OPERAND (tem, 0),
6503 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6504 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6507 else if (COMPARISON_CLASS_P (arg0))
6509 if (TREE_CODE (type) == BOOLEAN_TYPE)
6511 arg0 = copy_node (arg0);
6512 TREE_TYPE (arg0) = type;
6515 else if (TREE_CODE (type) != INTEGER_TYPE)
6516 return fold (build3 (COND_EXPR, type, arg0,
6517 fold (build1 (code, type,
6519 fold (build1 (code, type,
6520 integer_zero_node))));
6523 else if (TREE_CODE_CLASS (code) == tcc_comparison
6524 && TREE_CODE (arg0) == COMPOUND_EXPR)
6525 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6526 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6527 else if (TREE_CODE_CLASS (code) == tcc_comparison
6528 && TREE_CODE (arg1) == COMPOUND_EXPR)
6529 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6530 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6531 else if (TREE_CODE_CLASS (code) == tcc_binary
6532 || TREE_CODE_CLASS (code) == tcc_comparison)
6534 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6535 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6536 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6538 if (TREE_CODE (arg1) == COMPOUND_EXPR
6539 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6540 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6541 fold (build2 (code, type,
6542 arg0, TREE_OPERAND (arg1, 1))));
6544 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6546 tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
6547 /*cond_first_p=*/1);
6548 if (tem != NULL_TREE)
6552 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6554 tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
6555 /*cond_first_p=*/0);
6556 if (tem != NULL_TREE)
6564 return fold (DECL_INITIAL (t));
6569 case FIX_TRUNC_EXPR:
6571 case FIX_FLOOR_EXPR:
6572 case FIX_ROUND_EXPR:
6573 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6574 return TREE_OPERAND (t, 0);
6576 /* Handle cases of two conversions in a row. */
6577 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6578 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6580 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6581 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6582 int inside_int = INTEGRAL_TYPE_P (inside_type);
6583 int inside_ptr = POINTER_TYPE_P (inside_type);
6584 int inside_float = FLOAT_TYPE_P (inside_type);
6585 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6586 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6587 int inter_int = INTEGRAL_TYPE_P (inter_type);
6588 int inter_ptr = POINTER_TYPE_P (inter_type);
6589 int inter_float = FLOAT_TYPE_P (inter_type);
6590 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6591 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6592 int final_int = INTEGRAL_TYPE_P (type);
6593 int final_ptr = POINTER_TYPE_P (type);
6594 int final_float = FLOAT_TYPE_P (type);
6595 unsigned int final_prec = TYPE_PRECISION (type);
6596 int final_unsignedp = TYPE_UNSIGNED (type);
6598 /* In addition to the cases of two conversions in a row
6599 handled below, if we are converting something to its own
6600 type via an object of identical or wider precision, neither
6601 conversion is needed. */
6602 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6603 && ((inter_int && final_int) || (inter_float && final_float))
6604 && inter_prec >= final_prec)
6605 return fold (build1 (code, type,
6606 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6608 /* Likewise, if the intermediate and final types are either both
6609 float or both integer, we don't need the middle conversion if
6610 it is wider than the final type and doesn't change the signedness
6611 (for integers). Avoid this if the final type is a pointer
6612 since then we sometimes need the inner conversion. Likewise if
6613 the outer has a precision not equal to the size of its mode. */
6614 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6615 || (inter_float && inside_float))
6616 && inter_prec >= inside_prec
6617 && (inter_float || inter_unsignedp == inside_unsignedp)
6618 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6619 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6621 return fold (build1 (code, type,
6622 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6624 /* If we have a sign-extension of a zero-extended value, we can
6625 replace that by a single zero-extension. */
6626 if (inside_int && inter_int && final_int
6627 && inside_prec < inter_prec && inter_prec < final_prec
6628 && inside_unsignedp && !inter_unsignedp)
6629 return fold (build1 (code, type,
6630 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6632 /* Two conversions in a row are not needed unless:
6633 - some conversion is floating-point (overstrict for now), or
6634 - the intermediate type is narrower than both initial and
6636 - the intermediate type and innermost type differ in signedness,
6637 and the outermost type is wider than the intermediate, or
6638 - the initial type is a pointer type and the precisions of the
6639 intermediate and final types differ, or
6640 - the final type is a pointer type and the precisions of the
6641 initial and intermediate types differ. */
6642 if (! inside_float && ! inter_float && ! final_float
6643 && (inter_prec > inside_prec || inter_prec > final_prec)
6644 && ! (inside_int && inter_int
6645 && inter_unsignedp != inside_unsignedp
6646 && inter_prec < final_prec)
6647 && ((inter_unsignedp && inter_prec > inside_prec)
6648 == (final_unsignedp && final_prec > inter_prec))
6649 && ! (inside_ptr && inter_prec != final_prec)
6650 && ! (final_ptr && inside_prec != inter_prec)
6651 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6652 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6654 return fold (build1 (code, type,
6655 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6658 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6659 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6660 /* Detect assigning a bitfield. */
6661 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6662 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6664 /* Don't leave an assignment inside a conversion
6665 unless assigning a bitfield. */
6666 tree prev = TREE_OPERAND (t, 0);
6667 tem = copy_node (t);
6668 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6669 /* First do the assignment, then return converted constant. */
6670 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6671 TREE_NO_WARNING (tem) = 1;
6672 TREE_USED (tem) = 1;
6676 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6677 constants (if x has signed type, the sign bit cannot be set
6678 in c). This folds extension into the BIT_AND_EXPR. */
6679 if (INTEGRAL_TYPE_P (type)
6680 && TREE_CODE (type) != BOOLEAN_TYPE
6681 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6682 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6684 tree and = TREE_OPERAND (t, 0);
6685 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6688 if (TYPE_UNSIGNED (TREE_TYPE (and))
6689 || (TYPE_PRECISION (type)
6690 <= TYPE_PRECISION (TREE_TYPE (and))))
6692 else if (TYPE_PRECISION (TREE_TYPE (and1))
6693 <= HOST_BITS_PER_WIDE_INT
6694 && host_integerp (and1, 1))
6696 unsigned HOST_WIDE_INT cst;
6698 cst = tree_low_cst (and1, 1);
6699 cst &= (HOST_WIDE_INT) -1
6700 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6701 change = (cst == 0);
6702 #ifdef LOAD_EXTEND_OP
6704 && !flag_syntax_only
6705 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6708 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6709 and0 = fold_convert (uns, and0);
6710 and1 = fold_convert (uns, and1);
6715 return fold (build2 (BIT_AND_EXPR, type,
6716 fold_convert (type, and0),
6717 fold_convert (type, and1)));
6720 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6721 T2 being pointers to types of the same size. */
6722 if (POINTER_TYPE_P (TREE_TYPE (t))
6723 && BINARY_CLASS_P (arg0)
6724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6725 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6727 tree arg00 = TREE_OPERAND (arg0, 0);
6728 tree t0 = TREE_TYPE (t);
6729 tree t1 = TREE_TYPE (arg00);
6730 tree tt0 = TREE_TYPE (t0);
6731 tree tt1 = TREE_TYPE (t1);
6732 tree s0 = TYPE_SIZE (tt0);
6733 tree s1 = TYPE_SIZE (tt1);
6735 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6736 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6737 TREE_OPERAND (arg0, 1));
6740 tem = fold_convert_const (code, type, arg0);
6741 return tem ? tem : t;
6743 case VIEW_CONVERT_EXPR:
6744 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6745 return build1 (VIEW_CONVERT_EXPR, type,
6746 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6750 if (TREE_CODE (arg0) == CONSTRUCTOR
6751 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6753 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6755 return TREE_VALUE (m);
6760 if (TREE_CONSTANT (t) != wins)
6762 tem = copy_node (t);
6763 TREE_CONSTANT (tem) = wins;
6764 TREE_INVARIANT (tem) = wins;
6770 if (negate_expr_p (arg0))
6771 return fold_convert (type, negate_expr (arg0));
6775 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6776 return fold_abs_const (arg0, type);
6777 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6778 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6779 /* Convert fabs((double)float) into (double)fabsf(float). */
6780 else if (TREE_CODE (arg0) == NOP_EXPR
6781 && TREE_CODE (type) == REAL_TYPE)
6783 tree targ0 = strip_float_extensions (arg0);
6785 return fold_convert (type, fold (build1 (ABS_EXPR,
6789 else if (tree_expr_nonnegative_p (arg0))
6792 /* Strip sign ops from argument. */
6793 if (TREE_CODE (type) == REAL_TYPE)
6795 tem = fold_strip_sign_ops (arg0);
6797 return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
6802 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6803 return fold_convert (type, arg0);
6804 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6805 return build2 (COMPLEX_EXPR, type,
6806 TREE_OPERAND (arg0, 0),
6807 negate_expr (TREE_OPERAND (arg0, 1)));
6808 else if (TREE_CODE (arg0) == COMPLEX_CST)
6809 return build_complex (type, TREE_REALPART (arg0),
6810 negate_expr (TREE_IMAGPART (arg0)));
6811 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6812 return fold (build2 (TREE_CODE (arg0), type,
6813 fold (build1 (CONJ_EXPR, type,
6814 TREE_OPERAND (arg0, 0))),
6815 fold (build1 (CONJ_EXPR, type,
6816 TREE_OPERAND (arg0, 1)))));
6817 else if (TREE_CODE (arg0) == CONJ_EXPR)
6818 return TREE_OPERAND (arg0, 0);
6822 if (TREE_CODE (arg0) == INTEGER_CST)
6823 return fold_not_const (arg0, type);
6824 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6825 return TREE_OPERAND (arg0, 0);
6829 /* A + (-B) -> A - B */
6830 if (TREE_CODE (arg1) == NEGATE_EXPR)
6831 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6832 /* (-A) + B -> B - A */
6833 if (TREE_CODE (arg0) == NEGATE_EXPR
6834 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6835 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6836 if (! FLOAT_TYPE_P (type))
6838 if (integer_zerop (arg1))
6839 return non_lvalue (fold_convert (type, arg0));
6841 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6842 with a constant, and the two constants have no bits in common,
6843 we should treat this as a BIT_IOR_EXPR since this may produce more
6845 if (TREE_CODE (arg0) == BIT_AND_EXPR
6846 && TREE_CODE (arg1) == BIT_AND_EXPR
6847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6848 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6849 && integer_zerop (const_binop (BIT_AND_EXPR,
6850 TREE_OPERAND (arg0, 1),
6851 TREE_OPERAND (arg1, 1), 0)))
6853 code = BIT_IOR_EXPR;
6857 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6858 (plus (plus (mult) (mult)) (foo)) so that we can
6859 take advantage of the factoring cases below. */
6860 if (((TREE_CODE (arg0) == PLUS_EXPR
6861 || TREE_CODE (arg0) == MINUS_EXPR)
6862 && TREE_CODE (arg1) == MULT_EXPR)
6863 || ((TREE_CODE (arg1) == PLUS_EXPR
6864 || TREE_CODE (arg1) == MINUS_EXPR)
6865 && TREE_CODE (arg0) == MULT_EXPR))
6867 tree parg0, parg1, parg, marg;
6868 enum tree_code pcode;
6870 if (TREE_CODE (arg1) == MULT_EXPR)
6871 parg = arg0, marg = arg1;
6873 parg = arg1, marg = arg0;
6874 pcode = TREE_CODE (parg);
6875 parg0 = TREE_OPERAND (parg, 0);
6876 parg1 = TREE_OPERAND (parg, 1);
6880 if (TREE_CODE (parg0) == MULT_EXPR
6881 && TREE_CODE (parg1) != MULT_EXPR)
6882 return fold (build2 (pcode, type,
6883 fold (build2 (PLUS_EXPR, type,
6884 fold_convert (type, parg0),
6885 fold_convert (type, marg))),
6886 fold_convert (type, parg1)));
6887 if (TREE_CODE (parg0) != MULT_EXPR
6888 && TREE_CODE (parg1) == MULT_EXPR)
6889 return fold (build2 (PLUS_EXPR, type,
6890 fold_convert (type, parg0),
6891 fold (build2 (pcode, type,
6892 fold_convert (type, marg),
6897 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6899 tree arg00, arg01, arg10, arg11;
6900 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6902 /* (A * C) + (B * C) -> (A+B) * C.
6903 We are most concerned about the case where C is a constant,
6904 but other combinations show up during loop reduction. Since
6905 it is not difficult, try all four possibilities. */
6907 arg00 = TREE_OPERAND (arg0, 0);
6908 arg01 = TREE_OPERAND (arg0, 1);
6909 arg10 = TREE_OPERAND (arg1, 0);
6910 arg11 = TREE_OPERAND (arg1, 1);
6913 if (operand_equal_p (arg01, arg11, 0))
6914 same = arg01, alt0 = arg00, alt1 = arg10;
6915 else if (operand_equal_p (arg00, arg10, 0))
6916 same = arg00, alt0 = arg01, alt1 = arg11;
6917 else if (operand_equal_p (arg00, arg11, 0))
6918 same = arg00, alt0 = arg01, alt1 = arg10;
6919 else if (operand_equal_p (arg01, arg10, 0))
6920 same = arg01, alt0 = arg00, alt1 = arg11;
6922 /* No identical multiplicands; see if we can find a common
6923 power-of-two factor in non-power-of-two multiplies. This
6924 can help in multi-dimensional array access. */
6925 else if (TREE_CODE (arg01) == INTEGER_CST
6926 && TREE_CODE (arg11) == INTEGER_CST
6927 && TREE_INT_CST_HIGH (arg01) == 0
6928 && TREE_INT_CST_HIGH (arg11) == 0)
6930 HOST_WIDE_INT int01, int11, tmp;
6931 int01 = TREE_INT_CST_LOW (arg01);
6932 int11 = TREE_INT_CST_LOW (arg11);
6934 /* Move min of absolute values to int11. */
6935 if ((int01 >= 0 ? int01 : -int01)
6936 < (int11 >= 0 ? int11 : -int11))
6938 tmp = int01, int01 = int11, int11 = tmp;
6939 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6940 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6943 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6945 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6946 build_int_cst (NULL_TREE,
6954 return fold (build2 (MULT_EXPR, type,
6955 fold (build2 (PLUS_EXPR, type,
6956 fold_convert (type, alt0),
6957 fold_convert (type, alt1))),
6961 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
6962 of the array. Loop optimizer sometimes produces this type of
6964 if (TREE_CODE (arg0) == ADDR_EXPR
6965 && TREE_CODE (arg1) == MULT_EXPR)
6967 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
6969 return fold_convert (type, fold (tem));
6971 else if (TREE_CODE (arg1) == ADDR_EXPR
6972 && TREE_CODE (arg0) == MULT_EXPR)
6974 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
6976 return fold_convert (type, fold (tem));
6981 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6982 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6983 return non_lvalue (fold_convert (type, arg0));
6985 /* Likewise if the operands are reversed. */
6986 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6987 return non_lvalue (fold_convert (type, arg1));
6989 /* Convert X + -C into X - C. */
6990 if (TREE_CODE (arg1) == REAL_CST
6991 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6993 tem = fold_negate_const (arg1, type);
6994 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6995 return fold (build2 (MINUS_EXPR, type,
6996 fold_convert (type, arg0),
6997 fold_convert (type, tem)));
7000 /* Convert x+x into x*2.0. */
7001 if (operand_equal_p (arg0, arg1, 0)
7002 && SCALAR_FLOAT_TYPE_P (type))
7003 return fold (build2 (MULT_EXPR, type, arg0,
7004 build_real (type, dconst2)));
7006 /* Convert x*c+x into x*(c+1). */
7007 if (flag_unsafe_math_optimizations
7008 && TREE_CODE (arg0) == MULT_EXPR
7009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7010 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7011 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7015 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7016 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7017 return fold (build2 (MULT_EXPR, type, arg1,
7018 build_real (type, c)));
7021 /* Convert x+x*c into x*(c+1). */
7022 if (flag_unsafe_math_optimizations
7023 && TREE_CODE (arg1) == MULT_EXPR
7024 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7025 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7026 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7030 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7031 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7032 return fold (build2 (MULT_EXPR, type, arg0,
7033 build_real (type, c)));
7036 /* Convert x*c1+x*c2 into x*(c1+c2). */
7037 if (flag_unsafe_math_optimizations
7038 && TREE_CODE (arg0) == MULT_EXPR
7039 && TREE_CODE (arg1) == MULT_EXPR
7040 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7041 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7042 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7043 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7044 && operand_equal_p (TREE_OPERAND (arg0, 0),
7045 TREE_OPERAND (arg1, 0), 0))
7047 REAL_VALUE_TYPE c1, c2;
7049 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7050 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7051 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7052 return fold (build2 (MULT_EXPR, type,
7053 TREE_OPERAND (arg0, 0),
7054 build_real (type, c1)));
7056 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7057 if (flag_unsafe_math_optimizations
7058 && TREE_CODE (arg1) == PLUS_EXPR
7059 && TREE_CODE (arg0) != MULT_EXPR)
7061 tree tree10 = TREE_OPERAND (arg1, 0);
7062 tree tree11 = TREE_OPERAND (arg1, 1);
7063 if (TREE_CODE (tree11) == MULT_EXPR
7064 && TREE_CODE (tree10) == MULT_EXPR)
7067 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
7068 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
7071 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7072 if (flag_unsafe_math_optimizations
7073 && TREE_CODE (arg0) == PLUS_EXPR
7074 && TREE_CODE (arg1) != MULT_EXPR)
7076 tree tree00 = TREE_OPERAND (arg0, 0);
7077 tree tree01 = TREE_OPERAND (arg0, 1);
7078 if (TREE_CODE (tree01) == MULT_EXPR
7079 && TREE_CODE (tree00) == MULT_EXPR)
7082 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7083 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7089 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7090 is a rotate of A by C1 bits. */
7091 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7092 is a rotate of A by B bits. */
7094 enum tree_code code0, code1;
7095 code0 = TREE_CODE (arg0);
7096 code1 = TREE_CODE (arg1);
7097 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7098 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7099 && operand_equal_p (TREE_OPERAND (arg0, 0),
7100 TREE_OPERAND (arg1, 0), 0)
7101 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7103 tree tree01, tree11;
7104 enum tree_code code01, code11;
7106 tree01 = TREE_OPERAND (arg0, 1);
7107 tree11 = TREE_OPERAND (arg1, 1);
7108 STRIP_NOPS (tree01);
7109 STRIP_NOPS (tree11);
7110 code01 = TREE_CODE (tree01);
7111 code11 = TREE_CODE (tree11);
7112 if (code01 == INTEGER_CST
7113 && code11 == INTEGER_CST
7114 && TREE_INT_CST_HIGH (tree01) == 0
7115 && TREE_INT_CST_HIGH (tree11) == 0
7116 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7117 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7118 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7119 code0 == LSHIFT_EXPR ? tree01 : tree11);
7120 else if (code11 == MINUS_EXPR)
7122 tree tree110, tree111;
7123 tree110 = TREE_OPERAND (tree11, 0);
7124 tree111 = TREE_OPERAND (tree11, 1);
7125 STRIP_NOPS (tree110);
7126 STRIP_NOPS (tree111);
7127 if (TREE_CODE (tree110) == INTEGER_CST
7128 && 0 == compare_tree_int (tree110,
7130 (TREE_TYPE (TREE_OPERAND
7132 && operand_equal_p (tree01, tree111, 0))
7133 return build2 ((code0 == LSHIFT_EXPR
7136 type, TREE_OPERAND (arg0, 0), tree01);
7138 else if (code01 == MINUS_EXPR)
7140 tree tree010, tree011;
7141 tree010 = TREE_OPERAND (tree01, 0);
7142 tree011 = TREE_OPERAND (tree01, 1);
7143 STRIP_NOPS (tree010);
7144 STRIP_NOPS (tree011);
7145 if (TREE_CODE (tree010) == INTEGER_CST
7146 && 0 == compare_tree_int (tree010,
7148 (TREE_TYPE (TREE_OPERAND
7150 && operand_equal_p (tree11, tree011, 0))
7151 return build2 ((code0 != LSHIFT_EXPR
7154 type, TREE_OPERAND (arg0, 0), tree11);
7160 /* In most languages, can't associate operations on floats through
7161 parentheses. Rather than remember where the parentheses were, we
7162 don't associate floats at all, unless the user has specified
7163 -funsafe-math-optimizations. */
7166 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7168 tree var0, con0, lit0, minus_lit0;
7169 tree var1, con1, lit1, minus_lit1;
7171 /* Split both trees into variables, constants, and literals. Then
7172 associate each group together, the constants with literals,
7173 then the result with variables. This increases the chances of
7174 literals being recombined later and of generating relocatable
7175 expressions for the sum of a constant and literal. */
7176 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7177 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7178 code == MINUS_EXPR);
7180 /* Only do something if we found more than two objects. Otherwise,
7181 nothing has changed and we risk infinite recursion. */
7182 if (2 < ((var0 != 0) + (var1 != 0)
7183 + (con0 != 0) + (con1 != 0)
7184 + (lit0 != 0) + (lit1 != 0)
7185 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7187 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7188 if (code == MINUS_EXPR)
7191 var0 = associate_trees (var0, var1, code, type);
7192 con0 = associate_trees (con0, con1, code, type);
7193 lit0 = associate_trees (lit0, lit1, code, type);
7194 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7196 /* Preserve the MINUS_EXPR if the negative part of the literal is
7197 greater than the positive part. Otherwise, the multiplicative
7198 folding code (i.e. extract_muldiv) may be fooled in case
7199 unsigned constants are subtracted, like in the following
7200 example: ((X*2 + 4) - 8U)/2. */
7201 if (minus_lit0 && lit0)
7203 if (TREE_CODE (lit0) == INTEGER_CST
7204 && TREE_CODE (minus_lit0) == INTEGER_CST
7205 && tree_int_cst_lt (lit0, minus_lit0))
7207 minus_lit0 = associate_trees (minus_lit0, lit0,
7213 lit0 = associate_trees (lit0, minus_lit0,
7221 return fold_convert (type,
7222 associate_trees (var0, minus_lit0,
7226 con0 = associate_trees (con0, minus_lit0,
7228 return fold_convert (type,
7229 associate_trees (var0, con0,
7234 con0 = associate_trees (con0, lit0, code, type);
7235 return fold_convert (type, associate_trees (var0, con0,
7242 t1 = const_binop (code, arg0, arg1, 0);
7243 if (t1 != NULL_TREE)
7245 /* The return value should always have
7246 the same type as the original expression. */
7247 if (TREE_TYPE (t1) != type)
7248 t1 = fold_convert (type, t1);
7255 /* A - (-B) -> A + B */
7256 if (TREE_CODE (arg1) == NEGATE_EXPR)
7257 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7258 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7259 if (TREE_CODE (arg0) == NEGATE_EXPR
7260 && (FLOAT_TYPE_P (type)
7261 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7262 && negate_expr_p (arg1)
7263 && reorder_operands_p (arg0, arg1))
7264 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7265 TREE_OPERAND (arg0, 0)));
7267 if (! FLOAT_TYPE_P (type))
7269 if (! wins && integer_zerop (arg0))
7270 return negate_expr (fold_convert (type, arg1));
7271 if (integer_zerop (arg1))
7272 return non_lvalue (fold_convert (type, arg0));
7274 /* Fold A - (A & B) into ~B & A. */
7275 if (!TREE_SIDE_EFFECTS (arg0)
7276 && TREE_CODE (arg1) == BIT_AND_EXPR)
7278 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7279 return fold (build2 (BIT_AND_EXPR, type,
7280 fold (build1 (BIT_NOT_EXPR, type,
7281 TREE_OPERAND (arg1, 0))),
7283 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7284 return fold (build2 (BIT_AND_EXPR, type,
7285 fold (build1 (BIT_NOT_EXPR, type,
7286 TREE_OPERAND (arg1, 1))),
7290 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7291 any power of 2 minus 1. */
7292 if (TREE_CODE (arg0) == BIT_AND_EXPR
7293 && TREE_CODE (arg1) == BIT_AND_EXPR
7294 && operand_equal_p (TREE_OPERAND (arg0, 0),
7295 TREE_OPERAND (arg1, 0), 0))
7297 tree mask0 = TREE_OPERAND (arg0, 1);
7298 tree mask1 = TREE_OPERAND (arg1, 1);
7299 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7301 if (operand_equal_p (tem, mask1, 0))
7303 tem = fold (build2 (BIT_XOR_EXPR, type,
7304 TREE_OPERAND (arg0, 0), mask1));
7305 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7310 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7311 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7312 return non_lvalue (fold_convert (type, arg0));
7314 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7315 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7316 (-ARG1 + ARG0) reduces to -ARG1. */
7317 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7318 return negate_expr (fold_convert (type, arg1));
7320 /* Fold &x - &x. This can happen from &x.foo - &x.
7321 This is unsafe for certain floats even in non-IEEE formats.
7322 In IEEE, it is unsafe because it does wrong for NaNs.
7323 Also note that operand_equal_p is always false if an operand
7326 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7327 && operand_equal_p (arg0, arg1, 0))
7328 return fold_convert (type, integer_zero_node);
7330 /* A - B -> A + (-B) if B is easily negatable. */
7331 if (!wins && negate_expr_p (arg1)
7332 && ((FLOAT_TYPE_P (type)
7333 /* Avoid this transformation if B is a positive REAL_CST. */
7334 && (TREE_CODE (arg1) != REAL_CST
7335 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7336 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7337 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7339 /* Try folding difference of addresses. */
7343 if ((TREE_CODE (arg0) == ADDR_EXPR
7344 || TREE_CODE (arg1) == ADDR_EXPR)
7345 && ptr_difference_const (arg0, arg1, &diff))
7346 return build_int_cst_type (type, diff);
7349 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7350 of the array. Loop optimizer sometimes produces this type of
7352 if (TREE_CODE (arg0) == ADDR_EXPR
7353 && TREE_CODE (arg1) == MULT_EXPR)
7355 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7357 return fold_convert (type, fold (tem));
7360 if (TREE_CODE (arg0) == MULT_EXPR
7361 && TREE_CODE (arg1) == MULT_EXPR
7362 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7364 /* (A * C) - (B * C) -> (A-B) * C. */
7365 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7366 TREE_OPERAND (arg1, 1), 0))
7367 return fold (build2 (MULT_EXPR, type,
7368 fold (build2 (MINUS_EXPR, type,
7369 TREE_OPERAND (arg0, 0),
7370 TREE_OPERAND (arg1, 0))),
7371 TREE_OPERAND (arg0, 1)));
7372 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7373 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7374 TREE_OPERAND (arg1, 0), 0))
7375 return fold (build2 (MULT_EXPR, type,
7376 TREE_OPERAND (arg0, 0),
7377 fold (build2 (MINUS_EXPR, type,
7378 TREE_OPERAND (arg0, 1),
7379 TREE_OPERAND (arg1, 1)))));
7385 /* (-A) * (-B) -> A * B */
7386 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7387 return fold (build2 (MULT_EXPR, type,
7388 TREE_OPERAND (arg0, 0),
7389 negate_expr (arg1)));
7390 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7391 return fold (build2 (MULT_EXPR, type,
7393 TREE_OPERAND (arg1, 0)));
7395 if (! FLOAT_TYPE_P (type))
7397 if (integer_zerop (arg1))
7398 return omit_one_operand (type, arg1, arg0);
7399 if (integer_onep (arg1))
7400 return non_lvalue (fold_convert (type, arg0));
7402 /* (a * (1 << b)) is (a << b) */
7403 if (TREE_CODE (arg1) == LSHIFT_EXPR
7404 && integer_onep (TREE_OPERAND (arg1, 0)))
7405 return fold (build2 (LSHIFT_EXPR, type, arg0,
7406 TREE_OPERAND (arg1, 1)));
7407 if (TREE_CODE (arg0) == LSHIFT_EXPR
7408 && integer_onep (TREE_OPERAND (arg0, 0)))
7409 return fold (build2 (LSHIFT_EXPR, type, arg1,
7410 TREE_OPERAND (arg0, 1)));
7412 if (TREE_CODE (arg1) == INTEGER_CST
7413 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7414 fold_convert (type, arg1),
7416 return fold_convert (type, tem);
7421 /* Maybe fold x * 0 to 0. The expressions aren't the same
7422 when x is NaN, since x * 0 is also NaN. Nor are they the
7423 same in modes with signed zeros, since multiplying a
7424 negative value by 0 gives -0, not +0. */
7425 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7426 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7427 && real_zerop (arg1))
7428 return omit_one_operand (type, arg1, arg0);
7429 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7430 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7431 && real_onep (arg1))
7432 return non_lvalue (fold_convert (type, arg0));
7434 /* Transform x * -1.0 into -x. */
7435 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7436 && real_minus_onep (arg1))
7437 return fold_convert (type, negate_expr (arg0));
7439 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7440 if (flag_unsafe_math_optimizations
7441 && TREE_CODE (arg0) == RDIV_EXPR
7442 && TREE_CODE (arg1) == REAL_CST
7443 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7445 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7448 return fold (build2 (RDIV_EXPR, type, tem,
7449 TREE_OPERAND (arg0, 1)));
7452 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7453 if (operand_equal_p (arg0, arg1, 0))
7455 tree tem = fold_strip_sign_ops (arg0);
7456 if (tem != NULL_TREE)
7458 tem = fold_convert (type, tem);
7459 return fold (build2 (MULT_EXPR, type, tem, tem));
7463 if (flag_unsafe_math_optimizations)
7465 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7466 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7468 /* Optimizations of root(...)*root(...). */
7469 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7471 tree rootfn, arg, arglist;
7472 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7473 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7475 /* Optimize sqrt(x)*sqrt(x) as x. */
7476 if (BUILTIN_SQRT_P (fcode0)
7477 && operand_equal_p (arg00, arg10, 0)
7478 && ! HONOR_SNANS (TYPE_MODE (type)))
7481 /* Optimize root(x)*root(y) as root(x*y). */
7482 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7483 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7484 arglist = build_tree_list (NULL_TREE, arg);
7485 return build_function_call_expr (rootfn, arglist);
7488 /* Optimize expN(x)*expN(y) as expN(x+y). */
7489 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7491 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7492 tree arg = build2 (PLUS_EXPR, type,
7493 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7494 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7495 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7496 return build_function_call_expr (expfn, arglist);
7499 /* Optimizations of pow(...)*pow(...). */
7500 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7501 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7502 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7504 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7505 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7507 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7508 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7511 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7512 if (operand_equal_p (arg01, arg11, 0))
7514 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7515 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7516 tree arglist = tree_cons (NULL_TREE, fold (arg),
7517 build_tree_list (NULL_TREE,
7519 return build_function_call_expr (powfn, arglist);
7522 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7523 if (operand_equal_p (arg00, arg10, 0))
7525 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7526 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7527 tree arglist = tree_cons (NULL_TREE, arg00,
7528 build_tree_list (NULL_TREE,
7530 return build_function_call_expr (powfn, arglist);
7534 /* Optimize tan(x)*cos(x) as sin(x). */
7535 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7536 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7537 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7538 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7539 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7540 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7541 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7542 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7544 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7546 if (sinfn != NULL_TREE)
7547 return build_function_call_expr (sinfn,
7548 TREE_OPERAND (arg0, 1));
7551 /* Optimize x*pow(x,c) as pow(x,c+1). */
7552 if (fcode1 == BUILT_IN_POW
7553 || fcode1 == BUILT_IN_POWF
7554 || fcode1 == BUILT_IN_POWL)
7556 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7557 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7559 if (TREE_CODE (arg11) == REAL_CST
7560 && ! TREE_CONSTANT_OVERFLOW (arg11)
7561 && operand_equal_p (arg0, arg10, 0))
7563 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7567 c = TREE_REAL_CST (arg11);
7568 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7569 arg = build_real (type, c);
7570 arglist = build_tree_list (NULL_TREE, arg);
7571 arglist = tree_cons (NULL_TREE, arg0, arglist);
7572 return build_function_call_expr (powfn, arglist);
7576 /* Optimize pow(x,c)*x as pow(x,c+1). */
7577 if (fcode0 == BUILT_IN_POW
7578 || fcode0 == BUILT_IN_POWF
7579 || fcode0 == BUILT_IN_POWL)
7581 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7582 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7584 if (TREE_CODE (arg01) == REAL_CST
7585 && ! TREE_CONSTANT_OVERFLOW (arg01)
7586 && operand_equal_p (arg1, arg00, 0))
7588 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7592 c = TREE_REAL_CST (arg01);
7593 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7594 arg = build_real (type, c);
7595 arglist = build_tree_list (NULL_TREE, arg);
7596 arglist = tree_cons (NULL_TREE, arg1, arglist);
7597 return build_function_call_expr (powfn, arglist);
7601 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7603 && operand_equal_p (arg0, arg1, 0))
7605 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7609 tree arg = build_real (type, dconst2);
7610 tree arglist = build_tree_list (NULL_TREE, arg);
7611 arglist = tree_cons (NULL_TREE, arg0, arglist);
7612 return build_function_call_expr (powfn, arglist);
7621 if (integer_all_onesp (arg1))
7622 return omit_one_operand (type, arg1, arg0);
7623 if (integer_zerop (arg1))
7624 return non_lvalue (fold_convert (type, arg0));
7625 if (operand_equal_p (arg0, arg1, 0))
7626 return non_lvalue (fold_convert (type, arg0));
7629 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7630 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7632 t1 = build_int_cst (type, -1);
7633 t1 = force_fit_type (t1, 0, false, false);
7634 return omit_one_operand (type, t1, arg1);
7638 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7639 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7641 t1 = build_int_cst (type, -1);
7642 t1 = force_fit_type (t1, 0, false, false);
7643 return omit_one_operand (type, t1, arg0);
7646 t1 = distribute_bit_expr (code, type, arg0, arg1);
7647 if (t1 != NULL_TREE)
7650 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7652 This results in more efficient code for machines without a NAND
7653 instruction. Combine will canonicalize to the first form
7654 which will allow use of NAND instructions provided by the
7655 backend if they exist. */
7656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7657 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7659 return fold (build1 (BIT_NOT_EXPR, type,
7660 build2 (BIT_AND_EXPR, type,
7661 TREE_OPERAND (arg0, 0),
7662 TREE_OPERAND (arg1, 0))));
7665 /* See if this can be simplified into a rotate first. If that
7666 is unsuccessful continue in the association code. */
7670 if (integer_zerop (arg1))
7671 return non_lvalue (fold_convert (type, arg0));
7672 if (integer_all_onesp (arg1))
7673 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7674 if (operand_equal_p (arg0, arg1, 0))
7675 return omit_one_operand (type, integer_zero_node, arg0);
7678 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7681 t1 = build_int_cst (type, -1);
7682 t1 = force_fit_type (t1, 0, false, false);
7683 return omit_one_operand (type, t1, arg1);
7687 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7688 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7690 t1 = build_int_cst (type, -1);
7691 t1 = force_fit_type (t1, 0, false, false);
7692 return omit_one_operand (type, t1, arg0);
7695 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7696 with a constant, and the two constants have no bits in common,
7697 we should treat this as a BIT_IOR_EXPR since this may produce more
7699 if (TREE_CODE (arg0) == BIT_AND_EXPR
7700 && TREE_CODE (arg1) == BIT_AND_EXPR
7701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7702 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7703 && integer_zerop (const_binop (BIT_AND_EXPR,
7704 TREE_OPERAND (arg0, 1),
7705 TREE_OPERAND (arg1, 1), 0)))
7707 code = BIT_IOR_EXPR;
7711 /* See if this can be simplified into a rotate first. If that
7712 is unsuccessful continue in the association code. */
7716 if (integer_all_onesp (arg1))
7717 return non_lvalue (fold_convert (type, arg0));
7718 if (integer_zerop (arg1))
7719 return omit_one_operand (type, arg1, arg0);
7720 if (operand_equal_p (arg0, arg1, 0))
7721 return non_lvalue (fold_convert (type, arg0));
7723 /* ~X & X is always zero. */
7724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7726 return omit_one_operand (type, integer_zero_node, arg1);
7728 /* X & ~X is always zero. */
7729 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7730 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7731 return omit_one_operand (type, integer_zero_node, arg0);
7733 t1 = distribute_bit_expr (code, type, arg0, arg1);
7734 if (t1 != NULL_TREE)
7736 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7737 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7738 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7741 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7743 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7744 && (~TREE_INT_CST_LOW (arg1)
7745 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7746 return fold_convert (type, TREE_OPERAND (arg0, 0));
7749 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7751 This results in more efficient code for machines without a NOR
7752 instruction. Combine will canonicalize to the first form
7753 which will allow use of NOR instructions provided by the
7754 backend if they exist. */
7755 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7756 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7758 return fold (build1 (BIT_NOT_EXPR, type,
7759 build2 (BIT_IOR_EXPR, type,
7760 TREE_OPERAND (arg0, 0),
7761 TREE_OPERAND (arg1, 0))));
7767 /* Don't touch a floating-point divide by zero unless the mode
7768 of the constant can represent infinity. */
7769 if (TREE_CODE (arg1) == REAL_CST
7770 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7771 && real_zerop (arg1))
7774 /* (-A) / (-B) -> A / B */
7775 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7776 return fold (build2 (RDIV_EXPR, type,
7777 TREE_OPERAND (arg0, 0),
7778 negate_expr (arg1)));
7779 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7780 return fold (build2 (RDIV_EXPR, type,
7782 TREE_OPERAND (arg1, 0)));
7784 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7785 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7786 && real_onep (arg1))
7787 return non_lvalue (fold_convert (type, arg0));
7789 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7790 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7791 && real_minus_onep (arg1))
7792 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7794 /* If ARG1 is a constant, we can convert this to a multiply by the
7795 reciprocal. This does not have the same rounding properties,
7796 so only do this if -funsafe-math-optimizations. We can actually
7797 always safely do it if ARG1 is a power of two, but it's hard to
7798 tell if it is or not in a portable manner. */
7799 if (TREE_CODE (arg1) == REAL_CST)
7801 if (flag_unsafe_math_optimizations
7802 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7804 return fold (build2 (MULT_EXPR, type, arg0, tem));
7805 /* Find the reciprocal if optimizing and the result is exact. */
7809 r = TREE_REAL_CST (arg1);
7810 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7812 tem = build_real (type, r);
7813 return fold (build2 (MULT_EXPR, type, arg0, tem));
7817 /* Convert A/B/C to A/(B*C). */
7818 if (flag_unsafe_math_optimizations
7819 && TREE_CODE (arg0) == RDIV_EXPR)
7820 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7821 fold (build2 (MULT_EXPR, type,
7822 TREE_OPERAND (arg0, 1), arg1))));
7824 /* Convert A/(B/C) to (A/B)*C. */
7825 if (flag_unsafe_math_optimizations
7826 && TREE_CODE (arg1) == RDIV_EXPR)
7827 return fold (build2 (MULT_EXPR, type,
7828 fold (build2 (RDIV_EXPR, type, arg0,
7829 TREE_OPERAND (arg1, 0))),
7830 TREE_OPERAND (arg1, 1)));
7832 /* Convert C1/(X*C2) into (C1/C2)/X. */
7833 if (flag_unsafe_math_optimizations
7834 && TREE_CODE (arg1) == MULT_EXPR
7835 && TREE_CODE (arg0) == REAL_CST
7836 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7838 tree tem = const_binop (RDIV_EXPR, arg0,
7839 TREE_OPERAND (arg1, 1), 0);
7841 return fold (build2 (RDIV_EXPR, type, tem,
7842 TREE_OPERAND (arg1, 0)));
7845 if (flag_unsafe_math_optimizations)
7847 enum built_in_function fcode = builtin_mathfn_code (arg1);
7848 /* Optimize x/expN(y) into x*expN(-y). */
7849 if (BUILTIN_EXPONENT_P (fcode))
7851 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7852 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7853 tree arglist = build_tree_list (NULL_TREE,
7854 fold_convert (type, arg));
7855 arg1 = build_function_call_expr (expfn, arglist);
7856 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7859 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7860 if (fcode == BUILT_IN_POW
7861 || fcode == BUILT_IN_POWF
7862 || fcode == BUILT_IN_POWL)
7864 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7865 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7866 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7867 tree neg11 = fold_convert (type, negate_expr (arg11));
7868 tree arglist = tree_cons(NULL_TREE, arg10,
7869 build_tree_list (NULL_TREE, neg11));
7870 arg1 = build_function_call_expr (powfn, arglist);
7871 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7875 if (flag_unsafe_math_optimizations)
7877 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7878 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7880 /* Optimize sin(x)/cos(x) as tan(x). */
7881 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7882 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7883 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7884 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7885 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7887 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7889 if (tanfn != NULL_TREE)
7890 return build_function_call_expr (tanfn,
7891 TREE_OPERAND (arg0, 1));
7894 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7895 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7896 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7897 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7898 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7899 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7901 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7903 if (tanfn != NULL_TREE)
7905 tree tmp = TREE_OPERAND (arg0, 1);
7906 tmp = build_function_call_expr (tanfn, tmp);
7907 return fold (build2 (RDIV_EXPR, type,
7908 build_real (type, dconst1), tmp));
7912 /* Optimize pow(x,c)/x as pow(x,c-1). */
7913 if (fcode0 == BUILT_IN_POW
7914 || fcode0 == BUILT_IN_POWF
7915 || fcode0 == BUILT_IN_POWL)
7917 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7918 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7919 if (TREE_CODE (arg01) == REAL_CST
7920 && ! TREE_CONSTANT_OVERFLOW (arg01)
7921 && operand_equal_p (arg1, arg00, 0))
7923 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7927 c = TREE_REAL_CST (arg01);
7928 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7929 arg = build_real (type, c);
7930 arglist = build_tree_list (NULL_TREE, arg);
7931 arglist = tree_cons (NULL_TREE, arg1, arglist);
7932 return build_function_call_expr (powfn, arglist);
7938 case TRUNC_DIV_EXPR:
7939 case ROUND_DIV_EXPR:
7940 case FLOOR_DIV_EXPR:
7942 case EXACT_DIV_EXPR:
7943 if (integer_onep (arg1))
7944 return non_lvalue (fold_convert (type, arg0));
7945 if (integer_zerop (arg1))
7948 if (!TYPE_UNSIGNED (type)
7949 && TREE_CODE (arg1) == INTEGER_CST
7950 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7951 && TREE_INT_CST_HIGH (arg1) == -1)
7952 return fold_convert (type, negate_expr (arg0));
7954 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7955 operation, EXACT_DIV_EXPR.
7957 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7958 At one time others generated faster code, it's not clear if they do
7959 after the last round of changes to the DIV code in expmed.c. */
7960 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7961 && multiple_of_p (type, arg0, arg1))
7962 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7964 if (TREE_CODE (arg1) == INTEGER_CST
7965 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7967 return fold_convert (type, tem);
7972 case FLOOR_MOD_EXPR:
7973 case ROUND_MOD_EXPR:
7974 case TRUNC_MOD_EXPR:
7975 /* X % 1 is always zero, but be sure to preserve any side
7977 if (integer_onep (arg1))
7978 return omit_one_operand (type, integer_zero_node, arg0);
7980 /* X % 0, return X % 0 unchanged so that we can get the
7981 proper warnings and errors. */
7982 if (integer_zerop (arg1))
7985 /* 0 % X is always zero, but be sure to preserve any side
7986 effects in X. Place this after checking for X == 0. */
7987 if (integer_zerop (arg0))
7988 return omit_one_operand (type, integer_zero_node, arg1);
7990 /* X % -1 is zero. */
7991 if (!TYPE_UNSIGNED (type)
7992 && TREE_CODE (arg1) == INTEGER_CST
7993 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7994 && TREE_INT_CST_HIGH (arg1) == -1)
7995 return omit_one_operand (type, integer_zero_node, arg0);
7997 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7998 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7999 if (code == TRUNC_MOD_EXPR
8000 && TYPE_UNSIGNED (type)
8001 && integer_pow2p (arg1))
8003 unsigned HOST_WIDE_INT high, low;
8007 l = tree_log2 (arg1);
8008 if (l >= HOST_BITS_PER_WIDE_INT)
8010 high = ((unsigned HOST_WIDE_INT) 1
8011 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8017 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8020 mask = build_int_cst_wide (type, low, high);
8021 return fold (build2 (BIT_AND_EXPR, type,
8022 fold_convert (type, arg0), mask));
8025 /* X % -C is the same as X % C. */
8026 if (code == TRUNC_MOD_EXPR
8027 && !TYPE_UNSIGNED (type)
8028 && TREE_CODE (arg1) == INTEGER_CST
8029 && TREE_INT_CST_HIGH (arg1) < 0
8031 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8032 && !sign_bit_p (arg1, arg1))
8033 return fold (build2 (code, type, fold_convert (type, arg0),
8034 fold_convert (type, negate_expr (arg1))));
8036 /* X % -Y is the same as X % Y. */
8037 if (code == TRUNC_MOD_EXPR
8038 && !TYPE_UNSIGNED (type)
8039 && TREE_CODE (arg1) == NEGATE_EXPR
8041 return fold (build2 (code, type, fold_convert (type, arg0),
8042 fold_convert (type, TREE_OPERAND (arg1, 0))));
8044 if (TREE_CODE (arg1) == INTEGER_CST
8045 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
8047 return fold_convert (type, tem);
8053 if (integer_all_onesp (arg0))
8054 return omit_one_operand (type, arg0, arg1);
8058 /* Optimize -1 >> x for arithmetic right shifts. */
8059 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8060 return omit_one_operand (type, arg0, arg1);
8061 /* ... fall through ... */
8065 if (integer_zerop (arg1))
8066 return non_lvalue (fold_convert (type, arg0));
8067 if (integer_zerop (arg0))
8068 return omit_one_operand (type, arg0, arg1);
8070 /* Since negative shift count is not well-defined,
8071 don't try to compute it in the compiler. */
8072 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8074 /* Rewrite an LROTATE_EXPR by a constant into an
8075 RROTATE_EXPR by a new constant. */
8076 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8078 tree tem = build_int_cst (NULL_TREE,
8079 GET_MODE_BITSIZE (TYPE_MODE (type)));
8080 tem = fold_convert (TREE_TYPE (arg1), tem);
8081 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8082 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
8085 /* If we have a rotate of a bit operation with the rotate count and
8086 the second operand of the bit operation both constant,
8087 permute the two operations. */
8088 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8089 && (TREE_CODE (arg0) == BIT_AND_EXPR
8090 || TREE_CODE (arg0) == BIT_IOR_EXPR
8091 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8092 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8093 return fold (build2 (TREE_CODE (arg0), type,
8094 fold (build2 (code, type,
8095 TREE_OPERAND (arg0, 0), arg1)),
8096 fold (build2 (code, type,
8097 TREE_OPERAND (arg0, 1), arg1))));
8099 /* Two consecutive rotates adding up to the width of the mode can
8101 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8102 && TREE_CODE (arg0) == RROTATE_EXPR
8103 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8104 && TREE_INT_CST_HIGH (arg1) == 0
8105 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8106 && ((TREE_INT_CST_LOW (arg1)
8107 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8108 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8109 return TREE_OPERAND (arg0, 0);
8114 if (operand_equal_p (arg0, arg1, 0))
8115 return omit_one_operand (type, arg0, arg1);
8116 if (INTEGRAL_TYPE_P (type)
8117 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8118 return omit_one_operand (type, arg1, arg0);
8122 if (operand_equal_p (arg0, arg1, 0))
8123 return omit_one_operand (type, arg0, arg1);
8124 if (INTEGRAL_TYPE_P (type)
8125 && TYPE_MAX_VALUE (type)
8126 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8127 return omit_one_operand (type, arg1, arg0);
8130 case TRUTH_NOT_EXPR:
8131 /* The argument to invert_truthvalue must have Boolean type. */
8132 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8133 arg0 = fold_convert (boolean_type_node, arg0);
8135 /* Note that the operand of this must be an int
8136 and its values must be 0 or 1.
8137 ("true" is a fixed value perhaps depending on the language,
8138 but we don't handle values other than 1 correctly yet.) */
8139 tem = invert_truthvalue (arg0);
8140 /* Avoid infinite recursion. */
8141 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8143 return fold_convert (type, tem);
8145 case TRUTH_ANDIF_EXPR:
8146 /* Note that the operands of this must be ints
8147 and their values must be 0 or 1.
8148 ("true" is a fixed value perhaps depending on the language.) */
8149 /* If first arg is constant zero, return it. */
8150 if (integer_zerop (arg0))
8151 return fold_convert (type, arg0);
8152 case TRUTH_AND_EXPR:
8153 /* If either arg is constant true, drop it. */
8154 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8155 return non_lvalue (fold_convert (type, arg1));
8156 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8157 /* Preserve sequence points. */
8158 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8159 return non_lvalue (fold_convert (type, arg0));
8160 /* If second arg is constant zero, result is zero, but first arg
8161 must be evaluated. */
8162 if (integer_zerop (arg1))
8163 return omit_one_operand (type, arg1, arg0);
8164 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8165 case will be handled here. */
8166 if (integer_zerop (arg0))
8167 return omit_one_operand (type, arg0, arg1);
8169 /* !X && X is always false. */
8170 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8171 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8172 return omit_one_operand (type, integer_zero_node, arg1);
8173 /* X && !X is always false. */
8174 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8175 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8176 return omit_one_operand (type, integer_zero_node, arg0);
8178 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8179 means A >= Y && A != MAX, but in this case we know that
8182 if (!TREE_SIDE_EFFECTS (arg0)
8183 && !TREE_SIDE_EFFECTS (arg1))
8185 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8187 return fold (build2 (code, type, tem, arg1));
8189 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8191 return fold (build2 (code, type, arg0, tem));
8195 /* We only do these simplifications if we are optimizing. */
8199 /* Check for things like (A || B) && (A || C). We can convert this
8200 to A || (B && C). Note that either operator can be any of the four
8201 truth and/or operations and the transformation will still be
8202 valid. Also note that we only care about order for the
8203 ANDIF and ORIF operators. If B contains side effects, this
8204 might change the truth-value of A. */
8205 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8206 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8207 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8208 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8209 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8210 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8212 tree a00 = TREE_OPERAND (arg0, 0);
8213 tree a01 = TREE_OPERAND (arg0, 1);
8214 tree a10 = TREE_OPERAND (arg1, 0);
8215 tree a11 = TREE_OPERAND (arg1, 1);
8216 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8217 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8218 && (code == TRUTH_AND_EXPR
8219 || code == TRUTH_OR_EXPR));
8221 if (operand_equal_p (a00, a10, 0))
8222 return fold (build2 (TREE_CODE (arg0), type, a00,
8223 fold (build2 (code, type, a01, a11))));
8224 else if (commutative && operand_equal_p (a00, a11, 0))
8225 return fold (build2 (TREE_CODE (arg0), type, a00,
8226 fold (build2 (code, type, a01, a10))));
8227 else if (commutative && operand_equal_p (a01, a10, 0))
8228 return fold (build2 (TREE_CODE (arg0), type, a01,
8229 fold (build2 (code, type, a00, a11))));
8231 /* This case is tricky because we must either have commutative
8232 operators or else A10 must not have side-effects. */
8234 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8235 && operand_equal_p (a01, a11, 0))
8236 return fold (build2 (TREE_CODE (arg0), type,
8237 fold (build2 (code, type, a00, a10)),
8241 /* See if we can build a range comparison. */
8242 if (0 != (tem = fold_range_test (t)))
8245 /* Check for the possibility of merging component references. If our
8246 lhs is another similar operation, try to merge its rhs with our
8247 rhs. Then try to merge our lhs and rhs. */
8248 if (TREE_CODE (arg0) == code
8249 && 0 != (tem = fold_truthop (code, type,
8250 TREE_OPERAND (arg0, 1), arg1)))
8251 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8253 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8258 case TRUTH_ORIF_EXPR:
8259 /* Note that the operands of this must be ints
8260 and their values must be 0 or true.
8261 ("true" is a fixed value perhaps depending on the language.) */
8262 /* If first arg is constant true, return it. */
8263 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8264 return fold_convert (type, arg0);
8266 /* If either arg is constant zero, drop it. */
8267 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8268 return non_lvalue (fold_convert (type, arg1));
8269 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8270 /* Preserve sequence points. */
8271 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8272 return non_lvalue (fold_convert (type, arg0));
8273 /* If second arg is constant true, result is true, but we must
8274 evaluate first arg. */
8275 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8276 return omit_one_operand (type, arg1, arg0);
8277 /* Likewise for first arg, but note this only occurs here for
8279 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8280 return omit_one_operand (type, arg0, arg1);
8282 /* !X || X is always true. */
8283 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8284 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8285 return omit_one_operand (type, integer_one_node, arg1);
8286 /* X || !X is always true. */
8287 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8288 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8289 return omit_one_operand (type, integer_one_node, arg0);
8293 case TRUTH_XOR_EXPR:
8294 /* If the second arg is constant zero, drop it. */
8295 if (integer_zerop (arg1))
8296 return non_lvalue (fold_convert (type, arg0));
8297 /* If the second arg is constant true, this is a logical inversion. */
8298 if (integer_onep (arg1))
8299 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8300 /* Identical arguments cancel to zero. */
8301 if (operand_equal_p (arg0, arg1, 0))
8302 return omit_one_operand (type, integer_zero_node, arg0);
8304 /* !X ^ X is always true. */
8305 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8307 return omit_one_operand (type, integer_one_node, arg1);
8309 /* X ^ !X is always true. */
8310 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8311 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8312 return omit_one_operand (type, integer_one_node, arg0);
8322 /* If one arg is a real or integer constant, put it last. */
8323 if (tree_swap_operands_p (arg0, arg1, true))
8324 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8326 /* If this is an equality comparison of the address of a non-weak
8327 object against zero, then we know the result. */
8328 if ((code == EQ_EXPR || code == NE_EXPR)
8329 && TREE_CODE (arg0) == ADDR_EXPR
8330 && DECL_P (TREE_OPERAND (arg0, 0))
8331 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8332 && integer_zerop (arg1))
8333 return constant_boolean_node (code != EQ_EXPR, type);
8335 /* If this is an equality comparison of the address of two non-weak,
8336 unaliased symbols neither of which are extern (since we do not
8337 have access to attributes for externs), then we know the result. */
8338 if ((code == EQ_EXPR || code == NE_EXPR)
8339 && TREE_CODE (arg0) == ADDR_EXPR
8340 && DECL_P (TREE_OPERAND (arg0, 0))
8341 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8342 && ! lookup_attribute ("alias",
8343 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8344 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8345 && TREE_CODE (arg1) == ADDR_EXPR
8346 && DECL_P (TREE_OPERAND (arg1, 0))
8347 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8348 && ! lookup_attribute ("alias",
8349 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8350 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8351 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8352 ? code == EQ_EXPR : code != EQ_EXPR,
8355 /* If this is a comparison of two exprs that look like an
8356 ARRAY_REF of the same object, then we can fold this to a
8357 comparison of the two offsets. */
8358 if (COMPARISON_CLASS_P (t))
8360 tree base0, offset0, base1, offset1;
8362 if (extract_array_ref (arg0, &base0, &offset0)
8363 && extract_array_ref (arg1, &base1, &offset1)
8364 && operand_equal_p (base0, base1, 0))
8366 if (offset0 == NULL_TREE
8367 && offset1 == NULL_TREE)
8369 offset0 = integer_zero_node;
8370 offset1 = integer_zero_node;
8372 else if (offset0 == NULL_TREE)
8373 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8374 else if (offset1 == NULL_TREE)
8375 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8377 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8378 return fold (build2 (code, type, offset0, offset1));
8382 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8384 tree targ0 = strip_float_extensions (arg0);
8385 tree targ1 = strip_float_extensions (arg1);
8386 tree newtype = TREE_TYPE (targ0);
8388 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8389 newtype = TREE_TYPE (targ1);
8391 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8392 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8393 return fold (build2 (code, type, fold_convert (newtype, targ0),
8394 fold_convert (newtype, targ1)));
8396 /* (-a) CMP (-b) -> b CMP a */
8397 if (TREE_CODE (arg0) == NEGATE_EXPR
8398 && TREE_CODE (arg1) == NEGATE_EXPR)
8399 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8400 TREE_OPERAND (arg0, 0)));
8402 if (TREE_CODE (arg1) == REAL_CST)
8404 REAL_VALUE_TYPE cst;
8405 cst = TREE_REAL_CST (arg1);
8407 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8408 if (TREE_CODE (arg0) == NEGATE_EXPR)
8410 fold (build2 (swap_tree_comparison (code), type,
8411 TREE_OPERAND (arg0, 0),
8412 build_real (TREE_TYPE (arg1),
8413 REAL_VALUE_NEGATE (cst))));
8415 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8416 /* a CMP (-0) -> a CMP 0 */
8417 if (REAL_VALUE_MINUS_ZERO (cst))
8418 return fold (build2 (code, type, arg0,
8419 build_real (TREE_TYPE (arg1), dconst0)));
8421 /* x != NaN is always true, other ops are always false. */
8422 if (REAL_VALUE_ISNAN (cst)
8423 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8425 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8426 return omit_one_operand (type, tem, arg0);
8429 /* Fold comparisons against infinity. */
8430 if (REAL_VALUE_ISINF (cst))
8432 tem = fold_inf_compare (code, type, arg0, arg1);
8433 if (tem != NULL_TREE)
8438 /* If this is a comparison of a real constant with a PLUS_EXPR
8439 or a MINUS_EXPR of a real constant, we can convert it into a
8440 comparison with a revised real constant as long as no overflow
8441 occurs when unsafe_math_optimizations are enabled. */
8442 if (flag_unsafe_math_optimizations
8443 && TREE_CODE (arg1) == REAL_CST
8444 && (TREE_CODE (arg0) == PLUS_EXPR
8445 || TREE_CODE (arg0) == MINUS_EXPR)
8446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8447 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8448 ? MINUS_EXPR : PLUS_EXPR,
8449 arg1, TREE_OPERAND (arg0, 1), 0))
8450 && ! TREE_CONSTANT_OVERFLOW (tem))
8451 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8453 /* Likewise, we can simplify a comparison of a real constant with
8454 a MINUS_EXPR whose first operand is also a real constant, i.e.
8455 (c1 - x) < c2 becomes x > c1-c2. */
8456 if (flag_unsafe_math_optimizations
8457 && TREE_CODE (arg1) == REAL_CST
8458 && TREE_CODE (arg0) == MINUS_EXPR
8459 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8460 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8462 && ! TREE_CONSTANT_OVERFLOW (tem))
8463 return fold (build2 (swap_tree_comparison (code), type,
8464 TREE_OPERAND (arg0, 1), tem));
8466 /* Fold comparisons against built-in math functions. */
8467 if (TREE_CODE (arg1) == REAL_CST
8468 && flag_unsafe_math_optimizations
8469 && ! flag_errno_math)
8471 enum built_in_function fcode = builtin_mathfn_code (arg0);
8473 if (fcode != END_BUILTINS)
8475 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8476 if (tem != NULL_TREE)
8482 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8483 if (TREE_CONSTANT (arg1)
8484 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8485 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8486 /* This optimization is invalid for ordered comparisons
8487 if CONST+INCR overflows or if foo+incr might overflow.
8488 This optimization is invalid for floating point due to rounding.
8489 For pointer types we assume overflow doesn't happen. */
8490 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8491 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8492 && (code == EQ_EXPR || code == NE_EXPR))))
8494 tree varop, newconst;
8496 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8498 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8499 arg1, TREE_OPERAND (arg0, 1)));
8500 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8501 TREE_OPERAND (arg0, 0),
8502 TREE_OPERAND (arg0, 1));
8506 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8507 arg1, TREE_OPERAND (arg0, 1)));
8508 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8509 TREE_OPERAND (arg0, 0),
8510 TREE_OPERAND (arg0, 1));
8514 /* If VAROP is a reference to a bitfield, we must mask
8515 the constant by the width of the field. */
8516 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8517 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8518 && host_integerp (DECL_SIZE (TREE_OPERAND
8519 (TREE_OPERAND (varop, 0), 1)), 1))
8521 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8522 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8523 tree folded_compare, shift;
8525 /* First check whether the comparison would come out
8526 always the same. If we don't do that we would
8527 change the meaning with the masking. */
8528 folded_compare = fold (build2 (code, type,
8529 TREE_OPERAND (varop, 0), arg1));
8530 if (integer_zerop (folded_compare)
8531 || integer_onep (folded_compare))
8532 return omit_one_operand (type, folded_compare, varop);
8534 shift = build_int_cst (NULL_TREE,
8535 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8536 shift = fold_convert (TREE_TYPE (varop), shift);
8537 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8539 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8543 return fold (build2 (code, type, varop, newconst));
8546 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8547 This transformation affects the cases which are handled in later
8548 optimizations involving comparisons with non-negative constants. */
8549 if (TREE_CODE (arg1) == INTEGER_CST
8550 && TREE_CODE (arg0) != INTEGER_CST
8551 && tree_int_cst_sgn (arg1) > 0)
8556 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8557 return fold (build2 (GT_EXPR, type, arg0, arg1));
8560 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8561 return fold (build2 (LE_EXPR, type, arg0, arg1));
8568 /* Comparisons with the highest or lowest possible integer of
8569 the specified size will have known values.
8571 This is quite similar to fold_relational_hi_lo, however,
8572 attempts to share the code have been nothing but trouble. */
8574 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8576 if (TREE_CODE (arg1) == INTEGER_CST
8577 && ! TREE_CONSTANT_OVERFLOW (arg1)
8578 && width <= 2 * HOST_BITS_PER_WIDE_INT
8579 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8580 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8582 HOST_WIDE_INT signed_max_hi;
8583 unsigned HOST_WIDE_INT signed_max_lo;
8584 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
8586 if (width <= HOST_BITS_PER_WIDE_INT)
8588 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8593 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8595 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8601 max_lo = signed_max_lo;
8602 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8608 width -= HOST_BITS_PER_WIDE_INT;
8610 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
8615 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8617 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8622 max_hi = signed_max_hi;
8623 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8627 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
8628 && TREE_INT_CST_LOW (arg1) == max_lo)
8632 return omit_one_operand (type, integer_zero_node, arg0);
8635 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8638 return omit_one_operand (type, integer_one_node, arg0);
8641 return fold (build2 (NE_EXPR, type, arg0, arg1));
8643 /* The GE_EXPR and LT_EXPR cases above are not normally
8644 reached because of previous transformations. */
8649 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8651 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
8655 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8656 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8658 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8659 return fold (build2 (NE_EXPR, type, arg0, arg1));
8663 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8665 && TREE_INT_CST_LOW (arg1) == min_lo)
8669 return omit_one_operand (type, integer_zero_node, arg0);
8672 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8675 return omit_one_operand (type, integer_one_node, arg0);
8678 return fold (build2 (NE_EXPR, type, arg0, arg1));
8683 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
8685 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
8689 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8690 return fold (build2 (NE_EXPR, type, arg0, arg1));
8692 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8693 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8698 else if (!in_gimple_form
8699 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
8700 && TREE_INT_CST_LOW (arg1) == signed_max_lo
8701 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8702 /* signed_type does not work on pointer types. */
8703 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8705 /* The following case also applies to X < signed_max+1
8706 and X >= signed_max+1 because of previous transformations. */
8707 if (code == LE_EXPR || code == GT_EXPR)
8710 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8711 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8713 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8714 type, fold_convert (st0, arg0),
8715 fold_convert (st1, integer_zero_node)));
8721 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8722 a MINUS_EXPR of a constant, we can convert it into a comparison with
8723 a revised constant as long as no overflow occurs. */
8724 if ((code == EQ_EXPR || code == NE_EXPR)
8725 && TREE_CODE (arg1) == INTEGER_CST
8726 && (TREE_CODE (arg0) == PLUS_EXPR
8727 || TREE_CODE (arg0) == MINUS_EXPR)
8728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8729 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8730 ? MINUS_EXPR : PLUS_EXPR,
8731 arg1, TREE_OPERAND (arg0, 1), 0))
8732 && ! TREE_CONSTANT_OVERFLOW (tem))
8733 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8735 /* Similarly for a NEGATE_EXPR. */
8736 else if ((code == EQ_EXPR || code == NE_EXPR)
8737 && TREE_CODE (arg0) == NEGATE_EXPR
8738 && TREE_CODE (arg1) == INTEGER_CST
8739 && 0 != (tem = negate_expr (arg1))
8740 && TREE_CODE (tem) == INTEGER_CST
8741 && ! TREE_CONSTANT_OVERFLOW (tem))
8742 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8744 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8745 for !=. Don't do this for ordered comparisons due to overflow. */
8746 else if ((code == NE_EXPR || code == EQ_EXPR)
8747 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8748 return fold (build2 (code, type,
8749 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8751 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8752 && TREE_CODE (arg0) == NOP_EXPR)
8754 /* If we are widening one operand of an integer comparison,
8755 see if the other operand is similarly being widened. Perhaps we
8756 can do the comparison in the narrower type. */
8757 tem = fold_widened_comparison (code, type, arg0, arg1);
8761 /* Or if we are changing signedness. */
8762 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8767 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8768 constant, we can simplify it. */
8769 else if (TREE_CODE (arg1) == INTEGER_CST
8770 && (TREE_CODE (arg0) == MIN_EXPR
8771 || TREE_CODE (arg0) == MAX_EXPR)
8772 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8773 return optimize_minmax_comparison (t);
8775 /* If we are comparing an ABS_EXPR with a constant, we can
8776 convert all the cases into explicit comparisons, but they may
8777 well not be faster than doing the ABS and one comparison.
8778 But ABS (X) <= C is a range comparison, which becomes a subtraction
8779 and a comparison, and is probably faster. */
8780 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8781 && TREE_CODE (arg0) == ABS_EXPR
8782 && ! TREE_SIDE_EFFECTS (arg0)
8783 && (0 != (tem = negate_expr (arg1)))
8784 && TREE_CODE (tem) == INTEGER_CST
8785 && ! TREE_CONSTANT_OVERFLOW (tem))
8786 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8787 build2 (GE_EXPR, type,
8788 TREE_OPERAND (arg0, 0), tem),
8789 build2 (LE_EXPR, type,
8790 TREE_OPERAND (arg0, 0), arg1)));
8792 /* If this is an EQ or NE comparison with zero and ARG0 is
8793 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8794 two operations, but the latter can be done in one less insn
8795 on machines that have only two-operand insns or on which a
8796 constant cannot be the first operand. */
8797 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8798 && TREE_CODE (arg0) == BIT_AND_EXPR)
8800 tree arg00 = TREE_OPERAND (arg0, 0);
8801 tree arg01 = TREE_OPERAND (arg0, 1);
8802 if (TREE_CODE (arg00) == LSHIFT_EXPR
8803 && integer_onep (TREE_OPERAND (arg00, 0)))
8805 fold (build2 (code, type,
8806 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8807 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8808 arg01, TREE_OPERAND (arg00, 1)),
8809 fold_convert (TREE_TYPE (arg0),
8812 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8813 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8815 fold (build2 (code, type,
8816 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8817 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8818 arg00, TREE_OPERAND (arg01, 1)),
8819 fold_convert (TREE_TYPE (arg0),
8824 /* If this is an NE or EQ comparison of zero against the result of a
8825 signed MOD operation whose second operand is a power of 2, make
8826 the MOD operation unsigned since it is simpler and equivalent. */
8827 if ((code == NE_EXPR || code == EQ_EXPR)
8828 && integer_zerop (arg1)
8829 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8830 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8831 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8832 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8833 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8834 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8836 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8837 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8838 fold_convert (newtype,
8839 TREE_OPERAND (arg0, 0)),
8840 fold_convert (newtype,
8841 TREE_OPERAND (arg0, 1))));
8843 return fold (build2 (code, type, newmod,
8844 fold_convert (newtype, arg1)));
8847 /* If this is an NE comparison of zero with an AND of one, remove the
8848 comparison since the AND will give the correct value. */
8849 if (code == NE_EXPR && integer_zerop (arg1)
8850 && TREE_CODE (arg0) == BIT_AND_EXPR
8851 && integer_onep (TREE_OPERAND (arg0, 1)))
8852 return fold_convert (type, arg0);
8854 /* If we have (A & C) == C where C is a power of 2, convert this into
8855 (A & C) != 0. Similarly for NE_EXPR. */
8856 if ((code == EQ_EXPR || code == NE_EXPR)
8857 && TREE_CODE (arg0) == BIT_AND_EXPR
8858 && integer_pow2p (TREE_OPERAND (arg0, 1))
8859 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8860 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8861 arg0, fold_convert (TREE_TYPE (arg0),
8862 integer_zero_node)));
8864 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8865 2, then fold the expression into shifts and logical operations. */
8866 tem = fold_single_bit_test (code, arg0, arg1, type);
8870 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8871 Similarly for NE_EXPR. */
8872 if ((code == EQ_EXPR || code == NE_EXPR)
8873 && TREE_CODE (arg0) == BIT_AND_EXPR
8874 && TREE_CODE (arg1) == INTEGER_CST
8875 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8877 tree notc = fold (build1 (BIT_NOT_EXPR,
8878 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8879 TREE_OPERAND (arg0, 1)));
8880 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8882 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8883 if (integer_nonzerop (dandnotc))
8884 return omit_one_operand (type, rslt, arg0);
8887 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8888 Similarly for NE_EXPR. */
8889 if ((code == EQ_EXPR || code == NE_EXPR)
8890 && TREE_CODE (arg0) == BIT_IOR_EXPR
8891 && TREE_CODE (arg1) == INTEGER_CST
8892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8894 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8895 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8896 TREE_OPERAND (arg0, 1), notd));
8897 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8898 if (integer_nonzerop (candnotd))
8899 return omit_one_operand (type, rslt, arg0);
8902 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8903 and similarly for >= into !=. */
8904 if ((code == LT_EXPR || code == GE_EXPR)
8905 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8906 && TREE_CODE (arg1) == LSHIFT_EXPR
8907 && integer_onep (TREE_OPERAND (arg1, 0)))
8908 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8909 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8910 TREE_OPERAND (arg1, 1)),
8911 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8913 else if ((code == LT_EXPR || code == GE_EXPR)
8914 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8915 && (TREE_CODE (arg1) == NOP_EXPR
8916 || TREE_CODE (arg1) == CONVERT_EXPR)
8917 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8918 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8920 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8921 fold_convert (TREE_TYPE (arg0),
8922 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8923 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8925 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8927 /* Simplify comparison of something with itself. (For IEEE
8928 floating-point, we can only do some of these simplifications.) */
8929 if (operand_equal_p (arg0, arg1, 0))
8934 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8935 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8936 return constant_boolean_node (1, type);
8941 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8942 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8943 return constant_boolean_node (1, type);
8944 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8947 /* For NE, we can only do this simplification if integer
8948 or we don't honor IEEE floating point NaNs. */
8949 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8950 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8952 /* ... fall through ... */
8955 return constant_boolean_node (0, type);
8961 /* If we are comparing an expression that just has comparisons
8962 of two integer values, arithmetic expressions of those comparisons,
8963 and constants, we can simplify it. There are only three cases
8964 to check: the two values can either be equal, the first can be
8965 greater, or the second can be greater. Fold the expression for
8966 those three values. Since each value must be 0 or 1, we have
8967 eight possibilities, each of which corresponds to the constant 0
8968 or 1 or one of the six possible comparisons.
8970 This handles common cases like (a > b) == 0 but also handles
8971 expressions like ((x > y) - (y > x)) > 0, which supposedly
8972 occur in macroized code. */
8974 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8976 tree cval1 = 0, cval2 = 0;
8979 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8980 /* Don't handle degenerate cases here; they should already
8981 have been handled anyway. */
8982 && cval1 != 0 && cval2 != 0
8983 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8984 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8985 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8986 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8987 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8988 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8989 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8991 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8992 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8994 /* We can't just pass T to eval_subst in case cval1 or cval2
8995 was the same as ARG1. */
8998 = fold (build2 (code, type,
8999 eval_subst (arg0, cval1, maxval,
9003 = fold (build2 (code, type,
9004 eval_subst (arg0, cval1, maxval,
9008 = fold (build2 (code, type,
9009 eval_subst (arg0, cval1, minval,
9013 /* All three of these results should be 0 or 1. Confirm they
9014 are. Then use those values to select the proper code
9017 if ((integer_zerop (high_result)
9018 || integer_onep (high_result))
9019 && (integer_zerop (equal_result)
9020 || integer_onep (equal_result))
9021 && (integer_zerop (low_result)
9022 || integer_onep (low_result)))
9024 /* Make a 3-bit mask with the high-order bit being the
9025 value for `>', the next for '=', and the low for '<'. */
9026 switch ((integer_onep (high_result) * 4)
9027 + (integer_onep (equal_result) * 2)
9028 + integer_onep (low_result))
9032 return omit_one_operand (type, integer_zero_node, arg0);
9053 return omit_one_operand (type, integer_one_node, arg0);
9056 tem = build2 (code, type, cval1, cval2);
9058 return save_expr (tem);
9065 /* If this is a comparison of a field, we may be able to simplify it. */
9066 if (((TREE_CODE (arg0) == COMPONENT_REF
9067 && lang_hooks.can_use_bit_fields_p ())
9068 || TREE_CODE (arg0) == BIT_FIELD_REF)
9069 && (code == EQ_EXPR || code == NE_EXPR)
9070 /* Handle the constant case even without -O
9071 to make sure the warnings are given. */
9072 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9074 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9079 /* If this is a comparison of complex values and either or both sides
9080 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9081 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9082 This may prevent needless evaluations. */
9083 if ((code == EQ_EXPR || code == NE_EXPR)
9084 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9085 && (TREE_CODE (arg0) == COMPLEX_EXPR
9086 || TREE_CODE (arg1) == COMPLEX_EXPR
9087 || TREE_CODE (arg0) == COMPLEX_CST
9088 || TREE_CODE (arg1) == COMPLEX_CST))
9090 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9091 tree real0, imag0, real1, imag1;
9093 arg0 = save_expr (arg0);
9094 arg1 = save_expr (arg1);
9095 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
9096 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
9097 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
9098 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
9100 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9103 fold (build2 (code, type, real0, real1)),
9104 fold (build2 (code, type, imag0, imag1))));
9107 /* Optimize comparisons of strlen vs zero to a compare of the
9108 first character of the string vs zero. To wit,
9109 strlen(ptr) == 0 => *ptr == 0
9110 strlen(ptr) != 0 => *ptr != 0
9111 Other cases should reduce to one of these two (or a constant)
9112 due to the return value of strlen being unsigned. */
9113 if ((code == EQ_EXPR || code == NE_EXPR)
9114 && integer_zerop (arg1)
9115 && TREE_CODE (arg0) == CALL_EXPR)
9117 tree fndecl = get_callee_fndecl (arg0);
9121 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9122 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9123 && (arglist = TREE_OPERAND (arg0, 1))
9124 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9125 && ! TREE_CHAIN (arglist))
9126 return fold (build2 (code, type,
9127 build1 (INDIRECT_REF, char_type_node,
9128 TREE_VALUE (arglist)),
9129 fold_convert (char_type_node,
9130 integer_zero_node)));
9133 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9134 into a single range test. */
9135 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9136 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9137 && TREE_CODE (arg1) == INTEGER_CST
9138 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9139 && !integer_zerop (TREE_OPERAND (arg0, 1))
9140 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9141 && !TREE_OVERFLOW (arg1))
9143 t1 = fold_div_compare (code, type, arg0, arg1);
9144 if (t1 != NULL_TREE)
9148 if ((code == EQ_EXPR || code == NE_EXPR)
9149 && !TREE_SIDE_EFFECTS (arg0)
9150 && integer_zerop (arg1)
9151 && tree_expr_nonzero_p (arg0))
9152 return constant_boolean_node (code==NE_EXPR, type);
9154 t1 = fold_relational_const (code, type, arg0, arg1);
9155 return t1 == NULL_TREE ? t : t1;
9157 case UNORDERED_EXPR:
9165 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9167 t1 = fold_relational_const (code, type, arg0, arg1);
9168 if (t1 != NULL_TREE)
9172 /* If the first operand is NaN, the result is constant. */
9173 if (TREE_CODE (arg0) == REAL_CST
9174 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9175 && (code != LTGT_EXPR || ! flag_trapping_math))
9177 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9180 return omit_one_operand (type, t1, arg1);
9183 /* If the second operand is NaN, the result is constant. */
9184 if (TREE_CODE (arg1) == REAL_CST
9185 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9186 && (code != LTGT_EXPR || ! flag_trapping_math))
9188 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9191 return omit_one_operand (type, t1, arg0);
9194 /* Simplify unordered comparison of something with itself. */
9195 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9196 && operand_equal_p (arg0, arg1, 0))
9197 return constant_boolean_node (1, type);
9199 if (code == LTGT_EXPR
9200 && !flag_trapping_math
9201 && operand_equal_p (arg0, arg1, 0))
9202 return constant_boolean_node (0, type);
9204 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9206 tree targ0 = strip_float_extensions (arg0);
9207 tree targ1 = strip_float_extensions (arg1);
9208 tree newtype = TREE_TYPE (targ0);
9210 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9211 newtype = TREE_TYPE (targ1);
9213 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9214 return fold (build2 (code, type, fold_convert (newtype, targ0),
9215 fold_convert (newtype, targ1)));
9221 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9222 so all simple results must be passed through pedantic_non_lvalue. */
9223 if (TREE_CODE (arg0) == INTEGER_CST)
9225 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9226 /* Only optimize constant conditions when the selected branch
9227 has the same type as the COND_EXPR. This avoids optimizing
9228 away "c ? x : throw", where the throw has a void type. */
9229 if (! VOID_TYPE_P (TREE_TYPE (tem))
9230 || VOID_TYPE_P (type))
9231 return pedantic_non_lvalue (tem);
9234 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9235 return pedantic_omit_one_operand (type, arg1, arg0);
9237 /* If we have A op B ? A : C, we may be able to convert this to a
9238 simpler expression, depending on the operation and the values
9239 of B and C. Signed zeros prevent all of these transformations,
9240 for reasons given above each one.
9242 Also try swapping the arguments and inverting the conditional. */
9243 if (COMPARISON_CLASS_P (arg0)
9244 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9245 arg1, TREE_OPERAND (arg0, 1))
9246 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9248 tem = fold_cond_expr_with_comparison (type, arg0,
9249 TREE_OPERAND (t, 1),
9250 TREE_OPERAND (t, 2));
9255 if (COMPARISON_CLASS_P (arg0)
9256 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9257 TREE_OPERAND (t, 2),
9258 TREE_OPERAND (arg0, 1))
9259 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9261 tem = invert_truthvalue (arg0);
9262 if (COMPARISON_CLASS_P (tem))
9264 tem = fold_cond_expr_with_comparison (type, tem,
9265 TREE_OPERAND (t, 2),
9266 TREE_OPERAND (t, 1));
9272 /* If the second operand is simpler than the third, swap them
9273 since that produces better jump optimization results. */
9274 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9275 TREE_OPERAND (t, 2), false))
9277 /* See if this can be inverted. If it can't, possibly because
9278 it was a floating-point inequality comparison, don't do
9280 tem = invert_truthvalue (arg0);
9282 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9283 return fold (build3 (code, type, tem,
9284 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9287 /* Convert A ? 1 : 0 to simply A. */
9288 if (integer_onep (TREE_OPERAND (t, 1))
9289 && integer_zerop (TREE_OPERAND (t, 2))
9290 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9291 call to fold will try to move the conversion inside
9292 a COND, which will recurse. In that case, the COND_EXPR
9293 is probably the best choice, so leave it alone. */
9294 && type == TREE_TYPE (arg0))
9295 return pedantic_non_lvalue (arg0);
9297 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9298 over COND_EXPR in cases such as floating point comparisons. */
9299 if (integer_zerop (TREE_OPERAND (t, 1))
9300 && integer_onep (TREE_OPERAND (t, 2))
9301 && truth_value_p (TREE_CODE (arg0)))
9302 return pedantic_non_lvalue (fold_convert (type,
9303 invert_truthvalue (arg0)));
9305 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9306 if (TREE_CODE (arg0) == LT_EXPR
9307 && integer_zerop (TREE_OPERAND (arg0, 1))
9308 && integer_zerop (TREE_OPERAND (t, 2))
9309 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9310 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9311 TREE_TYPE (tem), tem, arg1)));
9313 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9314 already handled above. */
9315 if (TREE_CODE (arg0) == BIT_AND_EXPR
9316 && integer_onep (TREE_OPERAND (arg0, 1))
9317 && integer_zerop (TREE_OPERAND (t, 2))
9318 && integer_pow2p (arg1))
9320 tree tem = TREE_OPERAND (arg0, 0);
9322 if (TREE_CODE (tem) == RSHIFT_EXPR
9323 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9324 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9325 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9326 return fold (build2 (BIT_AND_EXPR, type,
9327 TREE_OPERAND (tem, 0), arg1));
9330 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9331 is probably obsolete because the first operand should be a
9332 truth value (that's why we have the two cases above), but let's
9333 leave it in until we can confirm this for all front-ends. */
9334 if (integer_zerop (TREE_OPERAND (t, 2))
9335 && TREE_CODE (arg0) == NE_EXPR
9336 && integer_zerop (TREE_OPERAND (arg0, 1))
9337 && integer_pow2p (arg1)
9338 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9339 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9340 arg1, OEP_ONLY_CONST))
9341 return pedantic_non_lvalue (fold_convert (type,
9342 TREE_OPERAND (arg0, 0)));
9344 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9345 if (integer_zerop (TREE_OPERAND (t, 2))
9346 && truth_value_p (TREE_CODE (arg0))
9347 && truth_value_p (TREE_CODE (arg1)))
9348 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9350 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9351 if (integer_onep (TREE_OPERAND (t, 2))
9352 && truth_value_p (TREE_CODE (arg0))
9353 && truth_value_p (TREE_CODE (arg1)))
9355 /* Only perform transformation if ARG0 is easily inverted. */
9356 tem = invert_truthvalue (arg0);
9357 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9358 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9361 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9362 if (integer_zerop (arg1)
9363 && truth_value_p (TREE_CODE (arg0))
9364 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9366 /* Only perform transformation if ARG0 is easily inverted. */
9367 tem = invert_truthvalue (arg0);
9368 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9369 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9370 TREE_OPERAND (t, 2)));
9373 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9374 if (integer_onep (arg1)
9375 && truth_value_p (TREE_CODE (arg0))
9376 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9377 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9378 TREE_OPERAND (t, 2)));
9383 /* When pedantic, a compound expression can be neither an lvalue
9384 nor an integer constant expression. */
9385 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9387 /* Don't let (0, 0) be null pointer constant. */
9388 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9389 : fold_convert (type, arg1);
9390 return pedantic_non_lvalue (tem);
9394 return build_complex (type, arg0, arg1);
9398 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9400 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9401 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9402 TREE_OPERAND (arg0, 1));
9403 else if (TREE_CODE (arg0) == COMPLEX_CST)
9404 return TREE_REALPART (arg0);
9405 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9406 return fold (build2 (TREE_CODE (arg0), type,
9407 fold (build1 (REALPART_EXPR, type,
9408 TREE_OPERAND (arg0, 0))),
9409 fold (build1 (REALPART_EXPR, type,
9410 TREE_OPERAND (arg0, 1)))));
9414 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9415 return fold_convert (type, integer_zero_node);
9416 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9417 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9418 TREE_OPERAND (arg0, 0));
9419 else if (TREE_CODE (arg0) == COMPLEX_CST)
9420 return TREE_IMAGPART (arg0);
9421 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9422 return fold (build2 (TREE_CODE (arg0), type,
9423 fold (build1 (IMAGPART_EXPR, type,
9424 TREE_OPERAND (arg0, 0))),
9425 fold (build1 (IMAGPART_EXPR, type,
9426 TREE_OPERAND (arg0, 1)))));
9430 /* Check for a built-in function. */
9431 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9432 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9434 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9436 tree tmp = fold_builtin (t, false);
9444 } /* switch (code) */
9447 #ifdef ENABLE_FOLD_CHECKING
/* Forward declarations for the fold-checking (--enable-checking=fold)
   machinery defined below.  */
9450 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9451 static void fold_check_failed (tree, tree);
9452 void print_fold_checksum (tree);
9454 /* When --enable-checking=fold, compute a digest of expr before
9455 and after actual fold call to see if fold did not accidentally
9456 change original expr. */
/* NOTE(review): this excerpt is missing intermediate lines of the
   original (the `tree fold (tree expr)' function header and the local
   declarations of CTX, HT and RET, among others) -- confirm against
   the complete fold-const.c before modifying.  */
9463 unsigned char checksum_before[16], checksum_after[16];
/* Digest EXPR before folding.  HT records visited nodes so shared
   subtrees are hashed only once (see fold_checksum_tree).  */
9466 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9467 md5_init_ctx (&ctx);
9468 fold_checksum_tree (expr, &ctx, ht);
9469 md5_finish_ctx (&ctx, checksum_before);
/* Do the actual folding.  */
9472 ret = fold_1 (expr);
/* Digest EXPR again after folding ...  */
9474 md5_init_ctx (&ctx);
9475 fold_checksum_tree (expr, &ctx, ht);
9476 md5_finish_ctx (&ctx, checksum_after);
/* ... and fail hard if fold_1 modified its input tree in place.  */
9479 if (memcmp (checksum_before, checksum_after, 16))
9480 fold_check_failed (expr, ret);
/* Debugging helper: write the MD5 digest of EXPR (as computed by
   fold_checksum_tree) to stderr as 32 hex digits followed by a
   newline.  Declared non-static above -- presumably so it can be
   invoked by hand from a debugger; verify before changing linkage.  */
9486 print_fold_checksum (tree expr)
9489 unsigned char checksum[16], cnt;
9492 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9493 md5_init_ctx (&ctx);
9494 fold_checksum_tree (expr, &ctx, ht);
9495 md5_finish_ctx (&ctx, checksum);
/* An MD5 digest is 16 bytes; print each byte as two hex digits.  */
9497 for (cnt = 0; cnt < 16; ++cnt)
9498 fprintf (stderr, "%02x", checksum[cnt]);
9499 putc ('\n', stderr);
/* Called when the before/after digests differ, i.e. fold modified the
   tree it was handed.  EXPR and RET are unused in the body (hence
   ATTRIBUTE_UNUSED); presumably they are kept as parameters so a
   debugger can inspect them at the failure point.  */
9503 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
/* Report an internal compiler error; the check cannot be recovered.  */
9505 internal_error ("fold check: original tree changed by fold");
/* Accumulate into *CTX an MD5 digest of EXPR and of every tree node
   reachable from it.  HT remembers nodes already visited so each
   shared subtree is hashed only once (and the walk terminates on
   DAG-shaped trees).  Fields that fold is legitimately allowed to
   mutate (a decl's DECL_ASSEMBLER_NAME; a type's pointer-to,
   reference-to and cached-values fields) are nulled out in a stack
   copy before hashing so that changing them does not trip the check.
   NOTE(review): scattered lines of the original (some local
   declarations, braces and case labels) are elided in this excerpt.  */
9509 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9512 enum tree_code code;
/* BUF must be large enough to hold a copy of any node we sanitize;
   the assertion below verifies tree_decl is big enough for all.  */
9513 char buf[sizeof (struct tree_decl)];
9516 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9517 <= sizeof (struct tree_decl))
9518 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
/* INSERT marks EXPR as visited; already-seen nodes are not re-hashed
   (the *SLOT test is among the elided lines).  */
9521 slot = htab_find_slot (ht, expr, INSERT);
9525 code = TREE_CODE (expr);
9526 if (TREE_CODE_CLASS (code) == tcc_declaration
9527 && DECL_ASSEMBLER_NAME_SET_P (expr))
9529 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9530 memcpy (buf, expr, tree_size (expr));
9532 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9534 else if (TREE_CODE_CLASS (code) == tcc_type
9535 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9536 || TYPE_CACHED_VALUES_P (expr)))
9538 /* Allow these fields to be modified. */
9539 memcpy (buf, expr, tree_size (expr));
9541 TYPE_POINTER_TO (expr) = NULL;
9542 TYPE_REFERENCE_TO (expr) = NULL;
9543 TYPE_CACHED_VALUES_P (expr) = 0;
9544 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node's raw bytes, then recurse into its type and -- for
   classes where chaining is meaningful -- its TREE_CHAIN.  */
9546 md5_process_bytes (expr, tree_size (expr), ctx);
9547 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9548 if (TREE_CODE_CLASS (code) != tcc_type
9549 && TREE_CODE_CLASS (code) != tcc_declaration)
9550 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Recurse into the class-specific sub-structure of the node.  */
9551 switch (TREE_CODE_CLASS (code))
9557 md5_process_bytes (TREE_STRING_POINTER (expr),
9558 TREE_STRING_LENGTH (expr), ctx);
9561 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9562 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9565 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9571 case tcc_exceptional:
9575 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9576 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9579 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9580 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* Expressions and comparisons: hash every operand.  */
9586 case tcc_expression:
9588 case tcc_comparison:
9592 len = TREE_CODE_LENGTH (code);
9593 for (i = 0; i < len; ++i)
9594 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declarations: hash the trees hanging off the decl's fields.  */
9596 case tcc_declaration:
9597 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9598 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9599 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9600 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9601 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9602 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9603 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9604 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9605 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9606 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9607 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Types: hash sizes, bounds, attributes, variants and context.  */
9610 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9611 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9612 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9613 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9614 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9615 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9616 if (INTEGRAL_TYPE_P (expr)
9617 || SCALAR_FLOAT_TYPE_P (expr))
9619 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9620 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9622 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9623 if (TREE_CODE (expr) == RECORD_TYPE
9624 || TREE_CODE (expr) == UNION_TYPE
9625 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9626 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9627 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9636 /* Perform constant folding and related simplification of initializer
9637 expression EXPR. This behaves identically to "fold" but ignores
9638 potential run-time traps and exceptions that fold must preserve. */
/* NOTE(review): the return-type line, braces, the clearing of
   flag_trapv and the final `return result;' appear to be elided in
   this excerpt -- confirm against the complete fold-const.c.  */
9641 fold_initializer (tree expr)
/* Save the global trap/rounding-related flags ...  */
9643 int saved_signaling_nans = flag_signaling_nans;
9644 int saved_trapping_math = flag_trapping_math;
9645 int saved_rounding_math = flag_rounding_math;
9646 int saved_trapv = flag_trapv;
/* ... clear them so fold may simplify without preserving traps ...  */
9649 flag_signaling_nans = 0;
9650 flag_trapping_math = 0;
9651 flag_rounding_math = 0;
9654 result = fold (expr);
/* ... and restore them afterwards, whatever fold did.  */
9656 flag_signaling_nans = saved_signaling_nans;
9657 flag_trapping_math = saved_trapping_math;
9658 flag_rounding_math = saved_rounding_math;
9659 flag_trapv = saved_trapv;
9664 /* Determine if first argument is a multiple of second argument. Return 0 if
9665 it is not, or we cannot easily determined it to be.
9667 An example of the sort of thing we care about (at this point; this routine
9668 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9669 fold cases do now) is discovering that
9671 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9677 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9679 This code also handles discovering that
9681 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9683 is a multiple of 8 so we don't have to worry about dealing with a
9686 Note that we *look* inside a SAVE_EXPR only to determine how it was
9687 calculated; it is not safe for fold to do much of anything else with the
9688 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9689 at run time. For example, the latter example above *cannot* be implemented
9690 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9691 evaluation time of the original SAVE_EXPR is not necessarily the same at
9692 the time the new expression is evaluated. The only optimization of this
9693 sort that would be valid is changing
9695 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9699 SAVE_EXPR (I) * SAVE_EXPR (J)
9701 (where the same SAVE_EXPR (J) is used in the original and the
9702 transformed version). */
/* NOTE(review): the return type, braces, the `return 0;'/`return 1;'
   statements and most case labels are elided in this excerpt; the
   added comments below describe only the code that is visible.  */
9705 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
9707 if (operand_equal_p (top, bottom, 0))
/* Only plain integer types are handled.  */
9710 if (TREE_CODE (type) != INTEGER_TYPE)
9713 switch (TREE_CODE (top))
9716 /* Bitwise and provides a power of two multiple. If the mask is
9717 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
9718 if (!integer_pow2p (bottom))
/* (case label elided) A product is a multiple of BOTTOM if either
   factor is.  */
9723 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9724 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* (case label elided) A sum or difference is a multiple only when
   both operands are.  */
9728 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9729 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* (case label elided) Shift by an in-range constant: presumably this
   rebuilds the shifted constant via const_binop and recurses on it --
   the const_binop argument lines are elided; confirm against the
   full source.  */
9732 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9736 op1 = TREE_OPERAND (top, 1);
9737 /* const_binop may not detect overflow correctly,
9738 so check for it explicitly here. */
9739 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9740 > TREE_INT_CST_LOW (op1)
9741 && TREE_INT_CST_HIGH (op1) == 0
9742 && 0 != (t1 = fold_convert (type,
9743 const_binop (LSHIFT_EXPR,
9746 && ! TREE_OVERFLOW (t1))
9747 return multiple_of_p (type, t1, bottom);
9752 /* Can't handle conversions from non-integral or wider integral type. */
9753 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9754 || (TYPE_PRECISION (type)
9755 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9758 /* .. fall through ... */
/* Look through the conversion (or, per the header comment, into how
   a SAVE_EXPR was computed) and test the operand.  */
9761 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* (case label elided) Constant TOP: decide by exact modulo, but
   refuse combinations where an unsigned TYPE would misread a
   negative operand's sign.  */
9764 if (TREE_CODE (bottom) != INTEGER_CST
9765 || (TYPE_UNSIGNED (type)
9766 && (tree_int_cst_sgn (top) < 0
9767 || tree_int_cst_sgn (bottom) < 0)))
9769 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9777 /* Return true if `t' is known to be non-negative. */
/* NOTE(review): many case labels, braces and `return' lines are
   elided in this excerpt (including the final `return 0;'); the added
   comments describe only what the visible code establishes.  The
   function is conservative: any unknown case should answer false.  */
9780 tree_expr_nonnegative_p (tree t)
9782 switch (TREE_CODE (t))
/* (case label elided) Integer constant: test its sign directly.  */
9788 return tree_int_cst_sgn (t) >= 0;
/* (case label elided) Real constant: nonnegative iff its sign bit is
   clear.  */
9791 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* (case label elided; an additive expression) Float addition of two
   nonnegative values stays nonnegative.  */
9794 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9795 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9796 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9798 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9799 both unsigned and at least 2 bits shorter than the result. */
9800 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9801 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9802 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9804 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9805 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9806 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9807 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* The sum of two PREC-1-bit values fits in PREC bits, so it cannot
   wrap into the sign bit of the wider result.  */
9809 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9810 TYPE_PRECISION (inner2)) + 1;
9811 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* (case label elided; a multiplication) */
9817 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9819 /* x * x for floating point x is always non-negative. */
9820 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9822 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9823 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9826 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9827 both unsigned and their total bits is shorter than the result. */
9828 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9829 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9830 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9832 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9833 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9834 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9835 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9836 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9837 < TYPE_PRECISION (TREE_TYPE (t));
/* Division of two nonnegative values is nonnegative regardless of the
   rounding mode of the division.  */
9841 case TRUNC_DIV_EXPR:
9843 case FLOOR_DIV_EXPR:
9844 case ROUND_DIV_EXPR:
9845 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9846 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* For these modulo variants the result's sign depends only on the
   first operand.  */
9848 case TRUNC_MOD_EXPR:
9850 case FLOOR_MOD_EXPR:
9851 case ROUND_MOD_EXPR:
9852 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* (case labels elided) Remaining binary cases combine the operands'
   signs with && (both needed) or || (either sufficient).  */
9855 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9856 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9859 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9860 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9864 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* (case label elided; a conversion) Decide from the inner/outer type
   pair whether the conversion can introduce a negative value.  */
9868 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9869 tree outer_type = TREE_TYPE (t);
9871 if (TREE_CODE (outer_type) == REAL_TYPE)
9873 if (TREE_CODE (inner_type) == REAL_TYPE)
9874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9875 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9877 if (TYPE_UNSIGNED (inner_type))
9879 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9882 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9884 if (TREE_CODE (inner_type) == REAL_TYPE)
9885 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9886 if (TREE_CODE (inner_type) == INTEGER_TYPE)
/* A strictly narrower unsigned value zero-extends, so the result
   cannot be negative.  */
9887 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9888 && TYPE_UNSIGNED (inner_type);
/* (case label elided; presumably a conditional) Both selectable
   operands must be nonnegative -- TODO confirm against full source.  */
9894 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9895 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* (case labels elided) Various wrappers: the sign is that of the
   operand(s) tested below.  */
9897 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9899 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9900 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9902 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9903 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9905 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9907 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9909 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9910 case NON_LVALUE_EXPR:
9911 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9913 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* (TARGET_EXPR) Analyze what value the initializer stores in the
   temporary slot.  */
9917 tree temp = TARGET_EXPR_SLOT (t);
9918 t = TARGET_EXPR_INITIAL (t);
9920 /* If the initializer is non-void, then it's a normal expression
9921 that will be assigned to the slot. */
9922 if (!VOID_TYPE_P (t))
9923 return tree_expr_nonnegative_p (t);
9925 /* Otherwise, the initializer sets the slot in some way. One common
9926 way is an assignment statement at the end of the initializer. */
9929 if (TREE_CODE (t) == BIND_EXPR)
9930 t = expr_last (BIND_EXPR_BODY (t));
9931 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9932 || TREE_CODE (t) == TRY_CATCH_EXPR)
9933 t = expr_last (TREE_OPERAND (t, 0));
9934 else if (TREE_CODE (t) == STATEMENT_LIST)
/* If the last statement assigns to the slot, its RHS decides.  */
9939 if (TREE_CODE (t) == MODIFY_EXPR
9940 && TREE_OPERAND (t, 0) == temp)
9941 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* (CALL_EXPR) Recognize calls to normal builtins whose mathematical
   result is known nonnegative (or follows from their arguments).  */
9948 tree fndecl = get_callee_fndecl (t);
9949 tree arglist = TREE_OPERAND (t, 1);
9950 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9951 switch (DECL_FUNCTION_CODE (fndecl))
/* Expand to the float (f/none/l) or integer (none/l/ll) variants of a
   builtin in one case label.  */
9953 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9954 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9955 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9956 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* Builtins whose result is always nonnegative.  */
9958 CASE_BUILTIN_F (BUILT_IN_ACOS)
9959 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9960 CASE_BUILTIN_F (BUILT_IN_CABS)
9961 CASE_BUILTIN_F (BUILT_IN_COSH)
9962 CASE_BUILTIN_F (BUILT_IN_ERFC)
9963 CASE_BUILTIN_F (BUILT_IN_EXP)
9964 CASE_BUILTIN_F (BUILT_IN_EXP10)
9965 CASE_BUILTIN_F (BUILT_IN_EXP2)
9966 CASE_BUILTIN_F (BUILT_IN_FABS)
9967 CASE_BUILTIN_F (BUILT_IN_FDIM)
9968 CASE_BUILTIN_F (BUILT_IN_FREXP)
9969 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9970 CASE_BUILTIN_F (BUILT_IN_POW10)
9971 CASE_BUILTIN_I (BUILT_IN_FFS)
9972 CASE_BUILTIN_I (BUILT_IN_PARITY)
9973 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9977 CASE_BUILTIN_F (BUILT_IN_SQRT)
9978 /* sqrt(-0.0) is -0.0. */
9979 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9981 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
/* Builtins that preserve the sign of their first argument.  */
9983 CASE_BUILTIN_F (BUILT_IN_ASINH)
9984 CASE_BUILTIN_F (BUILT_IN_ATAN)
9985 CASE_BUILTIN_F (BUILT_IN_ATANH)
9986 CASE_BUILTIN_F (BUILT_IN_CBRT)
9987 CASE_BUILTIN_F (BUILT_IN_CEIL)
9988 CASE_BUILTIN_F (BUILT_IN_ERF)
9989 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9990 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9991 CASE_BUILTIN_F (BUILT_IN_FMOD)
9992 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9993 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9994 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9995 CASE_BUILTIN_F (BUILT_IN_LRINT)
9996 CASE_BUILTIN_F (BUILT_IN_LROUND)
9997 CASE_BUILTIN_F (BUILT_IN_MODF)
9998 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9999 CASE_BUILTIN_F (BUILT_IN_POW)
10000 CASE_BUILTIN_F (BUILT_IN_RINT)
10001 CASE_BUILTIN_F (BUILT_IN_ROUND)
10002 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10003 CASE_BUILTIN_F (BUILT_IN_SINH)
10004 CASE_BUILTIN_F (BUILT_IN_TANH)
10005 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10006 /* True if the 1st argument is nonnegative. */
10007 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10009 CASE_BUILTIN_F (BUILT_IN_FMAX)
10010 /* True if the 1st OR 2nd arguments are nonnegative. */
10011 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10012 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10014 CASE_BUILTIN_F (BUILT_IN_FMIN)
10015 /* True if the 1st AND 2nd arguments are nonnegative. */
10016 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10017 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10019 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10020 /* True if the 2nd argument is nonnegative. */
10021 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10025 #undef CASE_BUILTIN_F
10026 #undef CASE_BUILTIN_I
10030 /* ... fall through ... */
10033 if (truth_value_p (TREE_CODE (t)))
10034 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10038 /* We don't know sign of `t', so be conservative and return false. */
10042 /* Return true when T is an address and is known to be nonzero.
10043 For floating point we further ensure that T is not denormal.
10044 Similar logic is present in nonzero_address in rtlanal.h. */
/* NOTE(review): this listing is elided -- the return type, several case
   labels, breaks and closing braces are missing between the numbered
   lines.  Comments below describe only the statements that are shown. */
10047 tree_expr_nonzero_p (tree t)
10049 tree type = TREE_TYPE (t);
10051 /* Doing something useful for floating point would need more work. */
10052 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10055 switch (TREE_CODE (t))
/* Negation arm: nonzero iff the operand is, but only when signed
   overflow is undefined (type not unsigned, no -fwrapv). */
10058 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10059 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10062 /* We used to test for !integer_zerop here. This does not work correctly
10063 if TREE_CONSTANT_OVERFLOW (t). */
10064 return (TREE_INT_CST_LOW (t) != 0
10065 || TREE_INT_CST_HIGH (t) != 0);
/* Addition arm: both operands nonnegative and at least one nonzero
   implies a nonzero sum (no wrap possible under these flags). */
10068 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10070 /* With the presence of negative values it is hard
10071 to say something. */
10072 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10073 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10075 /* One of operands must be positive and the other non-negative. */
10076 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10077 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Multiplication arm: product of two nonzero values is nonzero when
   signed overflow is undefined. */
10082 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10084 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10085 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversion arm: a same-width or widening conversion preserves the
   nonzero-ness of the inner operand. */
10091 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10092 tree outer_type = TREE_TYPE (t);
10094 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10095 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Address arm: &object is nonzero unless the base declaration is weak
   (a weak symbol may resolve to NULL at link time). */
10101 tree base = get_base_address (TREE_OPERAND (t, 0));
10106 /* Weak declarations may link to NULL. */
10108 return !DECL_WEAK (base);
10110 /* Constants are never weak. */
10111 if (CONSTANT_CLASS_P (base))
/* COND_EXPR arm: nonzero only if both selectable values are nonzero. */
10118 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10119 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* Presumably the MIN_EXPR arm (both operands must be nonzero) --
   TODO confirm: the case label is not visible in this listing. */
10122 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10123 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MAX_EXPR arm: three subcases, documented inline below. */
10126 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10128 /* When both operands are nonzero, then MAX must be too. */
10129 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10132 /* MAX where operand 0 is positive is positive. */
10133 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10135 /* MAX where operand 1 is positive is positive. */
10136 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10137 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10141 case COMPOUND_EXPR:
/* Only the second operand of these nodes supplies the value. */
10144 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10147 case NON_LVALUE_EXPR:
10148 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Presumably the BIT_IOR_EXPR arm (either operand nonzero suffices) --
   TODO confirm: the case label is not visible in this listing. */
10151 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10152 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10160 /* See if we are applying CODE, a relational to the highest or lowest
10161 possible integer of TYPE. If so, then the result is a compile
/* NOTE(review): elided listing -- the rest of this header comment, the
   OP1_P parameter line and many statements are missing.  CODE_P, OP0_P
   and OP1_P are in/out: the comparison may be rewritten in place
   (e.g. X >= C  becomes  X > C-1) or folded to a constant, in which
   case the folded tree is returned. */
10165 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10170 enum tree_code code = *code_p;
10171 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only single-word integer/pointer constants without recorded overflow
   are handled. */
10173 if (TREE_CODE (op1) == INTEGER_CST
10174 && ! TREE_CONSTANT_OVERFLOW (op1)
10175 && width <= HOST_BITS_PER_WIDE_INT
10176 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10177 || POINTER_TYPE_P (TREE_TYPE (op1))))
10179 unsigned HOST_WIDE_INT signed_max;
10180 unsigned HOST_WIDE_INT max, min;
/* Compute the extreme values representable in WIDTH bits. */
10182 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10184 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10186 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10192 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 == maximum value: e.g. X > MAX is always false, X <= MAX always
   true -- fold to a constant, keeping OP0 for its side effects. */
10195 if (TREE_INT_CST_HIGH (op1) == 0
10196 && TREE_INT_CST_LOW (op1) == max)
10200 return omit_one_operand (type, integer_zero_node, op0);
10206 return omit_one_operand (type, integer_one_node, op0);
10212 /* The GE_EXPR and LT_EXPR cases above are not normally
10213 reached because of previous transformations. */
/* OP1 == max-1: canonicalize by bumping the constant to MAX. */
10218 else if (TREE_INT_CST_HIGH (op1) == 0
10219 && TREE_INT_CST_LOW (op1) == max - 1)
10224 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10228 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* OP1 == minimum value: X < MIN is always false, X >= MIN always true. */
10233 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10234 && TREE_INT_CST_LOW (op1) == min)
10238 return omit_one_operand (type, integer_zero_node, op0);
10245 return omit_one_operand (type, integer_one_node, op0);
/* OP1 == min+1: canonicalize by lowering the constant to MIN. */
10254 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10255 && TREE_INT_CST_LOW (op1) == min + 1)
10260 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10264 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* OP1 == signed maximum in an unsigned type: an unsigned X <= signed_max
   is equivalent to a signed X >= 0 (sign-bit test). */
10270 else if (TREE_INT_CST_HIGH (op1) == 0
10271 && TREE_INT_CST_LOW (op1) == signed_max
10272 && TYPE_UNSIGNED (TREE_TYPE (op1))
10273 /* signed_type does not work on pointer types. */
10274 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10276 /* The following case also applies to X < signed_max+1
10277 and X >= signed_max+1 because previous transformations. */
10278 if (code == LE_EXPR || code == GT_EXPR)
10280 tree st0, st1, exp, retval;
10281 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10282 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
/* Build the equivalent signed comparison against zero. */
10284 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10286 fold_convert (st0, op0),
10287 fold_convert (st1, integer_zero_node));
10289 retval = fold_binary_to_constant (TREE_CODE (exp),
10291 TREE_OPERAND (exp, 0),
10292 TREE_OPERAND (exp, 1));
10294 /* If we are in gimple form, then returning EXP would create
10295 non-gimple expressions. Clearing it is safe and insures
10296 we do not allow a non-gimple expression to escape. */
10297 if (in_gimple_form)
10300 return (retval ? retval : exp);
10309 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10310 attempt to fold the expression to a constant without modifying TYPE,
10313 If the expression could be simplified to a constant, then return
10314 the constant. If the expression would not be simplified to a
10315 constant, then return NULL_TREE.
10317 Note this is primarily designed to be called after gimplification
10318 of the tree structures and when at least one operand is a constant.
10319 As a result of those simplifying assumptions this routine is far
10320 simpler than the generic fold routine. */
/* NOTE(review): elided listing -- the return type, switch statement,
   several case labels, braces and breaks are missing between the
   numbered lines. */
10323 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10330 /* If this is a commutative operation, and ARG0 is a constant, move it
10331 to ARG1 to reduce the number of tests below. */
10332 if (commutative_tree_code (code)
10333 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10340 /* If either operand is a complex type, extract its real component. */
10341 if (TREE_CODE (op0) == COMPLEX_CST)
10342 subop0 = TREE_REALPART (op0);
10346 if (TREE_CODE (op1) == COMPLEX_CST)
10347 subop1 = TREE_REALPART (op1);
10351 /* Note if either argument is not a real or integer constant.
10352 With a few exceptions, simplification is limited to cases
10353 where both arguments are constants. */
10354 if ((TREE_CODE (subop0) != INTEGER_CST
10355 && TREE_CODE (subop0) != REAL_CST)
10356 || (TREE_CODE (subop1) != INTEGER_CST
10357 && TREE_CODE (subop1) != REAL_CST))
10363 /* (plus (address) (const_int)) is a constant. */
10364 if (TREE_CODE (op0) == PLUS_EXPR
10365 && TREE_CODE (op1) == INTEGER_CST
10366 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10367 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10368 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10370 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
/* Merge the two integer addends: (&x + C1) + C2 => &x + (C1+C2). */
10372 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10373 const_binop (PLUS_EXPR, op1,
10374 TREE_OPERAND (op0, 1), 0));
10382 /* Both arguments are constants. Simplify. */
10383 tem = const_binop (code, op0, op1, 0);
10384 if (tem != NULL_TREE)
10386 /* The return value should always have the same type as
10387 the original expression. */
10388 if (TREE_TYPE (tem) != type)
10389 tem = fold_convert (type, tem);
10396 /* Fold &x - &x. This can happen from &x.foo - &x.
10397 This is unsafe for certain floats even in non-IEEE formats.
10398 In IEEE, it is unsafe because it does wrong for NaNs.
10399 Also note that operand_equal_p is always false if an
10400 operand is volatile. */
10401 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10402 return fold_convert (type, integer_zero_node);
10408 /* Special case multiplication or bitwise AND where one argument
10410 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10411 return omit_one_operand (type, op1, op0);
/* x * 0.0 folds to 0.0 only when NaNs and signed zeros cannot occur. */
10413 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10414 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10415 && real_zerop (op1))
10416 return omit_one_operand (type, op1, op0);
10421 /* Special case when we know the result will be all ones. */
10422 if (integer_all_onesp (op1))
10423 return omit_one_operand (type, op1, op0);
10427 case TRUNC_DIV_EXPR:
10428 case ROUND_DIV_EXPR:
10429 case FLOOR_DIV_EXPR:
10430 case CEIL_DIV_EXPR:
10431 case EXACT_DIV_EXPR:
10432 case TRUNC_MOD_EXPR:
10433 case ROUND_MOD_EXPR:
10434 case FLOOR_MOD_EXPR:
10435 case CEIL_MOD_EXPR:
10437 /* Division by zero is undefined. */
10438 if (integer_zerop (op1))
/* Likewise do not fold real division by zero when the mode has no
   infinities to represent the result. */
10441 if (TREE_CODE (op1) == REAL_CST
10442 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10443 && real_zerop (op1))
/* MIN against the type minimum is always the minimum. */
10449 if (INTEGRAL_TYPE_P (type)
10450 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10451 return omit_one_operand (type, op1, op0);
/* MAX against the type maximum is always the maximum. */
10456 if (INTEGRAL_TYPE_P (type)
10457 && TYPE_MAX_VALUE (type)
10458 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10459 return omit_one_operand (type, op1, op0);
10464 /* Optimize -1 >> x for arithmetic right shifts. */
10465 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10466 return omit_one_operand (type, op0, op1);
10467 /* ... fall through ... */
10470 if (integer_zerop (op0))
10471 return omit_one_operand (type, op0, op1);
10473 /* Since negative shift count is not well-defined, don't
10474 try to compute it in the compiler. */
10475 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10482 /* -1 rotated either direction by any amount is still -1. */
10483 if (integer_all_onesp (op0))
10484 return omit_one_operand (type, op0, op1);
10486 /* 0 rotated either direction by any amount is still zero. */
10487 if (integer_zerop (op0))
10488 return omit_one_operand (type, op0, op1);
10494 return build_complex (type, op0, op1);
10503 /* If one arg is a real or integer constant, put it last. */
/* NOTE(review): the second clause below compares OP0 twice
   ("TREE_CODE (op0) != REAL_CST"), which is always false; it almost
   certainly should test OP1, mirroring the INTEGER_CST clause above.
   Cannot be fixed safely in this elided listing -- confirm against the
   complete file. */
10504 if ((TREE_CODE (op0) == INTEGER_CST
10505 && TREE_CODE (op1) != INTEGER_CST)
10506 || (TREE_CODE (op0) == REAL_CST
10507 && TREE_CODE (op0) != REAL_CST))
10514 code = swap_tree_comparison (code);
10517 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10518 This transformation affects the cases which are handled in later
10519 optimizations involving comparisons with non-negative constants. */
10520 if (TREE_CODE (op1) == INTEGER_CST
10521 && TREE_CODE (op0) != INTEGER_CST
10522 && tree_int_cst_sgn (op1) > 0)
10528 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10533 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Try the extreme-value shortcut; a non-NULL result is final. */
10541 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10545 /* Fall through. */
10548 case UNORDERED_EXPR:
10558 return fold_relational_const (code, type, op0, op1);
10561 /* This could probably be handled. */
10564 case TRUTH_AND_EXPR:
10565 /* If second arg is constant zero, result is zero, but first arg
10566 must be evaluated. */
10567 if (integer_zerop (op1))
10568 return omit_one_operand (type, op1, op0);
10569 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10570 case will be handled here. */
10571 if (integer_zerop (op0))
10572 return omit_one_operand (type, op0, op1);
/* Both constants and neither zero (handled above): result is true. */
10573 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10574 return constant_boolean_node (true, type);
10577 case TRUTH_OR_EXPR:
10578 /* If second arg is constant true, result is true, but we must
10579 evaluate first arg. */
10580 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10581 return omit_one_operand (type, op1, op0);
10582 /* Likewise for first arg, but note this only occurs here for
10584 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10585 return omit_one_operand (type, op0, op1);
/* Both constants and neither true (handled above): result is false. */
10586 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10587 return constant_boolean_node (false, type);
10590 case TRUTH_XOR_EXPR:
10591 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
/* XOR of the two truth values, each normalized to 0/1 first. */
10593 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10594 return constant_boolean_node (x, type);
10603 /* Given the components of a unary expression CODE, TYPE and OP0,
10604 attempt to fold the expression to a constant without modifying
10607 If the expression could be simplified to a constant, then return
10608 the constant. If the expression would not be simplified to a
10609 constant, then return NULL_TREE.
10611 Note this is primarily designed to be called after gimplification
10612 of the tree structures and when op0 is a constant. As a result
10613 of those simplifying assumptions this routine is far simpler than
10614 the generic fold routine. */
/* NOTE(review): elided listing -- return type, switch header, several
   case labels and breaks are missing between the numbered lines. */
10617 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10619 /* Make sure we have a suitable constant argument. */
10620 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
/* For complex constants, screen using the real component only. */
10624 if (TREE_CODE (op0) == COMPLEX_CST)
10625 subop = TREE_REALPART (op0);
10629 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Conversions (and the FIX_* variants) delegate to the shared
   constant-conversion helper. */
10638 case FIX_TRUNC_EXPR:
10639 case FIX_FLOOR_EXPR:
10640 case FIX_CEIL_EXPR:
10641 return fold_convert_const (code, type, op0);
10644 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10645 return fold_negate_const (op0, type);
10650 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10651 return fold_abs_const (op0, type);
10656 if (TREE_CODE (op0) == INTEGER_CST)
10657 return fold_not_const (op0, type);
10661 case REALPART_EXPR:
10662 if (TREE_CODE (op0) == COMPLEX_CST)
10663 return TREE_REALPART (op0);
10667 case IMAGPART_EXPR:
10668 if (TREE_CODE (op0) == COMPLEX_CST)
10669 return TREE_IMAGPART (op0);
/* Complex conjugate: negate only the imaginary component. */
10674 if (TREE_CODE (op0) == COMPLEX_CST
10675 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10676 return build_complex (type, TREE_REALPART (op0),
10677 negate_expr (TREE_IMAGPART (op0)));
10685 /* If EXP represents referencing an element in a constant string
10686 (either via pointer arithmetic or array indexing), return the
10687 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): elided listing -- return type, some declarations and
   the else branch header are missing between the numbered lines. */
10690 fold_read_from_constant_string (tree exp)
10692 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10694 tree exp1 = TREE_OPERAND (exp, 0);
/* INDIRECT_REF: recover the string and byte offset from the pointer
   expression; ARRAY_REF: use the explicit index operand. */
10698 if (TREE_CODE (exp) == INDIRECT_REF)
10699 string = string_constant (exp1, &index);
10702 tree low_bound = array_ref_low_bound (exp);
10703 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10705 /* Optimize the special-case of a zero lower bound.
10707 We convert the low_bound to sizetype to avoid some problems
10708 with constant folding. (E.g. suppose the lower bound is 1,
10709 and its mode is QI. Without the conversion,l (ARRAY
10710 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10711 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
10712 if (! integer_zerop (low_bound))
10713 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Only fold when the index is a known in-range constant and the
   element type is a 1-byte integer (i.e. a plain character). */
10719 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10720 && TREE_CODE (string) == STRING_CST
10721 && TREE_CODE (index) == INTEGER_CST
10722 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10723 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10725 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10726 return fold_convert (TREE_TYPE (exp),
10727 build_int_cst (NULL_TREE,
10728 (TREE_STRING_POINTER (string)
10729 [TREE_INT_CST_LOW (index)])));
10734 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10735 an integer constant or real constant.
10737 TYPE is the type of the result. */
/* NOTE(review): elided listing -- return type, switch header, the
   &low/&high arguments to neg_double and the final return are missing
   between the numbered lines. */
10740 fold_negate_const (tree arg0, tree type)
10742 tree t = NULL_TREE;
10744 switch (TREE_CODE (arg0))
10748 unsigned HOST_WIDE_INT low;
10749 HOST_WIDE_INT high;
/* Negate the double-word integer; OVERFLOW records wrap-around. */
10750 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10751 TREE_INT_CST_HIGH (arg0),
10753 t = build_int_cst_wide (type, low, high);
/* Propagate overflow flags; negation overflow only matters for
   signed types. */
10754 t = force_fit_type (t, 1,
10755 (overflow | TREE_OVERFLOW (arg0))
10756 && !TYPE_UNSIGNED (type),
10757 TREE_CONSTANT_OVERFLOW (arg0));
10762 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10766 gcc_unreachable ();
10772 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10773 an integer constant or real constant.
10775 TYPE is the type of the result. */
/* NOTE(review): elided listing -- return type, switch header, some
   assignments of T for the non-negative cases and the final return are
   missing between the numbered lines. */
10778 fold_abs_const (tree arg0, tree type)
10780 tree t = NULL_TREE;
10782 switch (TREE_CODE (arg0))
10785 /* If the value is unsigned, then the absolute value is
10786 the same as the ordinary value. */
10787 if (TYPE_UNSIGNED (type))
10789 /* Similarly, if the value is non-negative. */
10790 else if (INT_CST_LT (integer_minus_one_node, arg0))
10792 /* If the value is negative, then the absolute value is
/* ... its negation (text elided); mirror of fold_negate_const. */
10796 unsigned HOST_WIDE_INT low;
10797 HOST_WIDE_INT high;
10798 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10799 TREE_INT_CST_HIGH (arg0),
10801 t = build_int_cst_wide (type, low, high);
10802 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10803 TREE_CONSTANT_OVERFLOW (arg0));
/* Real constants: clear the sign bit if set. */
10808 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10809 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10815 gcc_unreachable ();
10821 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10822 constant. TYPE is the type of the result. */
/* NOTE(review): elided listing -- the return type and final
   "return t;" are missing between the numbered lines. */
10825 fold_not_const (tree arg0, tree type)
10827 tree t = NULL_TREE;
10829 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Bitwise complement of both halves of the double-word constant,
   then clamp to TYPE and carry the overflow flags forward. */
10831 t = build_int_cst_wide (type,
10832 ~ TREE_INT_CST_LOW (arg0),
10833 ~ TREE_INT_CST_HIGH (arg0));
10834 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10835 TREE_CONSTANT_OVERFLOW (arg0));
10840 /* Given CODE, a relational operator, the target type, TYPE and two
10841 constant operands OP0 and OP1, return the result of the
10842 relational operation. If the result is not a compile time
10843 constant, then return NULL_TREE. */
/* NOTE(review): elided listing -- return type, the NaN-case switch
   body, operand swaps and the final NULL_TREE path are missing between
   the numbered lines. */
10846 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10848 int result, invert;
10850 /* From here on, the only cases we handle are when the result is
10851 known to be a constant. */
10853 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10855 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10856 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10858 /* Handle the cases where either operand is a NaN. */
10859 if (real_isnan (c0) || real_isnan (c1))
10869 case UNORDERED_EXPR:
/* Ordered comparisons on a NaN may trap; do not fold them when
   trapping math is in effect. */
10883 if (flag_trapping_math)
10889 gcc_unreachable ();
10892 return constant_boolean_node (result, type);
/* Neither operand is NaN: defer to the REAL_VALUE comparison. */
10895 return constant_boolean_node (real_compare (code, c0, c1), type);
10898 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10900 To compute GT, swap the arguments and do LT.
10901 To compute GE, do LT and invert the result.
10902 To compute LE, swap the arguments, do LT and invert the result.
10903 To compute NE, do EQ and invert the result.
10905 Therefore, the code below must handle only EQ and LT. */
10907 if (code == LE_EXPR || code == GT_EXPR)
10912 code = swap_tree_comparison (code);
10915 /* Note that it is safe to invert for real values here because we
10916 have already handled the one case that it matters. */
10919 if (code == NE_EXPR || code == GE_EXPR)
10922 code = invert_tree_comparison (code, false);
10925 /* Compute a result for LT or EQ if args permit;
10926 Otherwise return T. */
10927 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10929 if (code == EQ_EXPR)
10930 result = tree_int_cst_equal (op0, op1);
/* Choose the signedness-correct double-word comparison macro. */
10931 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10932 result = INT_CST_LT_UNSIGNED (op0, op1);
10934 result = INT_CST_LT (op0, op1);
10941 return constant_boolean_node (result, type);
10944 /* Build an expression for the a clean point containing EXPR with type TYPE.
10945 Don't build a cleanup point expression for EXPR which don't have side
/* NOTE(review): elided listing -- the return type and the early
   "return expr;" statements of the no-side-effect paths are missing
   between the numbered lines. */
10949 fold_build_cleanup_point_expr (tree type, tree expr)
10951 /* If the expression does not have side effects then we don't have to wrap
10952 it with a cleanup point expression. */
10953 if (!TREE_SIDE_EFFECTS (expr))
10956 /* If the expression is a return, check to see if the expression inside the
10957 return has no side effects or the right hand side of the modify expression
10958 inside the return. If either don't have side effects set we don't need to
10959 wrap the expression in a cleanup point expression. Note we don't check the
10960 left hand side of the modify because it should always be a return decl. */
10961 if (TREE_CODE (expr) == RETURN_EXPR)
10963 tree op = TREE_OPERAND (expr, 0);
10964 if (!op || !TREE_SIDE_EFFECTS (op))
/* Here OP is the MODIFY_EXPR; inspect only its RHS. */
10966 op = TREE_OPERAND (op, 1);
10967 if (!TREE_SIDE_EFFECTS (op))
/* Otherwise EXPR really needs a cleanup point around it. */
10971 return build1 (CLEANUP_POINT_EXPR, type, expr);
10974 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10975 avoid confusing the gimplify process. */
/* NOTE(review): elided listing -- the return type, the "else" branch
   header, a base declaration and the trailing return are missing
   between the numbered lines. */
10978 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10980 /* The size of the object is not relevant when talking about its address. */
10981 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10982 t = TREE_OPERAND (t, 0);
10984 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
10985 if (TREE_CODE (t) == INDIRECT_REF
10986 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p, inserting a NOP_EXPR only if the pointer type
   differs from the requested PTRTYPE. */
10988 t = TREE_OPERAND (t, 0);
10989 if (TREE_TYPE (t) != ptrtype)
10990 t = build1 (NOP_EXPR, ptrtype, t);
/* Taking the address of T makes its innermost base addressable. */
10996 while (handled_component_p (base))
10997 base = TREE_OPERAND (base, 0);
10999 TREE_ADDRESSABLE (base) = 1;
11001 t = build1 (ADDR_EXPR, ptrtype, t);
/* Convenience wrapper: take the address of T using a freshly built
   pointer-to-TREE_TYPE(T) type.  (Return type and braces elided in
   this listing.) */
11008 build_fold_addr_expr (tree t)
11010 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11013 /* Builds an expression for an indirection through T, simplifying some
/* NOTE(review): elided listing -- the rest of this header comment, the
   return type, STRIP_NOPS/sub declarations and the "*&foo => foo"
   return are missing between the numbered lines. */
11017 build_fold_indirect_ref (tree t)
11019 tree type = TREE_TYPE (TREE_TYPE (t));
11024 if (TREE_CODE (sub) == ADDR_EXPR)
11026 tree op = TREE_OPERAND (sub, 0);
11027 tree optype = TREE_TYPE (op);
/* *&foo with compatible types simplifies to foo itself. */
11029 if (lang_hooks.types_compatible_p (type, optype))
11031 /* *(foo *)&fooarray => fooarray[0] */
11032 else if (TREE_CODE (optype) == ARRAY_TYPE
11033 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11034 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
11037 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11038 subtype = TREE_TYPE (sub);
11039 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11040 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11042 sub = build_fold_indirect_ref (sub);
11043 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
/* No simplification applies: build a plain indirection. */
11046 return build1 (INDIRECT_REF, type, t);
11049 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11050 whose result is ignored. The type of the returned tree need not be
11051 the same as the original expression. */
/* NOTE(review): elided listing -- the return type, the enclosing
   for(;;) loop, tcc_unary label, default cases, breaks and the final
   return are missing between the numbered lines. */
11054 fold_ignored_result (tree t)
11056 if (!TREE_SIDE_EFFECTS (t))
11057 return integer_zero_node;
11060 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary nodes: the operand carries any side effects. */
11063 t = TREE_OPERAND (t, 0);
11067 case tcc_comparison:
/* Keep whichever comparison operand still has side effects. */
11068 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11069 t = TREE_OPERAND (t, 0);
11070 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11071 t = TREE_OPERAND (t, 1);
11076 case tcc_expression:
11077 switch (TREE_CODE (t))
11079 case COMPOUND_EXPR:
/* A COMPOUND_EXPR can be reduced to its first operand only when
   the second has no side effects. */
11080 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11082 t = TREE_OPERAND (t, 0);
/* Conditional (presumably COND_EXPR -- label elided): strip to the
   condition only when both arms are side-effect free. */
11086 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11087 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11089 t = TREE_OPERAND (t, 0);
11102 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11103 This can only be applied to objects of a sizetype. */
/* NOTE(review): elided listing -- the return type, the divisor==1
   shortcut, braces and the final return are missing between the
   numbered lines. */
11106 round_up (tree value, int divisor)
11108 tree div = NULL_TREE;
11110 gcc_assert (divisor > 0);
11114 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11115 have to do anything. Only do this when we are not given a const,
11116 because in that case, this check is more expensive than just
11118 if (TREE_CODE (value) != INTEGER_CST)
11120 div = build_int_cst (TREE_TYPE (value), divisor);
11122 if (multiple_of_p (TREE_TYPE (value), value, div))
11126 /* If divisor is a power of two, simplify this to bit manipulation. */
11127 if (divisor == (divisor & -divisor))
/* (value + divisor-1) & -divisor : classic power-of-two round-up. */
11131 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11132 value = size_binop (PLUS_EXPR, value, t);
11133 t = build_int_cst (TREE_TYPE (value), -divisor);
11134 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide then multiply back. */
11139 div = build_int_cst (TREE_TYPE (value), divisor);
11140 value = size_binop (CEIL_DIV_EXPR, value, div);
11141 value = size_binop (MULT_EXPR, value, div);
11147 /* Likewise, but round down. */
/* NOTE(review): elided listing -- the return type, the divisor==1
   shortcut, braces and the final return are missing between the
   numbered lines.  Mirror image of round_up above. */
11150 round_down (tree value, int divisor)
11152 tree div = NULL_TREE;
11154 gcc_assert (divisor > 0);
11158 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11159 have to do anything. Only do this when we are not given a const,
11160 because in that case, this check is more expensive than just
11162 if (TREE_CODE (value) != INTEGER_CST)
11164 div = build_int_cst (TREE_TYPE (value), divisor);
11166 if (multiple_of_p (TREE_TYPE (value), value, div))
11170 /* If divisor is a power of two, simplify this to bit manipulation. */
11171 if (divisor == (divisor & -divisor))
/* value & -divisor : clears the low bits, rounding down. */
11175 t = build_int_cst (TREE_TYPE (value), -divisor);
11176 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: floor-divide then multiply back. */
11181 div = build_int_cst (TREE_TYPE (value), divisor);
11182 value = size_binop (FLOOR_DIV_EXPR, value, div);
11183 value = size_binop (MULT_EXPR, value, div);
11189 /* Returns the pointer to the base of the object addressed by EXP and
11190 extracts the information about the offset of the access, storing it
11191 to PBITPOS and POFFSET. */
/* NOTE(review): elided listing -- the return type, the "core"
   declaration, the non-ADDR_EXPR else branch and the returns are
   missing between the numbered lines. */
11194 split_address_to_core_and_offset (tree exp,
11195 HOST_WIDE_INT *pbitpos, tree *poffset)
11198 enum machine_mode mode;
11199 int unsignedp, volatilep;
11200 HOST_WIDE_INT bitsize;
11202 if (TREE_CODE (exp) == ADDR_EXPR)
/* Peel the ADDR_EXPR and let get_inner_reference compute the
   constant bit position and any variable offset. */
11204 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11205 poffset, &mode, &unsignedp, &volatilep,
/* *&x: strip the indirection so the core is the pointer itself. */
11208 if (TREE_CODE (core) == INDIRECT_REF)
11209 core = TREE_OPERAND (core, 0);
/* Non-address expressions carry no extractable offset. */
11215 *poffset = NULL_TREE;
11221 /* Returns true if addresses of E1 and E2 differ by a constant, false
11222 otherwise. If they do, E1 - E2 is stored in *DIFF. */
/* NOTE(review): elided listing -- the return type, the core1/core2
   declarations, several "return false" statements, the *diff = 0
   branch and the final "return true" are missing between the numbered
   lines. */
11225 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11228 HOST_WIDE_INT bitpos1, bitpos2;
11229 tree toffset1, toffset2, tdiff, type;
11231 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11232 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Both addresses must share the same base and be byte-aligned for the
   difference to be a meaningful byte count. */
11234 if (bitpos1 % BITS_PER_UNIT != 0
11235 || bitpos2 % BITS_PER_UNIT != 0
11236 || !operand_equal_p (core1, core2, 0))
11239 if (toffset1 && toffset2)
/* Both variable offsets present: their difference must itself fold
   to a host-representable integer constant. */
11241 type = TREE_TYPE (toffset1);
11242 if (type != TREE_TYPE (toffset2))
11243 toffset2 = fold_convert (type, toffset2);
11245 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11246 if (!host_integerp (tdiff, 0))
11249 *diff = tree_low_cst (tdiff, 0);
11251 else if (toffset1 || toffset2)
11253 /* If only one of the offsets is non-constant, the difference cannot
/* Add in the constant part of the offsets (bit positions in bytes). */
11260 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11264 /* Simplify the floating point expression EXP when the sign of the
11265 result is not significant. Return NULL_TREE if no simplification
11269 fold_strip_sign_ops (tree exp)
11273 switch (TREE_CODE (exp))
11277 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11278 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11282 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11284 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11285 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11286 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11287 return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
11288 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11289 arg1 ? arg1 : TREE_OPERAND (exp, 1)));