1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum tree_code invert_tree_comparison (enum tree_code, bool);
93 static enum comparison_code comparison_to_compcode (enum tree_code);
94 static enum tree_code compcode_to_comparison (enum comparison_code);
95 static tree combine_comparisons (enum tree_code, enum tree_code,
96 enum tree_code, tree, tree, tree);
97 static int truth_value_p (enum tree_code);
98 static int operand_equal_for_comparison_p (tree, tree, tree);
99 static int twoval_comparison_p (tree, tree *, tree *, int *);
100 static tree eval_subst (tree, tree, tree, tree, tree);
101 static tree pedantic_omit_one_operand (tree, tree, tree);
102 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
103 static tree make_bit_field_ref (tree, tree, int, int, int);
104 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
105 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
106 enum machine_mode *, int *, int *,
108 static int all_ones_mask_p (tree, int);
109 static tree sign_bit_p (tree, tree);
110 static int simple_operand_p (tree);
111 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
112 static tree make_range (tree, int *, tree *, tree *);
113 static tree build_range_check (tree, tree, int, tree, tree);
114 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
116 static tree fold_range_test (tree);
117 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
118 static tree unextend (tree, int, int, tree);
119 static tree fold_truthop (enum tree_code, tree, tree, tree);
120 static tree optimize_minmax_comparison (tree);
121 static tree extract_muldiv (tree, tree, enum tree_code, tree);
122 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
123 static int multiple_of_p (tree, tree, tree);
124 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static tree fold_relational_hi_lo (enum tree_code *, const tree,
137 static bool tree_expr_nonzero_p (tree);
139 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
140 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
141 and SUM1. Then this yields nonzero if overflow occurred during the
144 Overflow occurs if A and B have the same sign, but A and SUM differ in
145 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
147 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
149 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
150 We do that by representing the two-word integer in 4 words, with only
151 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
152 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* Extract the low HOST_BITS_PER_WIDE_INT/2 bits of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* Extract the high HOST_BITS_PER_WIDE_INT/2 bits of X.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the 4-word representation: one digit holds half a word.  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
160 /* Unpack a two-word integer into 4 words.
161 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
162 WORDS points to the array of HOST_WIDE_INTs. */
165 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
167 words[0] = LOWPART (low);
168 words[1] = HIGHPART (low);
169 words[2] = LOWPART (hi);
170 words[3] = HIGHPART (hi);
173 /* Pack an array of 4 words into a two-word integer.
174 WORDS points to the array of words.
175 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
178 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
181 *low = words[0] + words[1] * BASE;
182 *hi = words[2] + words[3] * BASE;
185 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
186 in overflow of the value, when >0 we are only interested in signed
187 overflow, for <0 we are interested in any overflow. OVERFLOWED
188 indicates whether overflow has already occurred. CONST_OVERFLOWED
189 indicates whether constant overflow has already occurred. We force
190 T's value to be within range of T's type (by setting to 0 or 1 all
191 the bits outside the type's range). We set TREE_OVERFLOWED if,
192 OVERFLOWED is nonzero,
193 or OVERFLOWABLE is >0 and signed overflow occurs
194 or OVERFLOWABLE is <0 and any overflow occurs
195 We set TREE_CONSTANT_OVERFLOWED if,
196 CONST_OVERFLOWED is nonzero
197 or we set TREE_OVERFLOWED.
198 We return either the original T, or a copy. */
201 force_fit_type (tree t, int overflowable,
202 bool overflowed, bool overflowed_const)
204 unsigned HOST_WIDE_INT low;
207 int sign_extended_type;
209 gcc_assert (TREE_CODE (t) == INTEGER_CST);
211 low = TREE_INT_CST_LOW (t);
212 high = TREE_INT_CST_HIGH (t);
214 if (POINTER_TYPE_P (TREE_TYPE (t))
215 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
218 prec = TYPE_PRECISION (TREE_TYPE (t));
219 /* Size types *are* sign extended. */
220 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
221 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
222 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
224 /* First clear all bits that are beyond the type's precision. */
226 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
228 else if (prec > HOST_BITS_PER_WIDE_INT)
229 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
233 if (prec < HOST_BITS_PER_WIDE_INT)
234 low &= ~((HOST_WIDE_INT) (-1) << prec);
237 if (!sign_extended_type)
238 /* No sign extension */;
239 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
240 /* Correct width already. */;
241 else if (prec > HOST_BITS_PER_WIDE_INT)
243 /* Sign extend top half? */
244 if (high & ((unsigned HOST_WIDE_INT)1
245 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
246 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
248 else if (prec == HOST_BITS_PER_WIDE_INT)
250 if ((HOST_WIDE_INT)low < 0)
255 /* Sign extend bottom half? */
256 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
259 low |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value changed, return a new node. */
264 if (overflowed || overflowed_const
265 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
267 t = build_int_cst_wide (TREE_TYPE (t), low, high);
271 || (overflowable > 0 && sign_extended_type))
274 TREE_OVERFLOW (t) = 1;
275 TREE_CONSTANT_OVERFLOW (t) = 1;
277 else if (overflowed_const)
280 TREE_CONSTANT_OVERFLOW (t) = 1;
287 /* Add two doubleword integers with doubleword result.
288 Each argument is given as two `HOST_WIDE_INT' pieces.
289 One argument is L1 and H1; the other, L2 and H2.
290 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
293 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
294 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
295 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
297 unsigned HOST_WIDE_INT l;
301 h = h1 + h2 + (l < l1);
305 return OVERFLOW_SUM_SIGN (h1, h2, h);
308 /* Negate a doubleword integer with doubleword result.
309 Return nonzero if the operation overflows, assuming it's signed.
310 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
311 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
314 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
315 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
321 return (*hv & h1) < 0;
331 /* Multiply two doubleword integers with doubleword result.
332 Return nonzero if the operation overflows, assuming it's signed.
333 Each argument is given as two `HOST_WIDE_INT' pieces.
334 One argument is L1 and H1; the other, L2 and H2.
335 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
338 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
339 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
340 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
342 HOST_WIDE_INT arg1[4];
343 HOST_WIDE_INT arg2[4];
344 HOST_WIDE_INT prod[4 * 2];
345 unsigned HOST_WIDE_INT carry;
347 unsigned HOST_WIDE_INT toplow, neglow;
348 HOST_WIDE_INT tophigh, neghigh;
350 encode (arg1, l1, h1);
351 encode (arg2, l2, h2);
353 memset (prod, 0, sizeof prod);
355 for (i = 0; i < 4; i++)
358 for (j = 0; j < 4; j++)
361 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
362 carry += arg1[i] * arg2[j];
363 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
365 prod[k] = LOWPART (carry);
366 carry = HIGHPART (carry);
371 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
373 /* Check for overflow by calculating the top half of the answer in full;
374 it should agree with the low half's sign bit. */
375 decode (prod + 4, &toplow, &tophigh);
378 neg_double (l2, h2, &neglow, &neghigh);
379 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 neg_double (l1, h1, &neglow, &neghigh);
384 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
386 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
389 /* Shift the doubleword integer in L1, H1 left by COUNT places
390 keeping only PREC bits of result.
391 Shift right if COUNT is negative.
392 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
393 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
396 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
397 HOST_WIDE_INT count, unsigned int prec,
398 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
400 unsigned HOST_WIDE_INT signmask;
404 rshift_double (l1, h1, -count, prec, lv, hv, arith);
408 if (SHIFT_COUNT_TRUNCATED)
411 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
413 /* Shifting by the host word size is undefined according to the
414 ANSI standard, so we must handle this as a special case. */
418 else if (count >= HOST_BITS_PER_WIDE_INT)
420 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
425 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
426 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
430 /* Sign extend all bits that are beyond the precision. */
432 signmask = -((prec > HOST_BITS_PER_WIDE_INT
433 ? ((unsigned HOST_WIDE_INT) *hv
434 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
435 : (*lv >> (prec - 1))) & 1);
437 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
439 else if (prec >= HOST_BITS_PER_WIDE_INT)
441 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
442 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
447 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
448 *lv |= signmask << prec;
452 /* Shift the doubleword integer in L1, H1 right by COUNT places
453 keeping only PREC bits of result. COUNT must be positive.
454 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
455 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
458 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
459 HOST_WIDE_INT count, unsigned int prec,
460 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
463 unsigned HOST_WIDE_INT signmask;
466 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
469 if (SHIFT_COUNT_TRUNCATED)
472 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
474 /* Shifting by the host word size is undefined according to the
475 ANSI standard, so we must handle this as a special case. */
479 else if (count >= HOST_BITS_PER_WIDE_INT)
482 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
486 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
488 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
491 /* Zero / sign extend all bits that are beyond the precision. */
493 if (count >= (HOST_WIDE_INT)prec)
498 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
500 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
502 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
503 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
508 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
509 *lv |= signmask << (prec - count);
513 /* Rotate the doubleword integer in L1, H1 left by COUNT places
514 keeping only PREC bits of result.
515 Rotate right if COUNT is negative.
516 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
519 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
520 HOST_WIDE_INT count, unsigned int prec,
521 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
523 unsigned HOST_WIDE_INT s1l, s2l;
524 HOST_WIDE_INT s1h, s2h;
530 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
531 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
536 /* Rotate the doubleword integer in L1, H1 left by COUNT places
537 keeping only PREC bits of result. COUNT must be positive.
538 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
541 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
542 HOST_WIDE_INT count, unsigned int prec,
543 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
545 unsigned HOST_WIDE_INT s1l, s2l;
546 HOST_WIDE_INT s1h, s2h;
552 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
553 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
558 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
559 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
560 CODE is a tree code for a kind of division, one of
561 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
563 It controls how the quotient is rounded to an integer.
564 Return nonzero if the operation overflows.
565 UNS nonzero says do unsigned division. */
568 div_and_round_double (enum tree_code code, int uns,
569 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
570 HOST_WIDE_INT hnum_orig,
571 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
572 HOST_WIDE_INT hden_orig,
573 unsigned HOST_WIDE_INT *lquo,
574 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
578 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
579 HOST_WIDE_INT den[4], quo[4];
581 unsigned HOST_WIDE_INT work;
582 unsigned HOST_WIDE_INT carry = 0;
583 unsigned HOST_WIDE_INT lnum = lnum_orig;
584 HOST_WIDE_INT hnum = hnum_orig;
585 unsigned HOST_WIDE_INT lden = lden_orig;
586 HOST_WIDE_INT hden = hden_orig;
589 if (hden == 0 && lden == 0)
590 overflow = 1, lden = 1;
592 /* Calculate quotient sign and convert operands to unsigned. */
598 /* (minimum integer) / (-1) is the only overflow case. */
599 if (neg_double (lnum, hnum, &lnum, &hnum)
600 && ((HOST_WIDE_INT) lden & hden) == -1)
606 neg_double (lden, hden, &lden, &hden);
610 if (hnum == 0 && hden == 0)
611 { /* single precision */
613 /* This unsigned division rounds toward zero. */
619 { /* trivial case: dividend < divisor */
620 /* hden != 0 already checked. */
627 memset (quo, 0, sizeof quo);
629 memset (num, 0, sizeof num); /* to zero 9th element */
630 memset (den, 0, sizeof den);
632 encode (num, lnum, hnum);
633 encode (den, lden, hden);
635 /* Special code for when the divisor < BASE. */
636 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
638 /* hnum != 0 already checked. */
639 for (i = 4 - 1; i >= 0; i--)
641 work = num[i] + carry * BASE;
642 quo[i] = work / lden;
648 /* Full double precision division,
649 with thanks to Don Knuth's "Seminumerical Algorithms". */
650 int num_hi_sig, den_hi_sig;
651 unsigned HOST_WIDE_INT quo_est, scale;
653 /* Find the highest nonzero divisor digit. */
654 for (i = 4 - 1;; i--)
661 /* Insure that the first digit of the divisor is at least BASE/2.
662 This is required by the quotient digit estimation algorithm. */
664 scale = BASE / (den[den_hi_sig] + 1);
666 { /* scale divisor and dividend */
668 for (i = 0; i <= 4 - 1; i++)
670 work = (num[i] * scale) + carry;
671 num[i] = LOWPART (work);
672 carry = HIGHPART (work);
677 for (i = 0; i <= 4 - 1; i++)
679 work = (den[i] * scale) + carry;
680 den[i] = LOWPART (work);
681 carry = HIGHPART (work);
682 if (den[i] != 0) den_hi_sig = i;
689 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
691 /* Guess the next quotient digit, quo_est, by dividing the first
692 two remaining dividend digits by the high order quotient digit.
693 quo_est is never low and is at most 2 high. */
694 unsigned HOST_WIDE_INT tmp;
696 num_hi_sig = i + den_hi_sig + 1;
697 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
698 if (num[num_hi_sig] != den[den_hi_sig])
699 quo_est = work / den[den_hi_sig];
703 /* Refine quo_est so it's usually correct, and at most one high. */
704 tmp = work - quo_est * den[den_hi_sig];
706 && (den[den_hi_sig - 1] * quo_est
707 > (tmp * BASE + num[num_hi_sig - 2])))
710 /* Try QUO_EST as the quotient digit, by multiplying the
711 divisor by QUO_EST and subtracting from the remaining dividend.
712 Keep in mind that QUO_EST is the I - 1st digit. */
715 for (j = 0; j <= den_hi_sig; j++)
717 work = quo_est * den[j] + carry;
718 carry = HIGHPART (work);
719 work = num[i + j] - LOWPART (work);
720 num[i + j] = LOWPART (work);
721 carry += HIGHPART (work) != 0;
724 /* If quo_est was high by one, then num[i] went negative and
725 we need to correct things. */
726 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
729 carry = 0; /* add divisor back in */
730 for (j = 0; j <= den_hi_sig; j++)
732 work = num[i + j] + den[j] + carry;
733 carry = HIGHPART (work);
734 num[i + j] = LOWPART (work);
737 num [num_hi_sig] += carry;
740 /* Store the quotient digit. */
745 decode (quo, lquo, hquo);
748 /* If result is negative, make it so. */
750 neg_double (*lquo, *hquo, lquo, hquo);
752 /* Compute trial remainder: rem = num - (quo * den) */
753 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
754 neg_double (*lrem, *hrem, lrem, hrem);
755 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
760 case TRUNC_MOD_EXPR: /* round toward zero */
761 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
765 case FLOOR_MOD_EXPR: /* round toward negative infinity */
766 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
769 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
777 case CEIL_MOD_EXPR: /* round toward positive infinity */
778 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
780 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
788 case ROUND_MOD_EXPR: /* round to closest integer */
790 unsigned HOST_WIDE_INT labs_rem = *lrem;
791 HOST_WIDE_INT habs_rem = *hrem;
792 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
793 HOST_WIDE_INT habs_den = hden, htwice;
795 /* Get absolute values. */
797 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
799 neg_double (lden, hden, &labs_den, &habs_den);
801 /* If (2 * abs (lrem) >= abs (lden)) */
802 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
803 labs_rem, habs_rem, <wice, &htwice);
805 if (((unsigned HOST_WIDE_INT) habs_den
806 < (unsigned HOST_WIDE_INT) htwice)
807 || (((unsigned HOST_WIDE_INT) habs_den
808 == (unsigned HOST_WIDE_INT) htwice)
809 && (labs_den < ltwice)))
813 add_double (*lquo, *hquo,
814 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 /* Compute true remainder: rem = num - (quo * den) */
830 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
831 neg_double (*lrem, *hrem, lrem, hrem);
832 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
836 /* Return true if built-in mathematical function specified by CODE
837 preserves the sign of it argument, i.e. -f(x) == f(-x). */
840 negate_mathfn_p (enum built_in_function code)
864 /* Check whether we may negate an integer constant T without causing
868 may_negate_without_overflow_p (tree t)
870 unsigned HOST_WIDE_INT val;
874 gcc_assert (TREE_CODE (t) == INTEGER_CST);
876 type = TREE_TYPE (t);
877 if (TYPE_UNSIGNED (type))
880 prec = TYPE_PRECISION (type);
881 if (prec > HOST_BITS_PER_WIDE_INT)
883 if (TREE_INT_CST_LOW (t) != 0)
885 prec -= HOST_BITS_PER_WIDE_INT;
886 val = TREE_INT_CST_HIGH (t);
889 val = TREE_INT_CST_LOW (t);
890 if (prec < HOST_BITS_PER_WIDE_INT)
891 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
892 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
895 /* Determine whether an expression T can be cheaply negated using
896 the function negate_expr. */
899 negate_expr_p (tree t)
906 type = TREE_TYPE (t);
909 switch (TREE_CODE (t))
912 if (TYPE_UNSIGNED (type) || ! flag_trapv)
915 /* Check that -CST will not overflow type. */
916 return may_negate_without_overflow_p (t);
923 return negate_expr_p (TREE_REALPART (t))
924 && negate_expr_p (TREE_IMAGPART (t));
927 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
929 /* -(A + B) -> (-B) - A. */
930 if (negate_expr_p (TREE_OPERAND (t, 1))
931 && reorder_operands_p (TREE_OPERAND (t, 0),
932 TREE_OPERAND (t, 1)))
934 /* -(A + B) -> (-A) - B. */
935 return negate_expr_p (TREE_OPERAND (t, 0));
938 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
939 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
940 && reorder_operands_p (TREE_OPERAND (t, 0),
941 TREE_OPERAND (t, 1));
944 if (TYPE_UNSIGNED (TREE_TYPE (t)))
950 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
951 return negate_expr_p (TREE_OPERAND (t, 1))
952 || negate_expr_p (TREE_OPERAND (t, 0));
956 /* Negate -((double)float) as (double)(-float). */
957 if (TREE_CODE (type) == REAL_TYPE)
959 tree tem = strip_float_extensions (t);
961 return negate_expr_p (tem);
966 /* Negate -f(x) as f(-x). */
967 if (negate_mathfn_p (builtin_mathfn_code (t)))
968 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
972 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
973 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
975 tree op1 = TREE_OPERAND (t, 1);
976 if (TREE_INT_CST_HIGH (op1) == 0
977 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
978 == TREE_INT_CST_LOW (op1))
989 /* Given T, an expression, return the negation of T. Allow for T to be
990 null, in which case return null. */
1001 type = TREE_TYPE (t);
1002 STRIP_SIGN_NOPS (t);
1004 switch (TREE_CODE (t))
1007 tem = fold_negate_const (t, type);
1008 if (! TREE_OVERFLOW (tem)
1009 || TYPE_UNSIGNED (type)
1015 tem = fold_negate_const (t, type);
1016 /* Two's complement FP formats, such as c4x, may overflow. */
1017 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1018 return fold_convert (type, tem);
1023 tree rpart = negate_expr (TREE_REALPART (t));
1024 tree ipart = negate_expr (TREE_IMAGPART (t));
1026 if ((TREE_CODE (rpart) == REAL_CST
1027 && TREE_CODE (ipart) == REAL_CST)
1028 || (TREE_CODE (rpart) == INTEGER_CST
1029 && TREE_CODE (ipart) == INTEGER_CST))
1030 return build_complex (type, rpart, ipart);
1035 return fold_convert (type, TREE_OPERAND (t, 0));
1038 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1040 /* -(A + B) -> (-B) - A. */
1041 if (negate_expr_p (TREE_OPERAND (t, 1))
1042 && reorder_operands_p (TREE_OPERAND (t, 0),
1043 TREE_OPERAND (t, 1)))
1045 tem = negate_expr (TREE_OPERAND (t, 1));
1046 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1047 tem, TREE_OPERAND (t, 0)));
1048 return fold_convert (type, tem);
1051 /* -(A + B) -> (-A) - B. */
1052 if (negate_expr_p (TREE_OPERAND (t, 0)))
1054 tem = negate_expr (TREE_OPERAND (t, 0));
1055 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1056 tem, TREE_OPERAND (t, 1)));
1057 return fold_convert (type, tem);
1063 /* - (A - B) -> B - A */
1064 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1066 return fold_convert (type,
1067 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1068 TREE_OPERAND (t, 1),
1069 TREE_OPERAND (t, 0))));
1073 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1079 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1081 tem = TREE_OPERAND (t, 1);
1082 if (negate_expr_p (tem))
1083 return fold_convert (type,
1084 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1085 TREE_OPERAND (t, 0),
1086 negate_expr (tem))));
1087 tem = TREE_OPERAND (t, 0);
1088 if (negate_expr_p (tem))
1089 return fold_convert (type,
1090 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1092 TREE_OPERAND (t, 1))));
1097 /* Convert -((double)float) into (double)(-float). */
1098 if (TREE_CODE (type) == REAL_TYPE)
1100 tem = strip_float_extensions (t);
1101 if (tem != t && negate_expr_p (tem))
1102 return fold_convert (type, negate_expr (tem));
1107 /* Negate -f(x) as f(-x). */
1108 if (negate_mathfn_p (builtin_mathfn_code (t))
1109 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1111 tree fndecl, arg, arglist;
1113 fndecl = get_callee_fndecl (t);
1114 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1115 arglist = build_tree_list (NULL_TREE, arg);
1116 return build_function_call_expr (fndecl, arglist);
1121 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1122 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1124 tree op1 = TREE_OPERAND (t, 1);
1125 if (TREE_INT_CST_HIGH (op1) == 0
1126 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1127 == TREE_INT_CST_LOW (op1))
1129 tree ntype = TYPE_UNSIGNED (type)
1130 ? lang_hooks.types.signed_type (type)
1131 : lang_hooks.types.unsigned_type (type);
1132 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1133 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1134 return fold_convert (type, temp);
1143 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1144 return fold_convert (type, tem);
1147 /* Split a tree IN into a constant, literal and variable parts that could be
1148 combined with CODE to make IN. "constant" means an expression with
1149 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1150 commutative arithmetic operation. Store the constant part into *CONP,
1151 the literal in *LITP and return the variable part. If a part isn't
1152 present, set it to null. If the tree does not decompose in this way,
1153 return the entire tree as the variable part and the other parts as null.
1155 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1156 case, we negate an operand that was subtracted. Except if it is a
1157 literal for which we use *MINUS_LITP instead.
1159 If NEGATE_P is true, we are negating all of IN, again except a literal
1160 for which we use *MINUS_LITP instead.
1162 If IN is itself a literal or constant, return it as appropriate.
1164 Note that we do not guarantee that any of the three values will be the
1165 same type as IN, but they will have the same signedness and mode. */
1168 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1169 tree *minus_litp, int negate_p)
1177 /* Strip any conversions that don't change the machine mode or signedness. */
1178 STRIP_SIGN_NOPS (in);
1180 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1182 else if (TREE_CODE (in) == code
1183 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1184 /* We can associate addition and subtraction together (even
1185 though the C standard doesn't say so) for integers because
1186 the value is not affected. For reals, the value might be
1187 affected, so we can't. */
1188 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1189 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1191 tree op0 = TREE_OPERAND (in, 0);
1192 tree op1 = TREE_OPERAND (in, 1);
1193 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1194 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1196 /* First see if either of the operands is a literal, then a constant. */
1197 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1198 *litp = op0, op0 = 0;
1199 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1200 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1202 if (op0 != 0 && TREE_CONSTANT (op0))
1203 *conp = op0, op0 = 0;
1204 else if (op1 != 0 && TREE_CONSTANT (op1))
1205 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1207 /* If we haven't dealt with either operand, this is not a case we can
1208 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1209 if (op0 != 0 && op1 != 0)
1214 var = op1, neg_var_p = neg1_p;
1216 /* Now do any needed negations. */
1218 *minus_litp = *litp, *litp = 0;
1220 *conp = negate_expr (*conp);
1222 var = negate_expr (var);
1224 else if (TREE_CONSTANT (in))
1232 *minus_litp = *litp, *litp = 0;
1233 else if (*minus_litp)
1234 *litp = *minus_litp, *minus_litp = 0;
1235 *conp = negate_expr (*conp);
1236 var = negate_expr (var);
1242 /* Re-associate trees split by the above function. T1 and T2 are either
1243 expressions to associate or null. Return the new expression, if any. If
1244 we build an operation, do it in TYPE and with CODE. */
1247 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1254 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1255 try to fold this since we will have infinite recursion. But do
1256 deal with any NEGATE_EXPRs. */
1257 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1258 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1260 if (code == PLUS_EXPR)
1262 if (TREE_CODE (t1) == NEGATE_EXPR)
1263 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1264 fold_convert (type, TREE_OPERAND (t1, 0)));
1265 else if (TREE_CODE (t2) == NEGATE_EXPR)
1266 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1267 fold_convert (type, TREE_OPERAND (t2, 0)));
1268 else if (integer_zerop (t2))
1269 return fold_convert (type, t1);
1271 else if (code == MINUS_EXPR)
1273 if (integer_zerop (t2))
1274 return fold_convert (type, t1);
1277 return build2 (code, type, fold_convert (type, t1),
1278 fold_convert (type, t2));
1281 return fold (build2 (code, type, fold_convert (type, t1),
1282 fold_convert (type, t2)));
1285 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1286 to produce a new constant.
1288 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1291 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* NOTE(review): this rendering of int_const_binop is missing many original
   lines (return type, braces, `switch (code)' and several case labels).
   Comments below describe only what the visible lines establish.  */
/* Each double-word operand is split into a low (unsigned) and high word.  */
1293 unsigned HOST_WIDE_INT int1l, int2l;
1294 HOST_WIDE_INT int1h, int2h;
1295 unsigned HOST_WIDE_INT low;
/* Receivers for the unused half of a div/mod result.  */
1297 unsigned HOST_WIDE_INT garbagel;
1298 HOST_WIDE_INT garbageh;
1300 tree type = TREE_TYPE (arg1);
1301 int uns = TYPE_UNSIGNED (type);
/* The lvalue of this initializer (presumably `int is_sizetype', used below
   at line 1441) is on an elided line -- TODO confirm.  */
1303 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1305 int no_overflow = 0;
/* Unpack both double-word integer constants.  */
1307 int1l = TREE_INT_CST_LOW (arg1);
1308 int1h = TREE_INT_CST_HIGH (arg1);
1309 int2l = TREE_INT_CST_LOW (arg2);
1310 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise cases (labels elided): OR, XOR, AND operate word-wise.  */
1315 low = int1l | int2l, hi = int1h | int2h;
1319 low = int1l ^ int2l, hi = int1h ^ int2h;
1323 low = int1l & int2l, hi = int1h & int2h;
/* Shift and rotate cases (labels elided).  */
1329 /* It's unclear from the C standard whether shifts can overflow.
1330 The following code ignores overflow; perhaps a C standard
1331 interpretation ruling is needed. */
1332 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1340 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
/* PLUS_EXPR: double-word add; add_double reports signed overflow.  */
1345 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* MINUS_EXPR: computed as arg1 + (-arg2) with a sign-based overflow test.  */
1349 neg_double (int2l, int2h, &low, &hi);
1350 add_double (int1l, int1h, low, hi, &low, &hi);
1351 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
/* MULT_EXPR.  */
1355 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1358 case TRUNC_DIV_EXPR:
1359 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1360 case EXACT_DIV_EXPR:
1361 /* This is a shortcut for a common special case. */
1362 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1363 && ! TREE_CONSTANT_OVERFLOW (arg1)
1364 && ! TREE_CONSTANT_OVERFLOW (arg2)
1365 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1367 if (code == CEIL_DIV_EXPR)
/* Single-word division when both operands fit in one non-negative word.  */
1370 low = int1l / int2l, hi = 0;
1374 /* ... fall through ... */
1376 case ROUND_DIV_EXPR:
/* Division by one returns the dividend unchanged.  */
1377 if (int2h == 0 && int2l == 1)
1379 low = int1l, hi = int1h;
/* Equal nonzero operands divide to one (result set on elided lines).  */
1382 if (int1l == int2l && int1h == int2h
1383 && ! (int1l == 0 && int1h == 0))
/* General double-word division; the remainder goes to the garbage words.  */
1388 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1389 &low, &hi, &garbagel, &garbageh);
1392 case TRUNC_MOD_EXPR:
1393 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1394 /* This is a shortcut for a common special case. */
1395 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1396 && ! TREE_CONSTANT_OVERFLOW (arg1)
1397 && ! TREE_CONSTANT_OVERFLOW (arg2)
1398 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1400 if (code == CEIL_MOD_EXPR)
1402 low = int1l % int2l, hi = 0;
1406 /* ... fall through ... */
1408 case ROUND_MOD_EXPR:
/* For modulus the quotient is the discarded ("garbage") result.  */
1409 overflow = div_and_round_double (code, uns,
1410 int1l, int1h, int2l, int2h,
1411 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX (labels elided): compare unsigned or signed according to UNS.  */
1417 low = (((unsigned HOST_WIDE_INT) int1h
1418 < (unsigned HOST_WIDE_INT) int2h)
1419 || (((unsigned HOST_WIDE_INT) int1h
1420 == (unsigned HOST_WIDE_INT) int2h)
1423 low = (int1h < int2h
1424 || (int1h == int2h && int1l < int2l));
/* LOW now holds "arg1 < arg2"; pick whichever operand MIN/MAX wants.  */
1426 if (low == (code == MIN_EXPR))
1427 low = int1l, hi = int1h;
1429 low = int2l, hi = int2h;
1436 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
/* With NOTRUNC (guard on an elided line -- TODO confirm), propagate the
   overflow bits by hand instead of calling force_fit_type.  */
1440 /* Propagate overflow flags ourselves. */
1441 if (((!uns || is_sizetype) && overflow)
1442 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1445 TREE_OVERFLOW (t) = 1;
1446 TREE_CONSTANT_OVERFLOW (t) = 1;
1448 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1451 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Otherwise let force_fit_type truncate to TYPE and set both flags.  */
1455 t = force_fit_type (t, 1,
1456 ((!uns || is_sizetype) && overflow)
1457 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1458 TREE_CONSTANT_OVERFLOW (arg1)
1459 | TREE_CONSTANT_OVERFLOW (arg2));
1464 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1465 constant. We assume ARG1 and ARG2 have the same data type, or at least
1466 are the same kind of constant and the same machine mode.
1468 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1471 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* NOTE(review): elided rendering -- return type, braces, some declarations
   and several early-return statements fall on missing lines.  */
/* Integer constants are delegated entirely to int_const_binop.  */
1476 if (TREE_CODE (arg1) == INTEGER_CST)
1477 return int_const_binop (code, arg1, arg2, notrunc);
1479 if (TREE_CODE (arg1) == REAL_CST)
1481 enum machine_mode mode;
1484 REAL_VALUE_TYPE value;
/* d1/d2 are declared on elided lines -- presumably REAL_VALUE_TYPE.  */
1487 d1 = TREE_REAL_CST (arg1);
1488 d2 = TREE_REAL_CST (arg2);
1490 type = TREE_TYPE (arg1);
1491 mode = TYPE_MODE (type);
1493 /* Don't perform operation if we honor signaling NaNs and
1494 either operand is a NaN. */
1495 if (HONOR_SNANS (mode)
1496 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1499 /* Don't perform operation if it would raise a division
1500 by zero exception. */
1501 if (code == RDIV_EXPR
1502 && REAL_VALUES_EQUAL (d2, dconst0)
1503 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1506 /* If either operand is a NaN, just return it. Otherwise, set up
1507 for floating-point trap; we return an overflow. */
1508 if (REAL_VALUE_ISNAN (d1))
1510 else if (REAL_VALUE_ISNAN (d2))
/* Perform the operation in the target format, then truncate to MODE.  */
1513 REAL_ARITHMETIC (value, code, d1, d2);
1515 t = build_real (type, real_value_truncate (mode, value));
/* Overflow flags combine those of both operands.  */
1517 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1518 TREE_CONSTANT_OVERFLOW (t)
1520 | TREE_CONSTANT_OVERFLOW (arg1)
1521 | TREE_CONSTANT_OVERFLOW (arg2);
/* Complex constants: operate componentwise on real and imaginary parts.  */
1524 if (TREE_CODE (arg1) == COMPLEX_CST)
1526 tree type = TREE_TYPE (arg1);
1527 tree r1 = TREE_REALPART (arg1);
1528 tree i1 = TREE_IMAGPART (arg1);
1529 tree r2 = TREE_REALPART (arg2);
1530 tree i2 = TREE_IMAGPART (arg2);
/* (r1+i1*I) + (r2+i2*I): add componentwise.  */
1536 t = build_complex (type,
1537 const_binop (PLUS_EXPR, r1, r2, notrunc),
1538 const_binop (PLUS_EXPR, i1, i2, notrunc));
1542 t = build_complex (type,
1543 const_binop (MINUS_EXPR, r1, r2, notrunc),
1544 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1*r2 - i1*i2, r1*i2 + i1*r2); several operand
   lines are elided here.  */
1548 t = build_complex (type,
1549 const_binop (MINUS_EXPR,
1550 const_binop (MULT_EXPR,
1552 const_binop (MULT_EXPR,
1555 const_binop (PLUS_EXPR,
1556 const_binop (MULT_EXPR,
1558 const_binop (MULT_EXPR,
/* Complex divide: divide by |arg2|^2 = r2*r2 + i2*i2.  */
1566 = const_binop (PLUS_EXPR,
1567 const_binop (MULT_EXPR, r2, r2, notrunc),
1568 const_binop (MULT_EXPR, i2, i2, notrunc),
1571 t = build_complex (type,
/* Use integer division for integral component types, real division
   otherwise.  */
1573 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1574 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1575 const_binop (PLUS_EXPR,
1576 const_binop (MULT_EXPR, r1, r2,
1578 const_binop (MULT_EXPR, i1, i2,
1581 magsquared, notrunc),
1583 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1584 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1585 const_binop (MINUS_EXPR,
1586 const_binop (MULT_EXPR, i1, r2,
1588 const_binop (MULT_EXPR, r1, i2,
1591 magsquared, notrunc));
1603 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1604 indicates which particular sizetype to create. */
1607 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1609 return build_int_cst (sizetype_tab[(int) kind], number);
1612 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1613 is a tree code. The type of the result is taken from the operands.
1614 Both must be the same type integer type and it must be a size type.
1615 If the operands are constant, so is the result. */
1618 size_binop (enum tree_code code, tree arg0, tree arg1)
1620 tree type = TREE_TYPE (arg0);
1622 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1623 && type == TREE_TYPE (arg1));
1625 /* Handle the special case of two integer constants faster. */
1626 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1628 /* And some specific cases even faster than that. */
1629 if (code == PLUS_EXPR && integer_zerop (arg0))
1631 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1632 && integer_zerop (arg1))
1634 else if (code == MULT_EXPR && integer_onep (arg0))
1637 /* Handle general case of two integer constants. */
1638 return int_const_binop (code, arg0, arg1, 0);
1641 if (arg0 == error_mark_node || arg1 == error_mark_node)
1642 return error_mark_node;
1644 return fold (build2 (code, type, arg0, arg1));
1647 /* Given two values, either both of sizetype or both of bitsizetype,
1648 compute the difference between the two values. Return the value
1649 in signed type corresponding to the type of the operands. */
1652 size_diffop (tree arg0, tree arg1)
1654 tree type = TREE_TYPE (arg0);
1657 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1658 && type == TREE_TYPE (arg1));
1660 /* If the type is already signed, just do the simple thing. */
1661 if (!TYPE_UNSIGNED (type))
1662 return size_binop (MINUS_EXPR, arg0, arg1);
1664 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1666 /* If either operand is not a constant, do the conversions to the signed
1667 type and subtract. The hardware will do the right thing with any
1668 overflow in the subtraction. */
1669 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1670 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1671 fold_convert (ctype, arg1));
1673 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1674 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1675 overflow) and negate (which can't either). Special-case a result
1676 of zero while we're here. */
1677 if (tree_int_cst_equal (arg0, arg1))
1678 return fold_convert (ctype, integer_zero_node);
1679 else if (tree_int_cst_lt (arg1, arg0))
1680 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1682 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1683 fold_convert (ctype, size_binop (MINUS_EXPR,
1687 /* A subroutine of fold_convert_const handling conversions of an
1688 INTEGER_CST to another integer type. */
1691 fold_convert_const_int_from_int (tree type, tree arg1)
1695 /* Given an integer constant, make new constant with new type,
1696 appropriately sign-extended or truncated. */
1697 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1698 TREE_INT_CST_HIGH (arg1));
1700 t = force_fit_type (t,
1701 /* Don't set the overflow when
1702 converting a pointer */
1703 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1704 (TREE_INT_CST_HIGH (arg1) < 0
1705 && (TYPE_UNSIGNED (type)
1706 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1707 | TREE_OVERFLOW (arg1),
1708 TREE_CONSTANT_OVERFLOW (arg1));
1713 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1714 to an integer type. */
1717 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
/* NOTE(review): elided rendering -- return type, local declarations
   (including, presumably, `int overflow', `REAL_VALUE_TYPE r' and `tree t'),
   braces and several case labels/break statements are on missing lines.  */
1722 /* The following code implements the floating point to integer
1723 conversion rules required by the Java Language Specification,
1724 that IEEE NaNs are mapped to zero and values that overflow
1725 the target precision saturate, i.e. values greater than
1726 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1727 are mapped to INT_MIN. These semantics are allowed by the
1728 C and C++ standards that simply state that the behavior of
1729 FP-to-integer conversion is unspecified upon overflow. */
1731 HOST_WIDE_INT high, low;
1733 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round X into R according to the FIX_* variant requested by CODE
   (the FIX_CEIL_EXPR label is on an elided line).  */
1737 case FIX_TRUNC_EXPR:
1738 real_trunc (&r, VOIDmode, &x);
1742 real_ceil (&r, VOIDmode, &x);
1745 case FIX_FLOOR_EXPR:
1746 real_floor (&r, VOIDmode, &x);
1749 case FIX_ROUND_EXPR:
1750 real_round (&r, VOIDmode, &x);
1757 /* If R is NaN, return zero and show we have an overflow. */
1758 if (REAL_VALUE_ISNAN (r))
1765 /* See if R is less than the lower bound or greater than the
/* Below the lower bound: saturate to TYPE_MIN_VALUE.  */
1770 tree lt = TYPE_MIN_VALUE (type);
1771 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1772 if (REAL_VALUES_LESS (r, l))
1775 high = TREE_INT_CST_HIGH (lt);
1776 low = TREE_INT_CST_LOW (lt);
/* Above the upper bound: saturate to TYPE_MAX_VALUE.  */
1782 tree ut = TYPE_MAX_VALUE (type);
1785 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1786 if (REAL_VALUES_LESS (u, r))
1789 high = TREE_INT_CST_HIGH (ut);
1790 low = TREE_INT_CST_LOW (ut);
/* In range: convert the rounded value directly.  */
1796 REAL_VALUE_TO_INT (&low, &high, r);
1798 t = build_int_cst_wide (type, low, high);
/* -1 overflowable: any overflow is propagated into the result's flags.  */
1800 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1801 TREE_CONSTANT_OVERFLOW (arg1));
1805 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1806 to another floating point type. */
1809 fold_convert_const_real_from_real (tree type, tree arg1)
1813 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1815 /* We make a copy of ARG1 so that we don't modify an
1816 existing constant tree. */
1817 t = copy_node (arg1);
1818 TREE_TYPE (t) = type;
1822 t = build_real (type,
1823 real_value_truncate (TYPE_MODE (type),
1824 TREE_REAL_CST (arg1)));
1826 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1827 TREE_CONSTANT_OVERFLOW (t)
1828 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1832 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1833 type TYPE. If no simplification can be done return NULL_TREE. */
1836 fold_convert_const (enum tree_code code, tree type, tree arg1)
1838 if (TREE_TYPE (arg1) == type)
1841 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1843 if (TREE_CODE (arg1) == INTEGER_CST)
1844 return fold_convert_const_int_from_int (type, arg1);
1845 else if (TREE_CODE (arg1) == REAL_CST)
1846 return fold_convert_const_int_from_real (code, type, arg1);
1848 else if (TREE_CODE (type) == REAL_TYPE)
1850 if (TREE_CODE (arg1) == INTEGER_CST)
1851 return build_real_from_int_cst (type, arg1);
1852 if (TREE_CODE (arg1) == REAL_CST)
1853 return fold_convert_const_real_from_real (type, arg1);
1858 /* Construct a vector of zero elements of vector type TYPE. */
1861 build_zero_vector (tree type)
1866 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1867 units = TYPE_VECTOR_SUBPARTS (type);
1870 for (i = 0; i < units; i++)
1871 list = tree_cons (NULL_TREE, elem, list);
1872 return build_vector (type, list);
1875 /* Convert expression ARG to type TYPE. Used by the middle-end for
1876 simple conversions in preference to calling the front-end's convert. */
1879 fold_convert (tree type, tree arg)
/* NOTE(review): elided rendering -- return type, `tree tem', braces,
   `return tem;' statements and several case labels (REAL_TYPE,
   COMPLEX_TYPE, VECTOR_TYPE, VOID_TYPE, default) are on missing lines.  */
1881 tree orig = TREE_TYPE (arg);
/* Refuse to convert anything involving an error mark.  */
1887 if (TREE_CODE (arg) == ERROR_MARK
1888 || TREE_CODE (type) == ERROR_MARK
1889 || TREE_CODE (orig) == ERROR_MARK)
1890 return error_mark_node;
/* Identical (or language-compatible) main variants need only a NOP.  */
1892 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1893 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1894 TYPE_MAIN_VARIANT (orig)))
1895 return fold (build1 (NOP_EXPR, type, arg));
1897 switch (TREE_CODE (type))
1899 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1900 case POINTER_TYPE: case REFERENCE_TYPE:
/* Integer constants fold at compile time when possible.  */
1902 if (TREE_CODE (arg) == INTEGER_CST)
1904 tem = fold_convert_const (NOP_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1908 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1909 || TREE_CODE (orig) == OFFSET_TYPE)
1910 return fold (build1 (NOP_EXPR, type, arg));
/* Complex -> integer: convert the real part only.  */
1911 if (TREE_CODE (orig) == COMPLEX_TYPE)
1913 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1914 return fold_convert (type, tem);
/* Only a same-size vector remains as a legal source here.  */
1916 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1917 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1918 return fold (build1 (NOP_EXPR, type, arg));
/* Target is a floating type (case label elided).  */
1921 if (TREE_CODE (arg) == INTEGER_CST)
1923 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1924 if (tem != NULL_TREE)
1927 else if (TREE_CODE (arg) == REAL_CST)
1929 tem = fold_convert_const (NOP_EXPR, type, arg);
1930 if (tem != NULL_TREE)
1934 switch (TREE_CODE (orig))
1936 case INTEGER_TYPE: case CHAR_TYPE:
1937 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1938 case POINTER_TYPE: case REFERENCE_TYPE:
1939 return fold (build1 (FLOAT_EXPR, type, arg));
/* Real -> real: flag_float_store forces an explicit CONVERT.  */
1942 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1946 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1947 return fold_convert (type, tem);
/* Target is a complex type (case label elided).  */
1954 switch (TREE_CODE (orig))
1956 case INTEGER_TYPE: case CHAR_TYPE:
1957 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1958 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar -> complex: pair the converted value with a zero imaginary.  */
1960 return build2 (COMPLEX_EXPR, type,
1961 fold_convert (TREE_TYPE (type), arg),
1962 fold_convert (TREE_TYPE (type), integer_zero_node));
1967 if (TREE_CODE (arg) == COMPLEX_EXPR)
1969 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1970 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1971 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* ARG is used twice below, so protect it from double evaluation.  */
1974 arg = save_expr (arg);
1975 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1976 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1977 rpart = fold_convert (TREE_TYPE (type), rpart);
1978 ipart = fold_convert (TREE_TYPE (type), ipart);
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* Target is a vector type (case label elided).  */
1987 if (integer_zerop (arg))
1988 return build_zero_vector (type);
1989 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1990 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1991 || TREE_CODE (orig) == VECTOR_TYPE);
1992 return fold (build1 (NOP_EXPR, type, arg));
/* VOID_TYPE target (label elided): discard the value but keep effects.  */
1995 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2002 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): elided rendering -- the function signature (presumably
   `tree non_lvalue (tree x)'), braces and most case labels of the switch
   below are on missing lines.  */
2007 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2024 case ALIGN_INDIRECT_REF:
2025 case MISALIGNED_INDIRECT_REF:
2027 case ARRAY_RANGE_REF:
2033 case PREINCREMENT_EXPR:
2034 case PREDECREMENT_EXPR:
2036 case TRY_CATCH_EXPR:
2037 case WITH_CLEANUP_EXPR:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap X so it can no longer be used as an lvalue.  */
2053 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2059 int pedantic_lvalues;
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2065 pedantic_non_lvalue (tree x)
2067 if (pedantic_lvalues)
2068 return non_lvalue (x);
2073 /* Given a tree comparison code, return the code that is the logical inverse
2074 of the given code. It is not safe to do this for floating-point
2075 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2076 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2078 static enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
2081 if (honor_nans && flag_trapping_math)
2091 return honor_nans ? UNLE_EXPR : LE_EXPR;
2093 return honor_nans ? UNLT_EXPR : LT_EXPR;
2095 return honor_nans ? UNGE_EXPR : GE_EXPR;
2097 return honor_nans ? UNGT_EXPR : GT_EXPR;
2111 return UNORDERED_EXPR;
2112 case UNORDERED_EXPR:
2113 return ORDERED_EXPR;
2119 /* Similar, but return the comparison that results if the operands are
2120 swapped. This is safe for floating-point. */
2123 swap_tree_comparison (enum tree_code code)
/* NOTE(review): the return type and the entire body of this function are
   on elided lines; nothing beyond the signature is visible here.  */
2144 /* Convert a comparison tree code from an enum tree_code representation
2145 into a compcode bit-based encoding. This function is the inverse of
2146 compcode_to_comparison. */
2148 static enum comparison_code
2149 comparison_to_compcode (enum tree_code code)
2166 return COMPCODE_ORD;
2167 case UNORDERED_EXPR:
2168 return COMPCODE_UNORD;
2170 return COMPCODE_UNLT;
2172 return COMPCODE_UNEQ;
2174 return COMPCODE_UNLE;
2176 return COMPCODE_UNGT;
2178 return COMPCODE_LTGT;
2180 return COMPCODE_UNGE;
2186 /* Convert a compcode bit-based encoding of a comparison operator back
2187 to GCC's enum tree_code representation. This function is the
2188 inverse of comparison_to_compcode. */
2190 static enum tree_code
2191 compcode_to_comparison (enum comparison_code code)
2208 return ORDERED_EXPR;
2209 case COMPCODE_UNORD:
2210 return UNORDERED_EXPR;
2228 /* Return a tree for the comparison which is the combination of
2229 doing the AND or OR (depending on CODE) of the two operations LCODE
2230 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2231 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2232 if this makes the transformation invalid. */
2235 combine_comparisons (enum tree_code code, enum tree_code lcode,
2236 enum tree_code rcode, tree truth_type,
2237 tree ll_arg, tree lr_arg)
2239 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2240 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2241 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2242 enum comparison_code compcode;
2246 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2247 compcode = lcompcode & rcompcode;
2250 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2251 compcode = lcompcode | rcompcode;
2260 /* Eliminate unordered comparisons, as well as LTGT and ORD
2261 which are not used unless the mode has NaNs. */
2262 compcode &= ~COMPCODE_UNORD;
2263 if (compcode == COMPCODE_LTGT)
2264 compcode = COMPCODE_NE;
2265 else if (compcode == COMPCODE_ORD)
2266 compcode = COMPCODE_TRUE;
2268 else if (flag_trapping_math)
2270 /* Check that the original operation and the optimized ones will trap
2271 under the same condition. */
2272 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2273 && (lcompcode != COMPCODE_EQ)
2274 && (lcompcode != COMPCODE_ORD);
2275 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2276 && (rcompcode != COMPCODE_EQ)
2277 && (rcompcode != COMPCODE_ORD);
2278 bool trap = (compcode & COMPCODE_UNORD) == 0
2279 && (compcode != COMPCODE_EQ)
2280 && (compcode != COMPCODE_ORD);
2282 /* In a short-circuited boolean expression the LHS might be
2283 such that the RHS, if evaluated, will never trap. For
2284 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2285 if neither x nor y is NaN. (This is a mixed blessing: for
2286 example, the expression above will never trap, hence
2287 optimizing it to x < y would be invalid). */
2288 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2289 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2292 /* If the comparison was short-circuited, and only the RHS
2293 trapped, we may now generate a spurious trap. */
2295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2298 /* If we changed the conditions that cause a trap, we lose. */
2299 if ((ltrap || rtrap) != trap)
2303 if (compcode == COMPCODE_TRUE)
2304 return constant_boolean_node (true, truth_type);
2305 else if (compcode == COMPCODE_FALSE)
2306 return constant_boolean_node (false, truth_type);
2308 return fold (build2 (compcode_to_comparison (compcode),
2309 truth_type, ll_arg, lr_arg));
2312 /* Return nonzero if CODE is a tree code that represents a truth value. */
2315 truth_value_p (enum tree_code code)
2317 return (TREE_CODE_CLASS (code) == tcc_comparison
2318 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2319 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2320 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2350 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
/* NOTE(review): elided rendering -- return type, braces, many case labels,
   `return 0/1' statements and some declarations (e.g. v1/v2) are on
   missing lines.  Comments describe only what the visible lines show.  */
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2356 /* If both types don't have the same signedness, then we can't consider
2357 them equal. We must check this before the STRIP_NOPS calls
2358 because they may change the signedness of the arguments. */
2359 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
/* Tree codes and machine modes must agree exactly.  */
2365 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2366 /* This is needed for conversions and for COMPONENT_REF.
2367 Might as well play it safe and always test this. */
2368 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2369 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2370 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2373 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2374 We don't care about side effects in that case because the SAVE_EXPR
2375 takes care of that for us. In all other cases, two expressions are
2376 equal if they have no side effects. If we have two identical
2377 expressions with side effects that should be treated the same due
2378 to the only side effects being identical SAVE_EXPR's, that will
2379 be detected in the recursive calls below. */
2380 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2381 && (TREE_CODE (arg0) == SAVE_EXPR
2382 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2385 /* Next handle constant cases, those for which we can return 1 even
2386 if ONLY_CONST is set. */
2387 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2388 switch (TREE_CODE (arg0))
/* INTEGER_CST (label elided): equal only if neither overflowed.  */
2391 return (! TREE_CONSTANT_OVERFLOW (arg0)
2392 && ! TREE_CONSTANT_OVERFLOW (arg1)
2393 && tree_int_cst_equal (arg0, arg1));
/* REAL_CST (label elided): bit-identical representations only.  */
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2399 TREE_REAL_CST (arg1)));
/* VECTOR_CST (label elided): walk both element lists in lockstep.  */
2405 if (TREE_CONSTANT_OVERFLOW (arg0)
2406 || TREE_CONSTANT_OVERFLOW (arg1))
2409 v1 = TREE_VECTOR_CST_ELTS (arg0);
2410 v2 = TREE_VECTOR_CST_ELTS (arg1);
2413 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2416 v1 = TREE_CHAIN (v1);
2417 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST (label elided): compare real and imaginary parts.  */
2424 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2426 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST (label elided): same length and identical bytes.  */
2430 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2431 && ! memcmp (TREE_STRING_POINTER (arg0),
2432 TREE_STRING_POINTER (arg1),
2433 TREE_STRING_LENGTH (arg0)));
/* ADDR_EXPR (label elided): compare the addressed objects.  */
2436 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2442 if (flags & OEP_ONLY_CONST)
2445 /* Define macros to test an operand from arg0 and arg1 for equality and a
2446 variant that allows null and views null as being different from any
2447 non-null value. In the latter case, if either is null, the both
2448 must be; otherwise, do the normal comparison. */
2449 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2450 TREE_OPERAND (arg1, N), flags)
2452 #define OP_SAME_WITH_NULL(N) \
2453 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2454 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2456 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2459 /* Two conversions are equal only if signedness and modes match. */
2460 switch (TREE_CODE (arg0))
2465 case FIX_TRUNC_EXPR:
2466 case FIX_FLOOR_EXPR:
2467 case FIX_ROUND_EXPR:
2468 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2469 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2479 case tcc_comparison:
2481 if (OP_SAME (0) && OP_SAME (1))
2484 /* For commutative ops, allow the other order. */
2485 return (commutative_tree_code (TREE_CODE (arg0))
2486 && operand_equal_p (TREE_OPERAND (arg0, 0),
2487 TREE_OPERAND (arg1, 1), flags)
2488 && operand_equal_p (TREE_OPERAND (arg0, 1),
2489 TREE_OPERAND (arg1, 0), flags));
2492 /* If either of the pointer (or reference) expressions we are
2493 dereferencing contain a side effect, these cannot be equal. */
2494 if (TREE_SIDE_EFFECTS (arg0)
2495 || TREE_SIDE_EFFECTS (arg1))
2498 switch (TREE_CODE (arg0))
2501 case ALIGN_INDIRECT_REF:
2502 case MISALIGNED_INDIRECT_REF:
2508 case ARRAY_RANGE_REF:
2509 /* Operands 2 and 3 may be null. */
2512 && OP_SAME_WITH_NULL (2)
2513 && OP_SAME_WITH_NULL (3));
2516 /* Handle operand 2 the same as for ARRAY_REF. */
2517 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2520 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2526 case tcc_expression:
2527 switch (TREE_CODE (arg0))
2530 case TRUTH_NOT_EXPR:
2533 case TRUTH_ANDIF_EXPR:
2534 case TRUTH_ORIF_EXPR:
2535 return OP_SAME (0) && OP_SAME (1);
2537 case TRUTH_AND_EXPR:
2539 case TRUTH_XOR_EXPR:
2540 if (OP_SAME (0) && OP_SAME (1))
2543 /* Otherwise take into account this is a commutative operation. */
2544 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2545 TREE_OPERAND (arg1, 1), flags)
2546 && operand_equal_p (TREE_OPERAND (arg0, 1),
2547 TREE_OPERAND (arg1, 0), flags));
2550 /* If the CALL_EXPRs call different functions, then they
2551 clearly can not be equal. */
/* Only const (and, with OEP_PURE_SAME, pure) calls may compare equal.  */
2556 unsigned int cef = call_expr_flags (arg0);
2557 if (flags & OEP_PURE_SAME)
2558 cef &= ECF_CONST | ECF_PURE;
2565 /* Now see if all the arguments are the same. operand_equal_p
2566 does not handle TREE_LIST, so we walk the operands here
2567 feeding them to operand_equal_p. */
2568 arg0 = TREE_OPERAND (arg0, 1);
2569 arg1 = TREE_OPERAND (arg1, 1);
2570 while (arg0 && arg1)
2572 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2576 arg0 = TREE_CHAIN (arg0);
2577 arg1 = TREE_CHAIN (arg1);
2580 /* If we get here and both argument lists are exhausted
2581 then the CALL_EXPRs are equal. */
2582 return ! (arg0 || arg1);
2588 case tcc_declaration:
2589 /* Consider __builtin_sqrt equal to sqrt. */
2590 return (TREE_CODE (arg0) == FUNCTION_DECL
2591 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2592 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2593 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2600 #undef OP_SAME_WITH_NULL
2603 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2604 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2606 When in doubt, return 0. */
2609 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2611   int unsignedp1, unsignedpo;
2612   tree primarg0, primarg1, primother;
2613   unsigned int correct_width;
  /* Exact operand equality trivially qualifies.  */
2615   if (operand_equal_p (arg0, arg1, 0))
  /* shorten_compare only rewrites integral comparisons, so anything else
     cannot match.  */
2618   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2619       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2622   /* Discard any conversions that don't change the modes of ARG0 and ARG1
2623      and see if the inner values are the same.  This removes any
2624      signedness comparison, which doesn't matter here.  */
2625   primarg0 = arg0, primarg1 = arg1;
2626   STRIP_NOPS (primarg0);
2627   STRIP_NOPS (primarg1);
2628   if (operand_equal_p (primarg0, primarg1, 0))
2631   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2632      actual comparison operand, ARG0.
2634      First throw away any conversions to wider types
2635      already present in the operands.  */
2637   primarg1 = get_narrower (arg1, &unsignedp1);
2638   primother = get_narrower (other, &unsignedpo);
2640   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  /* Both narrowed operands must be narrower than ARG1's type and agree
     in signedness for the shortened comparison to have been valid.  */
2641   if (unsignedp1 == unsignedpo
2642       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2643       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2645       tree type = TREE_TYPE (arg0);
2647       /* Make sure shorter operand is extended the right way
2648 	 to match the longer operand.  */
2649       primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2650 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
  /* If re-extending PRIMARG1 reproduces ARG0, ARG0 was made from ARG1.  */
2652       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2659 /* See if ARG is an expression that is either a comparison or is performing
2660 arithmetic on comparisons. The comparisons must only be comparing
2661 two different values, which will be stored in *CVAL1 and *CVAL2; if
2662 they are nonzero it means that some operands have already been found.
2663 No variables may be used anywhere else in the expression except in the
2664 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2665 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2667 If this is true, return 1. Otherwise, return zero. */
2670 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2672   enum tree_code code = TREE_CODE (arg);
2673   enum tree_code_class class = TREE_CODE_CLASS (code);
2675   /* We can handle some of the tcc_expression cases here. */
2676   if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2678   else if (class == tcc_expression
2679 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2680 	       || code == COMPOUND_EXPR))
2683   else if (class == tcc_expression && code == SAVE_EXPR
2684 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2686   /* If we've already found a CVAL1 or CVAL2, this expression is
2687      too complex to handle. */
2688   if (*cval1 || *cval2)
  /* Unary case: recurse into the single operand.  */
2698       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
  /* Binary case: both operands must qualify.  */
2701       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2702 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2703 				      cval1, cval2, save_p));
2708     case tcc_expression:
2709       if (code == COND_EXPR)
2710 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2711 				     cval1, cval2, save_p)
2712 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2713 					cval1, cval2, save_p)
2714 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2715 					cval1, cval2, save_p));
2718     case tcc_comparison:
2719       /* First see if we can handle the first operand, then the second. For
2720 	 the second operand, we know *CVAL1 can't be zero. It must be that
2721 	 one side of the comparison is each of the values; test for the
2722 	 case where this isn't true by failing if the two operands
  /* A comparison of an operand against itself tells us nothing about
     which of the two tracked values each side is.  */
2725       if (operand_equal_p (TREE_OPERAND (arg, 0),
2726 			   TREE_OPERAND (arg, 1), 0))
  /* Record operand 0 as CVAL1 or CVAL2, or match an existing one.  */
2730 	*cval1 = TREE_OPERAND (arg, 0);
2731       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2733       else if (*cval2 == 0)
2734 	*cval2 = TREE_OPERAND (arg, 0);
2735       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
  /* Likewise for operand 1 (here *CVAL1 is known nonzero).  */
2740       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2742       else if (*cval2 == 0)
2743 	*cval2 = TREE_OPERAND (arg, 1);
2744       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2756 /* ARG is a tree that is known to contain just arithmetic operations and
2757 comparisons. Evaluate the operations in the tree substituting NEW0 for
2758 any occurrence of OLD0 as an operand of a comparison and likewise for
2762 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2764   tree type = TREE_TYPE (arg);
2765   enum tree_code code = TREE_CODE (arg);
2766   enum tree_code_class class = TREE_CODE_CLASS (code);
2768   /* We can handle some of the tcc_expression cases here. */
2769   if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2771   else if (class == tcc_expression
2772 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
  /* Unary: rebuild the node with the operand substituted, then fold.  */
2778       return fold (build1 (code, type,
2779 			   eval_subst (TREE_OPERAND (arg, 0),
2780 				       old0, new0, old1, new1)));
  /* Binary: substitute in both operands and refold.  */
2783       return fold (build2 (code, type,
2784 			   eval_subst (TREE_OPERAND (arg, 0),
2785 				       old0, new0, old1, new1),
2786 			   eval_subst (TREE_OPERAND (arg, 1),
2787 				       old0, new0, old1, new1)));
2789     case tcc_expression:
2793 	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2796 	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
  /* Ternary expression (e.g. COND_EXPR): substitute in all three.  */
2799 	  return fold (build3 (code, type,
2800 			       eval_subst (TREE_OPERAND (arg, 0),
2801 					   old0, new0, old1, new1),
2802 			       eval_subst (TREE_OPERAND (arg, 1),
2803 					   old0, new0, old1, new1),
2804 			       eval_subst (TREE_OPERAND (arg, 2),
2805 					   old0, new0, old1, new1)));
2809       /* Fall through - ??? */
2811     case tcc_comparison:
2813 	tree arg0 = TREE_OPERAND (arg, 0);
2814 	tree arg1 = TREE_OPERAND (arg, 1);
2816 	/* We need to check both for exact equality and tree equality. The
2817 	   former will be true if the operand has a side-effect. In that
2818 	   case, we know the operand occurred exactly once. */
2820 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2822 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2825 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2827 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2830 	return fold (build2 (code, type, arg0, arg1));
2838 /* Return a tree for the case when the result of an expression is RESULT
2839 converted to TYPE and OMITTED was previously an operand of the expression
2840 but is now not needed (e.g., we folded OMITTED * 0).
2842 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2843 the conversion of RESULT to TYPE. */
2846 omit_one_operand (tree type, tree result, tree omitted)
2848   tree t = fold_convert (type, result);
  /* OMITTED must still be evaluated for its side effects, sequenced
     before the result via a COMPOUND_EXPR.  */
2850   if (TREE_SIDE_EFFECTS (omitted))
2851     return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2853   return non_lvalue (t);
2856 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2859 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2861   tree t = fold_convert (type, result);
  /* Same as omit_one_operand, but wraps the result with
     pedantic_non_lvalue instead of non_lvalue.  */
2863   if (TREE_SIDE_EFFECTS (omitted))
2864     return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2866   return pedantic_non_lvalue (t);
2869 /* Return a tree for the case when the result of an expression is RESULT
2870 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2871 of the expression but are now not needed.
2873 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2874 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2875 evaluated before OMITTED2. Otherwise, if neither has side effects,
2876 just do the conversion of RESULT to TYPE. */
2879 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2881   tree t = fold_convert (type, result);
  /* Build innermost-last: wrapping OMITTED2 first and then OMITTED1
     yields evaluation order OMITTED1, OMITTED2, result.  */
2883   if (TREE_SIDE_EFFECTS (omitted2))
2884     t = build2 (COMPOUND_EXPR, type, omitted2, t);
2885   if (TREE_SIDE_EFFECTS (omitted1))
2886     t = build2 (COMPOUND_EXPR, type, omitted1, t);
  /* Only a plain value (no side effects wrapped) gets non_lvalue.  */
2888   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2892 /* Return a simplified tree node for the truth-negation of ARG. This
2893 never alters ARG itself. We assume that ARG is an operation that
2894 returns a truth value (0 or 1).
2896 FIXME: one would think we would fold the result, but it causes
2897 problems with the dominator optimizer. */
2899 invert_truthvalue (tree arg)
2901   tree type = TREE_TYPE (arg);
2902   enum tree_code code = TREE_CODE (arg);
2904   if (code == ERROR_MARK)
2907   /* If this is a comparison, we can simply invert it, except for
2908      floating-point non-equality comparisons, in which case we just
2909      enclose a TRUTH_NOT_EXPR around what we have. */
2911   if (TREE_CODE_CLASS (code) == tcc_comparison)
2913       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
  /* With -ftrapping-math, inverting an ordered FP comparison could
     change which operand pairs trap, so keep an explicit NOT.  */
2914       if (FLOAT_TYPE_P (op_type)
2915 	  && flag_trapping_math
2916 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
2917 	  && code != NE_EXPR && code != EQ_EXPR)
2918 	return build1 (TRUTH_NOT_EXPR, type, arg);
2921 	  code = invert_tree_comparison (code,
2922 					 HONOR_NANS (TYPE_MODE (op_type)));
  /* invert_tree_comparison signals "no inverse" with ERROR_MARK.  */
2923 	  if (code == ERROR_MARK)
2924 	    return build1 (TRUTH_NOT_EXPR, type, arg);
2926 	    return build2 (code, type,
2927 			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
  /* Constant case: !0 -> 1, !nonzero -> 0.  */
2934       return fold_convert (type,
2935 			   build_int_cst (NULL_TREE, integer_zerop (arg)));
2937     case TRUTH_AND_EXPR:
  /* De Morgan: !(a && b) == !a || !b.  */
2938       return build2 (TRUTH_OR_EXPR, type,
2939 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
2940 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
  /* De Morgan: !(a || b) == !a && !b.  */
2943       return build2 (TRUTH_AND_EXPR, type,
2944 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
2945 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
2947     case TRUTH_XOR_EXPR:
2948       /* Here we can invert either operand. We invert the first operand
2949 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
2950 	 result is the XOR of the first operand with the inside of the
2951 	 negation of the second operand. */
2953       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2954 	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2955 		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2957       return build2 (TRUTH_XOR_EXPR, type,
2958 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
2959 		     TREE_OPERAND (arg, 1));
2961     case TRUTH_ANDIF_EXPR:
  /* Short-circuit De Morgan; operand order (and thus evaluation
     order) is preserved.  */
2962       return build2 (TRUTH_ORIF_EXPR, type,
2963 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
2964 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
2966     case TRUTH_ORIF_EXPR:
2967       return build2 (TRUTH_ANDIF_EXPR, type,
2968 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
2969 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
2971     case TRUTH_NOT_EXPR:
  /* Double negation cancels.  */
2972       return TREE_OPERAND (arg, 0);
  /* Push the inversion into both arms of a COND_EXPR.  */
2975       return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2976 		     invert_truthvalue (TREE_OPERAND (arg, 1)),
2977 		     invert_truthvalue (TREE_OPERAND (arg, 2)));
  /* Only the value operand of a COMPOUND_EXPR is inverted.  */
2980       return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2981 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
2983     case NON_LVALUE_EXPR:
2984       return invert_truthvalue (TREE_OPERAND (arg, 0));
  /* A conversion from boolean preserves truth-value semantics, so the
     inversion can be pushed below it.  */
2987       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2992       return build1 (TREE_CODE (arg), type,
2993 		     invert_truthvalue (TREE_OPERAND (arg, 0)));
  /* (x & 1) is a truth value; !(x & 1) == ((x & 1) == 0).  Other masks
     are not pure truth values, so fall back to an explicit NOT.  */
2996       if (!integer_onep (TREE_OPERAND (arg, 1)))
2998       return build2 (EQ_EXPR, type, arg,
2999 		     fold_convert (type, integer_zero_node));
3002       return build1 (TRUTH_NOT_EXPR, type, arg);
3004     case CLEANUP_POINT_EXPR:
3005       return build1 (CLEANUP_POINT_EXPR, type,
3006 		     invert_truthvalue (TREE_OPERAND (arg, 0)));
  /* Default: ARG must already be boolean-typed to be safely wrapped.  */
3011   gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3012   return build1 (TRUTH_NOT_EXPR, type, arg);
3015 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3016 operands are another bit-wise operation with a common input. If so,
3017 distribute the bit operations to save an operation and possibly two if
3018 constants are involved. For example, convert
3019 (A | B) & (A | C) into A | (B & C)
3020 Further simplification will occur if B and C are constants.
3022 If this optimization cannot be done, 0 will be returned. */
3025 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
  /* Both operands must use the same bit-wise code, different from CODE,
     and that code must be AND or IOR for the distribution to be valid.  */
3030   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3031       || TREE_CODE (arg0) == code
3032       || (TREE_CODE (arg0) != BIT_AND_EXPR
3033 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
  /* Try all four operand pairings to locate the common subexpression;
     bit-wise AND/IOR are commutative, so any pairing is acceptable.  */
3036   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3038       common = TREE_OPERAND (arg0, 0);
3039       left = TREE_OPERAND (arg0, 1);
3040       right = TREE_OPERAND (arg1, 1);
3042   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3044       common = TREE_OPERAND (arg0, 0);
3045       left = TREE_OPERAND (arg0, 1);
3046       right = TREE_OPERAND (arg1, 0);
3048   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3050       common = TREE_OPERAND (arg0, 1);
3051       left = TREE_OPERAND (arg0, 0);
3052       right = TREE_OPERAND (arg1, 1);
3054   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3056       common = TREE_OPERAND (arg0, 1);
3057       left = TREE_OPERAND (arg0, 0);
3058       right = TREE_OPERAND (arg1, 0);
  /* Rebuild as COMMON op' (LEFT op RIGHT), e.g.
     (A | B) & (A | C) -> A | (B & C), and fold the result.  */
3063   return fold (build2 (TREE_CODE (arg0), type, common,
3064 		       fold (build2 (code, type, left, right))));
3067 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3068 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3071 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
  /* BIT_FIELD_REF operands: object, width in bits, starting bit.  */
3074   tree result = build3 (BIT_FIELD_REF, type, inner,
3075 			size_int (bitsize), bitsize_int (bitpos));
3077   BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3082 /* Optimize a bit-field compare.
3084 There are two cases: First is a compare against a constant and the
3085 second is a comparison of two items where the fields are at the same
3086 bit position relative to the start of a chunk (byte, halfword, word)
3087 large enough to contain it. In these cases we can avoid the shift
3088 implicit in bitfield extractions.
3090 For constants, we emit a compare of the shifted constant with the
3091 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3092 compared. For two fields at the same position, we do the ANDs with the
3093 similar mask and compare the result of the ANDs.
3095 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3096 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3097 are the left and right operands of the comparison, respectively.
3099 If the optimization described above can be done, we return the resulting
3100 tree. Otherwise we return zero. */
3103 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3106   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3107   tree type = TREE_TYPE (lhs);
3108   tree signed_type, unsigned_type;
3109   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3110   enum machine_mode lmode, rmode, nmode;
3111   int lunsignedp, runsignedp;
3112   int lvolatilep = 0, rvolatilep = 0;
3113   tree linner, rinner = NULL_TREE;
3117   /* Get all the information about the extractions being done. If the bit size
3118      if the same as the size of the underlying object, we aren't doing an
3119      extraction at all and so can do nothing. We also don't want to
3120      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3121      then will no longer be able to replace it. */
3122   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3123 				&lunsignedp, &lvolatilep, false);
3124   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3125       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3130   /* If this is not a constant, we can only do something if bit positions,
3131      sizes, and signedness are the same. */
3132   rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3133 				&runsignedp, &rvolatilep, false);
3135   if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3136       || lunsignedp != runsignedp || offset != 0
3137       || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3141   /* See if we can find a mode to refer to this field. We should be able to,
3142      but fail if we can't. */
3143   nmode = get_best_mode (lbitsize, lbitpos,
3144 			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3145 			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3146 				TYPE_ALIGN (TREE_TYPE (rinner))),
3147 			 word_mode, lvolatilep || rvolatilep);
3148   if (nmode == VOIDmode)
3151   /* Set signed and unsigned types of the precision of this mode for the
3153   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3154   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3156   /* Compute the bit position and size for the new reference and our offset
3157      within it. If the new reference is the same size as the original, we
3158      won't optimize anything, so return zero. */
3159   nbitsize = GET_MODE_BITSIZE (nmode);
3160   nbitpos = lbitpos & ~ (nbitsize - 1);
3162   if (nbitsize == lbitsize)
  /* Bit numbering runs from the other end on big-endian targets.  */
3165   if (BYTES_BIG_ENDIAN)
3166     lbitpos = nbitsize - lbitsize - lbitpos;
3168   /* Make the mask to be used against the extracted field. */
  /* Start from all-ones, then shift left and back right to leave ones
     exactly over the LBITSIZE bits at LBITPOS.  */
3169   mask = build_int_cst (unsigned_type, -1);
3170   mask = force_fit_type (mask, 0, false, false);
3171   mask = fold_convert (unsigned_type, mask);
3172   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3173   mask = const_binop (RSHIFT_EXPR, mask,
3174 		      size_int (nbitsize - lbitsize - lbitpos), 0);
3177     /* If not comparing with constant, just rework the comparison
  /* Non-constant case: compare (word & mask) == (word' & mask).  */
3179     return build2 (code, compare_type,
3180 		   build2 (BIT_AND_EXPR, unsigned_type,
3181 			   make_bit_field_ref (linner, unsigned_type,
3182 					       nbitsize, nbitpos, 1),
3184 		   build2 (BIT_AND_EXPR, unsigned_type,
3185 			   make_bit_field_ref (rinner, unsigned_type,
3186 					       nbitsize, nbitpos, 1),
3189   /* Otherwise, we are handling the constant case. See if the constant is too
3190      big for the field. Warn and return a tree for 0 (false) if so. We do
3191      this not only for its own sake, but to avoid having to test for this
3192      error case below. If we didn't, we might generate wrong code.
3194      For unsigned fields, the constant shifted right by the field length should
3195      be all zero. For signed fields, the high-order bits should agree with
3200       if (! integer_zerop (const_binop (RSHIFT_EXPR,
3201 					fold_convert (unsigned_type, rhs),
3202 					size_int (lbitsize), 0)))
3204 	  warning ("comparison is always %d due to width of bit-field",
3206 	  return constant_boolean_node (code == NE_EXPR, compare_type);
  /* Signed field: bits above the field must all equal the sign bit.  */
3211       tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3212 			      size_int (lbitsize - 1), 0);
3213       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3215 	  warning ("comparison is always %d due to width of bit-field",
3217 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3221   /* Single-bit compares should always be against zero. */
3222   if (lbitsize == 1 && ! integer_zerop (rhs))
3224       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3225       rhs = fold_convert (type, integer_zero_node);
3228   /* Make a new bitfield reference, shift the constant over the
3229      appropriate number of bits and mask it with the computed mask
3230      (in case this was a signed field). If we changed it, make a new one. */
3231   lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3234       TREE_SIDE_EFFECTS (lhs) = 1;
3235       TREE_THIS_VOLATILE (lhs) = 1;
3238   rhs = fold (const_binop (BIT_AND_EXPR,
3239 			   const_binop (LSHIFT_EXPR,
3240 					fold_convert (unsigned_type, rhs),
3241 					size_int (lbitpos), 0),
3244   return build2 (code, compare_type,
3245 		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3249 /* Subroutine for fold_truthop: decode a field reference.
3251 If EXP is a comparison reference, we return the innermost reference.
3253 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3254 set to the starting bit number.
3256 If the innermost field can be completely contained in a mode-sized
3257 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3259 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3260 otherwise it is not changed.
3262 *PUNSIGNEDP is set to the signedness of the field.
3264 *PMASK is set to the mask used. This is either contained in a
3265 BIT_AND_EXPR or derived from the width of the field.
3267 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3269 Return 0 if this is not a component reference or is one that we can't
3270 do anything with. */
3273 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3274 			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3275 			int *punsignedp, int *pvolatilep,
3276 			tree *pmask, tree *pand_mask)
3278   tree outer_type = 0;
3280   tree mask, inner, offset;
3282   unsigned int precision;
3284   /* All the optimizations using this function assume integer fields.
3285      There are problems with FP fields since the type_for_size call
3286      below can fail for, e.g., XFmode. */
3287   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3290   /* We are interested in the bare arrangement of bits, so strip everything
3291      that doesn't affect the machine mode. However, record the type of the
3292      outermost expression if it may matter below. */
3293   if (TREE_CODE (exp) == NOP_EXPR
3294       || TREE_CODE (exp) == CONVERT_EXPR
3295       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3296     outer_type = TREE_TYPE (exp);
  /* Peel off an explicit BIT_AND_EXPR mask; only a constant mask can be
     merged with the field-width mask below.  */
3299   if (TREE_CODE (exp) == BIT_AND_EXPR)
3301       and_mask = TREE_OPERAND (exp, 1);
3302       exp = TREE_OPERAND (exp, 0);
3303       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3304       if (TREE_CODE (and_mask) != INTEGER_CST)
3308   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3309 			       punsignedp, pvolatilep, false);
  /* Reject non-references (unless an AND mask makes it interesting),
     variable offsets, and PLACEHOLDER_EXPRs we could not re-substitute.  */
3310   if ((inner == exp && and_mask == 0)
3311       || *pbitsize < 0 || offset != 0
3312       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3315   /* If the number of bits in the reference is the same as the bitsize of
3316      the outer type, then the outer type gives the signedness. Otherwise
3317      (in case of a small bitfield) the signedness is unchanged. */
3318   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3319     *punsignedp = TYPE_UNSIGNED (outer_type);
3321   /* Compute the mask to access the bitfield. */
3322   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3323   precision = TYPE_PRECISION (unsigned_type);
  /* All-ones shifted left then right leaves the low *PBITSIZE bits set.  */
3325   mask = build_int_cst (unsigned_type, -1);
3326   mask = force_fit_type (mask, 0, false, false);
3328   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3329   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3331   /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3333     mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3334 			 fold_convert (unsigned_type, and_mask), mask));
3337   *pand_mask = and_mask;
3341 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3345 all_ones_mask_p (tree mask, int size)
3347   tree type = TREE_TYPE (mask);
3348   unsigned int precision = TYPE_PRECISION (type);
  /* Build an all-ones constant of the signed counterpart type.  */
3351   tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3352   tmask = force_fit_type (tmask, 0, false, false);
  /* MASK qualifies iff it equals all-ones shifted left then right by
     PRECISION - SIZE, i.e. exactly SIZE low-order one bits.  */
3355     tree_int_cst_equal (mask,
3356 			const_binop (RSHIFT_EXPR,
3357 				     const_binop (LSHIFT_EXPR, tmask,
3358 						  size_int (precision - size),
3360 				     size_int (precision - size), 0));
3363 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3364 represents the sign bit of EXP's type. If EXP represents a sign
3365 or zero extension, also test VAL against the unextended type.
3366 The return value is the (sub)expression whose sign bit is VAL,
3367 or NULL_TREE otherwise. */
3370 sign_bit_p (tree exp, tree val)
3372   unsigned HOST_WIDE_INT mask_lo, lo;
3373   HOST_WIDE_INT mask_hi, hi;
3377   /* Tree EXP must have an integral type. */
3378   t = TREE_TYPE (exp);
3379   if (! INTEGRAL_TYPE_P (t))
3382   /* Tree VAL must be an integer constant. */
3383   if (TREE_CODE (val) != INTEGER_CST
3384       || TREE_CONSTANT_OVERFLOW (val))
3387   width = TYPE_PRECISION (t);
  /* Constants wider than one host word keep the sign bit in the high
     word (HI/MASK_HI); otherwise it lives in the low word (LO/MASK_LO).  */
3388   if (width > HOST_BITS_PER_WIDE_INT)
3390       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3393       mask_hi = ((unsigned HOST_WIDE_INT) -1
3394 		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3400       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3403       mask_lo = ((unsigned HOST_WIDE_INT) -1
3404 		 >> (HOST_BITS_PER_WIDE_INT - width));
3407   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3408      treat VAL as if it were unsigned. */
3409   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3410       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3413   /* Handle extension from a narrower type. */
  /* A NOP_EXPR widening EXP means VAL may be the sign bit of the
     unextended operand; recurse to check.  */
3414   if (TREE_CODE (exp) == NOP_EXPR
3415       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3416     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3421 /* Subroutine for fold_truthop: determine if an operand is simple enough
3422 to be evaluated unconditionally. */
3425 simple_operand_p (tree exp)
3427   /* Strip any conversions that don't change the machine mode. */
  /* Constants and SSA names are always safe to evaluate; a declaration
     qualifies only if loading it cannot trap or be observed.  */
3430   return (CONSTANT_CLASS_P (exp)
3431   	  || TREE_CODE (exp) == SSA_NAME
3433 	      && ! TREE_ADDRESSABLE (exp)
3434 	      && ! TREE_THIS_VOLATILE (exp)
3435 	      && ! DECL_NONLOCAL (exp)
3436 	      /* Don't regard global variables as simple. They may be
3437 		 allocated in ways unknown to the compiler (shared memory,
3438 		 #pragma weak, etc). */
3439 	      && ! TREE_PUBLIC (exp)
3440 	      && ! DECL_EXTERNAL (exp)
3441 	      /* Loading a static variable is unduly expensive, but global
3442 		 registers aren't expensive. */
3443 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3446 /* The following functions are subroutines to fold_range_test and allow it to
3447 try to change a logical combination of comparisons into a range test.
3450 X == 2 || X == 3 || X == 4 || X == 5
3454 (unsigned) (X - 2) <= 3
3456 We describe each set of comparisons as being either inside or outside
3457 a range, using a variable named like IN_P, and then describe the
3458 range with a lower and upper bound. If one of the bounds is omitted,
3459 it represents either the highest or lowest value of the type.
3461 In the comments below, we represent a range by two numbers in brackets
3462 preceded by a "+" to designate being inside that range, or a "-" to
3463 designate being outside that range, so the condition can be inverted by
3464 flipping the prefix. An omitted bound is represented by a "-". For
3465 example, "- [-, 10]" means being outside the range starting at the lowest
3466 possible value and ending at 10, in other words, being greater than 10.
3467 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3470 We set up things so that the missing bounds are handled in a consistent
3471 manner so neither a missing bound nor "true" and "false" need to be
3472 handled using a special case. */
3474 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3475 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3476 and UPPER1_P are nonzero if the respective argument is an upper bound
3477 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3478 must be specified for a comparison. ARG1 will be converted to ARG0's
3479 type if both are specified. */
3482 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3483 	     tree arg1, int upper1_p)
3489   /* If neither arg represents infinity, do the normal operation.
3490      Else, if not a comparison, return infinity. Else handle the special
3491      comparison rules. Note that most of the cases below won't occur, but
3492      are handled for consistency. */
3494   if (arg0 != 0 && arg1 != 0)
3496       tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3497 			  arg0, fold_convert (TREE_TYPE (arg0), arg1)));
  /* Only a constant result is useful to the range machinery.  */
3499       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3502   if (TREE_CODE_CLASS (code) != tcc_comparison)
3505   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3506      for neither. In real maths, we cannot assume open ended ranges are
3507      the same. But, this is computer arithmetic, where numbers are finite.
3508      We can therefore make the transformation of any unbounded range with
3509      the value Z, Z being greater than any representable number. This permits
3510      us to treat unbounded ranges as equal. */
3511   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3512   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  /* Compare the signed indicators in place of the infinite bounds.  */
3516       result = sgn0 == sgn1;
3519       result = sgn0 != sgn1;
3522       result = sgn0 < sgn1;
3525       result = sgn0 <= sgn1;
3528       result = sgn0 > sgn1;
3531       result = sgn0 >= sgn1;
3537   return constant_boolean_node (result, type);
3540 /* Given EXP, a logical expression, set the range it is testing into
3541 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3542 actually being tested. *PLOW and *PHIGH will be made of the same type
3543 as the returned expression. If EXP is not a comparison, we will most
3544 likely not be returning a useful value and range. */
3547 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3549 enum tree_code code;
3550 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3551 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3553 tree low, high, n_low, n_high;
3555 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3556 and see if we can refine the range. Some of the cases below may not
3557 happen, but it doesn't seem worth worrying about this. We "continue"
3558 the outer loop when we've changed something; otherwise we "break"
3559 the switch, which will "break" the while. */
3562 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3566 code = TREE_CODE (exp);
3567 exp_type = TREE_TYPE (exp);
3569 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3571 if (TREE_CODE_LENGTH (code) > 0)
3572 arg0 = TREE_OPERAND (exp, 0);
3573 if (TREE_CODE_CLASS (code) == tcc_comparison
3574 || TREE_CODE_CLASS (code) == tcc_unary
3575 || TREE_CODE_CLASS (code) == tcc_binary)
3576 arg0_type = TREE_TYPE (arg0);
3577 if (TREE_CODE_CLASS (code) == tcc_binary
3578 || TREE_CODE_CLASS (code) == tcc_comparison
3579 || (TREE_CODE_CLASS (code) == tcc_expression
3580 && TREE_CODE_LENGTH (code) > 1))
3581 arg1 = TREE_OPERAND (exp, 1);
3586 case TRUTH_NOT_EXPR:
3587 in_p = ! in_p, exp = arg0;
3590 case EQ_EXPR: case NE_EXPR:
3591 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3592 /* We can only do something if the range is testing for zero
3593 and if the second operand is an integer constant. Note that
3594 saying something is "in" the range we make is done by
3595 complementing IN_P since it will set in the initial case of
3596 being not equal to zero; "out" is leaving it alone. */
3597 if (low == 0 || high == 0
3598 || ! integer_zerop (low) || ! integer_zerop (high)
3599 || TREE_CODE (arg1) != INTEGER_CST)
3604 case NE_EXPR: /* - [c, c] */
3607 case EQ_EXPR: /* + [c, c] */
3608 in_p = ! in_p, low = high = arg1;
3610 case GT_EXPR: /* - [-, c] */
3611 low = 0, high = arg1;
3613 case GE_EXPR: /* + [c, -] */
3614 in_p = ! in_p, low = arg1, high = 0;
3616 case LT_EXPR: /* - [c, -] */
3617 low = arg1, high = 0;
3619 case LE_EXPR: /* + [-, c] */
3620 in_p = ! in_p, low = 0, high = arg1;
3626 /* If this is an unsigned comparison, we also know that EXP is
3627 greater than or equal to zero. We base the range tests we make
3628 on that fact, so we record it here so we can parse existing
3629 range tests. We test arg0_type since often the return type
3630 of, e.g. EQ_EXPR, is boolean. */
3631 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3633 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3635 fold_convert (arg0_type, integer_zero_node),
3639 in_p = n_in_p, low = n_low, high = n_high;
3641 /* If the high bound is missing, but we have a nonzero low
3642 bound, reverse the range so it goes from zero to the low bound
3644 if (high == 0 && low && ! integer_zerop (low))
3647 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3648 integer_one_node, 0);
3649 low = fold_convert (arg0_type, integer_zero_node);
3657 /* (-x) IN [a,b] -> x in [-b, -a] */
3658 n_low = range_binop (MINUS_EXPR, exp_type,
3659 fold_convert (exp_type, integer_zero_node),
3661 n_high = range_binop (MINUS_EXPR, exp_type,
3662 fold_convert (exp_type, integer_zero_node),
3664 low = n_low, high = n_high;
3670 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3671 fold_convert (exp_type, integer_one_node));
3674 case PLUS_EXPR: case MINUS_EXPR:
3675 if (TREE_CODE (arg1) != INTEGER_CST)
3678 /* If EXP is signed, any overflow in the computation is undefined,
3679 so we don't worry about it so long as our computations on
3680 the bounds don't overflow. For unsigned, overflow is defined
3681 and this is exactly the right thing. */
3682 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3683 arg0_type, low, 0, arg1, 0);
3684 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3685 arg0_type, high, 1, arg1, 0);
3686 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3687 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3690 /* Check for an unsigned range which has wrapped around the maximum
3691 value thus making n_high < n_low, and normalize it. */
3692 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3694 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3695 integer_one_node, 0);
3696 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3697 integer_one_node, 0);
3699 /* If the range is of the form +/- [ x+1, x ], we won't
3700 be able to normalize it. But then, it represents the
3701 whole range or the empty set, so make it
3703 if (tree_int_cst_equal (n_low, low)
3704 && tree_int_cst_equal (n_high, high))
3710 low = n_low, high = n_high;
3715 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3716 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3719 if (! INTEGRAL_TYPE_P (arg0_type)
3720 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3721 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3724 n_low = low, n_high = high;
3727 n_low = fold_convert (arg0_type, n_low);
3730 n_high = fold_convert (arg0_type, n_high);
3733 /* If we're converting arg0 from an unsigned type, to exp,
3734 a signed type, we will be doing the comparison as unsigned.
3735 The tests above have already verified that LOW and HIGH
3738 So we have to ensure that we will handle large unsigned
3739 values the same way that the current signed bounds treat
3742 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3745 tree equiv_type = lang_hooks.types.type_for_mode
3746 (TYPE_MODE (arg0_type), 1);
3748 /* A range without an upper bound is, naturally, unbounded.
3749 Since convert would have cropped a very large value, use
3750 the max value for the destination type. */
3752 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3753 : TYPE_MAX_VALUE (arg0_type);
3755 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3756 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3757 fold_convert (arg0_type,
3759 fold_convert (arg0_type,
3760 integer_one_node)));
3762 /* If the low bound is specified, "and" the range with the
3763 range for which the original unsigned value will be
3767 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3768 1, n_low, n_high, 1,
3769 fold_convert (arg0_type,
3774 in_p = (n_in_p == in_p);
3778 /* Otherwise, "or" the range with the range of the input
3779 that will be interpreted as negative. */
3780 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3781 0, n_low, n_high, 1,
3782 fold_convert (arg0_type,
3787 in_p = (in_p != n_in_p);
3792 low = n_low, high = n_high;
3802 /* If EXP is a constant, we can evaluate whether this is true or false. */
3803 if (TREE_CODE (exp) == INTEGER_CST)
3805 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3807 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3813 *pin_p = in_p, *plow = low, *phigh = high;
3817 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3818 type, TYPE, return an expression to test if EXP is in (or out of, depending
3819 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): this excerpt is non-contiguous (the embedded original line
   numbers skip), so the `static tree` return-type line and several guards
   and braces are not visible here.  Comments below describe only the
   visible lines.  */
3822 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3824 tree etype = TREE_TYPE (exp);
/* An "out of range" test is built by inverting the corresponding
   "in range" test.  */
3829 value = build_range_check (type, exp, 1, low, high);
3831 return invert_truthvalue (value);
/* No bound on either side: the range is everything, so the test is
   always true.  */
3836 if (low == 0 && high == 0)
3837 return fold_convert (type, integer_one_node);
/* One-sided ranges reduce to a single comparison (presumably guarded by
   low == 0 / high == 0 tests on lines not visible here -- confirm).  */
3840 return fold (build2 (LE_EXPR, type, exp, high));
3843 return fold (build2 (GE_EXPR, type, exp, low));
/* A degenerate range [x, x] is an equality test.  */
3845 if (operand_equal_p (low, high, 0))
3846 return fold (build2 (EQ_EXPR, type, exp, low));
/* [0, high] on a signed EXP: redo the check in the unsigned variant of
   ETYPE, where negative values wrap above HIGH, so the lower bound
   comes for free.  */
3848 if (integer_zerop (low))
3850 if (! TYPE_UNSIGNED (etype))
3852 etype = lang_hooks.types.unsigned_type (etype);
3853 high = fold_convert (etype, high);
3854 exp = fold_convert (etype, exp);
3856 return build_range_check (type, exp, 1, 0, high);
3859 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3860 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3862 unsigned HOST_WIDE_INT lo;
/* Build HI/LO as the two HOST_WIDE_INT halves of the signed maximum for
   ETYPE's precision, handling precisions wider than one word.  */
3866 prec = TYPE_PRECISION (etype);
3867 if (prec <= HOST_BITS_PER_WIDE_INT)
3870 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3874 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3875 lo = (unsigned HOST_WIDE_INT) -1;
/* HIGH is exactly the signed maximum, so the range [1, max] becomes
   "exp > 0" performed in the signed variant of ETYPE.  */
3878 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3880 if (TYPE_UNSIGNED (etype))
3882 etype = lang_hooks.types.signed_type (etype);
3883 exp = fold_convert (etype, exp);
3885 return fold (build2 (GT_EXPR, type, exp,
3886 fold_convert (etype, integer_zero_node)));
/* General case: shift the range so it starts at zero and test
   (exp - low) against (high - low).  First check that the width of the
   range itself can be computed without overflow.  */
3890 value = const_binop (MINUS_EXPR, high, low, 0);
3891 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3893 tree utype, minv, maxv;
3895 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3896 for the type in question, as we rely on this here. */
3897 switch (TREE_CODE (etype))
3902 utype = lang_hooks.types.unsigned_type (etype);
3903 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3904 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3905 integer_one_node, 1);
3906 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3907 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* The unsigned type wraps as required: redo the subtraction there.
   (ETYPE is presumably switched to UTYPE on a line not visible in this
   excerpt -- confirm.)  */
3911 high = fold_convert (etype, high);
3912 low = fold_convert (etype, low);
3913 exp = fold_convert (etype, exp);
3914 value = const_binop (MINUS_EXPR, high, low, 0);
/* If the range width is now well-defined, recurse to emit the shifted
   check (exp - low) in [0, high - low].  */
3922 if (value != 0 && ! TREE_OVERFLOW (value))
3923 return build_range_check (type,
3924 fold (build2 (MINUS_EXPR, etype, exp, low)),
3925 1, fold_convert (etype, integer_zero_node),
3931 /* Given two ranges, see if we can merge them into one. Return 1 if we
3932 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): non-contiguous excerpt -- the `static int` return-type
   line, several `if` guards, `else` lines and braces are not visible.
   A null LOW/HIGH bound means "unbounded on that side".  */
3935 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3936 tree high0, int in1_p, tree low1, tree high1)
/* Bounds compare equal when both are null (unbounded) or range_binop
   says the constants are equal.  */
3944 int lowequal = ((low0 == 0 && low1 == 0)
3945 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3946 low0, 0, low1, 0)));
3947 int highequal = ((high0 == 0 && high1 == 0)
3948 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3949 high0, 1, high1, 1)));
3951 /* Make range 0 be the range that starts first, or ends last if they
3952 start at the same value. Swap them if it isn't. */
3953 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3956 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3957 high1, 1, high0, 1))))
3959 temp = in0_p, in0_p = in1_p, in1_p = temp;
3960 tem = low0, low0 = low1, low1 = tem;
3961 tem = high0, high0 = high1, high1 = tem;
3964 /* Now flag two cases, whether the ranges are disjoint or whether the
3965 second range is totally subsumed in the first. Note that the tests
3966 below are simplified by the ones above. */
3967 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3968 high0, 1, low1, 0));
3969 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3970 high1, 1, high0, 1));
3972 /* We now have four cases, depending on whether we are including or
3973 excluding the two ranges. */
/* Case 1: both ranges included (the `if (in0_p && in1_p)` guard is
   presumably on a line not visible here -- confirm).  */
3976 /* If they don't overlap, the result is false. If the second range
3977 is a subset it is the result. Otherwise, the range is from the start
3978 of the second to the end of the first. */
3980 in_p = 0, low = high = 0;
3982 in_p = 1, low = low1, high = high1;
3984 in_p = 1, low = low1, high = high0;
/* Case 2: include the first range, exclude the second.  */
3987 else if (in0_p && ! in1_p)
3989 /* If they don't overlap, the result is the first range. If they are
3990 equal, the result is false. If the second range is a subset of the
3991 first, and the ranges begin at the same place, we go from just after
3992 the end of the first range to the end of the second. If the second
3993 range is not a subset of the first, or if it is a subset and both
3994 ranges end at the same place, the range starts at the start of the
3995 first range and ends just before the second range.
3996 Otherwise, we can't describe this as a single range. */
3998 in_p = 1, low = low0, high = high0;
3999 else if (lowequal && highequal)
4000 in_p = 0, low = high = 0;
4001 else if (subset && lowequal)
4003 in_p = 1, high = high0;
4004 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4005 integer_one_node, 0);
4007 else if (! subset || highequal)
4009 in_p = 1, low = low0;
4010 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4011 integer_one_node, 0);
/* Case 3: exclude the first range, include the second.  */
4017 else if (! in0_p && in1_p)
4019 /* If they don't overlap, the result is the second range. If the second
4020 is a subset of the first, the result is false. Otherwise,
4021 the range starts just after the first range and ends at the
4022 end of the second. */
4024 in_p = 1, low = low1, high = high1;
4025 else if (subset || highequal)
4026 in_p = 0, low = high = 0;
4029 in_p = 1, high = high1;
4030 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4031 integer_one_node, 0);
/* Case 4: both ranges excluded.  */
4037 /* The case where we are excluding both ranges. Here the complex case
4038 is if they don't overlap. In that case, the only time we have a
4039 range is if they are adjacent. If the second is a subset of the
4040 first, the result is the first. Otherwise, the range to exclude
4041 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: high0 + 1 == low1 means the two excluded ranges form
   one contiguous excluded span [low0, high1].  */
4045 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4046 range_binop (PLUS_EXPR, NULL_TREE,
4048 integer_one_node, 1),
4050 in_p = 0, low = low0, high = high1;
4053 /* Canonicalize - [min, x] into - [-, x]. */
4054 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4055 switch (TREE_CODE (TREE_TYPE (low0)))
/* Don't trust TYPE_MIN/MAX_VALUE for types narrower than their machine
   mode.  */
4058 if (TYPE_PRECISION (TREE_TYPE (low0))
4059 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4064 if (tree_int_cst_equal (low0,
4065 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4069 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4070 && integer_zerop (low0))
4077 /* Canonicalize - [x, max] into - [x, -]. */
4078 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4079 switch (TREE_CODE (TREE_TYPE (high1)))
4082 if (TYPE_PRECISION (TREE_TYPE (high1))
4083 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4088 if (tree_int_cst_equal (high1,
4089 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* Unsigned maximum is detected by "high1 + 1 wraps to zero".  */
4093 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4094 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4096 integer_one_node, 1)))
4103 /* The ranges might be also adjacent between the maximum and
4104 minimum values of the given type. For
4105 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4106 return + [x + 1, y - 1]. */
4107 if (low0 == 0 && high1 == 0)
4109 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4110 integer_one_node, 1);
4111 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4112 integer_one_node, 0);
4113 if (low == 0 || high == 0)
4123 in_p = 0, low = low0, high = high0;
4125 in_p = 0, low = low0, high = high1;
/* Publish the merged range through the out parameters.  */
4128 *pin_p = in_p, *plow = low, *phigh = high;
4133 /* Subroutine of fold, looking inside expressions of the form
4134 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4135 of the COND_EXPR. This function is being used also to optimize
4136 A op B ? C : A, by reversing the comparison first.
4138 Return a folded expression whose code is not a COND_EXPR
4139 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): non-contiguous excerpt -- the `static tree` return-type
   line, the switch statements' case labels and several braces are not
   visible here.  Comments below describe only visible lines.  */
4142 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; pull out its code and both operands.  */
4144 enum tree_code comp_code = TREE_CODE (arg0);
4145 tree arg00 = TREE_OPERAND (arg0, 0);
4146 tree arg01 = TREE_OPERAND (arg0, 1);
4147 tree arg1_type = TREE_TYPE (arg1);
4153 /* If we have A op 0 ? A : -A, consider applying the following
4156 A == 0? A : -A same as -A
4157 A != 0? A : -A same as A
4158 A >= 0? A : -A same as abs (A)
4159 A > 0? A : -A same as abs (A)
4160 A <= 0? A : -A same as -abs (A)
4161 A < 0? A : -A same as -abs (A)
4163 None of these transformations work for modes with signed
4164 zeros. If A is +/-0, the first two transformations will
4165 change the sign of the result (from +0 to -0, or vice
4166 versa). The last four will fix the sign of the result,
4167 even though the original expressions could be positive or
4168 negative, depending on the sign of A.
4170 Note that all these transformations are correct if A is
4171 NaN, since the two alternatives (A and -A) are also NaNs. */
/* Match "A op 0 ? A : -A" (zero constant, float or integer).  */
4172 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4173 ? real_zerop (arg01)
4174 : integer_zerop (arg01))
4175 && TREE_CODE (arg2) == NEGATE_EXPR
4176 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* A == 0 ? A : -A  ->  -A (presumably under the EQ case label, which
   is not visible here -- confirm).  */
4181 tem = fold_convert (arg1_type, arg1);
4182 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
/* A != 0 ? A : -A  ->  A.  */
4185 return pedantic_non_lvalue (fold_convert (type, arg1));
/* Skip the abs transformations when trapping math is enabled.  */
4188 if (flag_trapping_math)
/* A >= 0 / A > 0 ? A : -A  ->  abs (A); ABS_EXPR needs a signed type,
   so convert an unsigned A first.  */
4193 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4194 arg1 = fold_convert (lang_hooks.types.signed_type
4195 (TREE_TYPE (arg1)), arg1);
4196 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4197 return pedantic_non_lvalue (fold_convert (type, tem));
4200 if (flag_trapping_math)
/* A <= 0 / A < 0 ? A : -A  ->  -abs (A).  */
4204 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4205 arg1 = fold_convert (lang_hooks.types.signed_type
4206 (TREE_TYPE (arg1)), arg1);
4207 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4208 return negate_expr (fold_convert (type, tem));
/* Any other code reaching here must still be a comparison.  */
4210 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4214 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4215 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4216 both transformations are correct when A is NaN: A != 0
4217 is then true, and A == 0 is false. */
4219 if (integer_zerop (arg01) && integer_zerop (arg2))
4221 if (comp_code == NE_EXPR)
4222 return pedantic_non_lvalue (fold_convert (type, arg1));
4223 else if (comp_code == EQ_EXPR)
4224 return fold_convert (type, integer_zero_node);
4227 /* Try some transformations of A op B ? A : B.
4229 A == B? A : B same as B
4230 A != B? A : B same as A
4231 A >= B? A : B same as max (A, B)
4232 A > B? A : B same as max (B, A)
4233 A <= B? A : B same as min (A, B)
4234 A < B? A : B same as min (B, A)
4236 As above, these transformations don't work in the presence
4237 of signed zeros. For example, if A and B are zeros of
4238 opposite sign, the first two transformations will change
4239 the sign of the result. In the last four, the original
4240 expressions give different results for (A=+0, B=-0) and
4241 (A=-0, B=+0), but the transformed expressions do not.
4243 The first two transformations are correct if either A or B
4244 is a NaN. In the first transformation, the condition will
4245 be false, and B will indeed be chosen. In the case of the
4246 second transformation, the condition A != B will be true,
4247 and A will be chosen.
4249 The conversions to max() and min() are not correct if B is
4250 a number and A is not. The conditions in the original
4251 expressions will be false, so all four give B. The min()
4252 and max() versions would give a NaN instead. */
4253 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4255 tree comp_op0 = arg00;
4256 tree comp_op1 = arg01;
4257 tree comp_type = TREE_TYPE (comp_op0);
4259 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4260 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* A == B ? A : B  ->  B;  A != B ? A : B  ->  A (case labels not
   visible in this excerpt).  */
4270 return pedantic_non_lvalue (fold_convert (type, arg2));
4272 return pedantic_non_lvalue (fold_convert (type, arg1));
4277 /* In C++ a ?: expression can be an lvalue, so put the
4278 operand which will be used if they are equal first
4279 so that we can convert this back to the
4280 corresponding COND_EXPR. */
/* LE/LT family: convert to MIN_EXPR, but only when NaNs need not be
   honored (min/max lose the NaN semantics of the comparison).  */
4281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4283 comp_op0 = fold_convert (comp_type, comp_op0);
4284 comp_op1 = fold_convert (comp_type, comp_op1);
4285 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4286 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4287 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4288 return pedantic_non_lvalue (fold_convert (type, tem));
/* GE/GT family: convert to MAX_EXPR under the same no-NaN condition.  */
4295 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4297 comp_op0 = fold_convert (comp_type, comp_op0);
4298 comp_op1 = fold_convert (comp_type, comp_op1);
4299 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4300 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4301 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4302 return pedantic_non_lvalue (fold_convert (type, tem));
4306 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4307 return pedantic_non_lvalue (fold_convert (type, arg2));
4310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4311 return pedantic_non_lvalue (fold_convert (type, arg1));
4314 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4319 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4320 we might still be able to simplify this. For example,
4321 if C1 is one less or one more than C2, this might have started
4322 out as a MIN or MAX and been transformed by this function.
4323 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4325 if (INTEGRAL_TYPE_P (type)
4326 && TREE_CODE (arg01) == INTEGER_CST
4327 && TREE_CODE (arg2) == INTEGER_CST)
4331 /* We can replace A with C1 in this case. */
4332 arg1 = fold_convert (type, arg01);
4333 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
/* The TYPE_MAX/MIN_VALUE guards below avoid forming C2 +/- 1 when that
   would wrap past the type's extreme values.  */
4336 /* If C1 is C2 + 1, this is min(A, C2). */
4337 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4339 && operand_equal_p (arg01,
4340 const_binop (PLUS_EXPR, arg2,
4341 integer_one_node, 0),
4343 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4344 type, arg1, arg2)));
4348 /* If C1 is C2 - 1, this is min(A, C2). */
4349 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4351 && operand_equal_p (arg01,
4352 const_binop (MINUS_EXPR, arg2,
4353 integer_one_node, 0),
4355 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4356 type, arg1, arg2)));
4360 /* If C1 is C2 - 1, this is max(A, C2). */
4361 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4363 && operand_equal_p (arg01,
4364 const_binop (MINUS_EXPR, arg2,
4365 integer_one_node, 0),
4367 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4368 type, arg1, arg2)));
4372 /* If C1 is C2 + 1, this is max(A, C2). */
4373 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4375 && operand_equal_p (arg01,
4376 const_binop (PLUS_EXPR, arg2,
4377 integer_one_node, 0),
4379 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4380 type, arg1, arg2)));
4393 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4394 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4397 /* EXP is some logical combination of boolean tests. See if we can
4398 merge it into some range test. Return the new tree if so. */
/* NOTE(review): non-contiguous excerpt -- the `static tree` return-type
   line and some closing braces/arguments are not visible here.  */
4401 fold_range_test (tree exp)
/* Whether EXP is an OR-style combination (TRUTH_OR / TRUTH_ORIF).  */
4403 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4404 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4405 int in0_p, in1_p, in_p;
4406 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into a range test over some common expression:
   INx_P says whether the operand is true inside [LOWx, HIGHx].  */
4407 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4408 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4411 /* If this is an OR operation, invert both sides; we will invert
4412 again at the end. */
4414 in0_p = ! in0_p, in1_p = ! in1_p;
4416 /* If both expressions are the same, if we can merge the ranges, and we
4417 can build the range test, return it or it inverted. If one of the
4418 ranges is always true or always false, consider it to be the same
4419 expression as the other. */
4420 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4421 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4423 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4425 : rhs != 0 ? rhs : integer_zero_node,
4427 return or_op ? invert_truthvalue (tem) : tem;
4429 /* On machines where the branch cost is expensive, if this is a
4430 short-circuited branch and the underlying object on both sides
4431 is the same, make a non-short-circuit operation. */
4432 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4433 && lhs != 0 && rhs != 0
4434 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4435 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4436 && operand_equal_p (lhs, rhs, 0))
4438 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4439 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4440 which cases we can't do this. */
4441 if (simple_operand_p (lhs))
4442 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4443 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4444 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4445 TREE_OPERAND (exp, 1));
4447 else if (lang_hooks.decls.global_bindings_p () == 0
4448 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the shared operand once via SAVE_EXPR, then rebuild both
   range checks against it as a non-short-circuit operation.  */
4450 tree common = save_expr (lhs);
4452 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4453 or_op ? ! in0_p : in0_p,
4455 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4456 or_op ? ! in1_p : in1_p,
4458 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4460 TREE_TYPE (exp), lhs, rhs);
4467 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4468 bit value. Arrange things so the extra bits will be set to zero if and
4469 only if C is signed-extended to its full width. If MASK is nonzero,
4470 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): non-contiguous excerpt -- the `static tree` return-type
   line and a couple of guard/return lines are not visible here.  */
4473 unextend (tree c, int p, int unsignedp, tree mask)
4475 tree type = TREE_TYPE (c);
4476 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the full mode or the field is
   unsigned (the associated return statement is not visible in this
   excerpt).  */
4479 if (p == modesize || unsignedp)
4482 /* We work by getting just the sign bit into the low-order bit, then
4483 into the high-order bit, then sign-extend. We then XOR that value
4485 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4486 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4488 /* We must use a signed type in order to get an arithmetic right shift.
4489 However, we must also avoid introducing accidental overflows, so that
4490 a subsequent call to integer_zerop will work. Hence we must
4491 do the type conversion here. At this point, the constant is either
4492 zero or one, and the conversion to a signed type can never overflow.
4493 We could get an overflow if this conversion is done anywhere else. */
4494 if (TYPE_UNSIGNED (type))
4495 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Move the sign bit to the top of the mode, then arithmetic-shift it
   back down, replicating it through all bits above position P-1.  */
4497 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4498 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the extension bits to MASK when one was supplied (the
   `mask != 0` guard is presumably on a line not visible here --
   confirm).  */
4500 temp = const_binop (BIT_AND_EXPR, temp,
4501 fold_convert (TREE_TYPE (c), mask), 0);
4502 /* If necessary, convert the type back to match the type of C. */
4503 if (TYPE_UNSIGNED (type))
4504 temp = fold_convert (type, temp);
/* XOR clears the extension bits exactly when C was sign-extended.  */
4506 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4509 /* Find ways of folding logical expressions of LHS and RHS:
4510 Try to merge two comparisons to the same innermost item.
4511 Look for range tests like "ch >= '0' && ch <= '9'".
4512 Look for combinations of simple terms on machines with expensive branches
4513 and evaluate the RHS unconditionally.
4515 For example, if we have p->a == 2 && p->b == 4 and we can make an
4516 object large enough to span both A and B, we can do this with a comparison
4517 against the object ANDed with the a mask.
4519 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4520 operations to do this with one comparison.
4522 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4523 function and the one above.
4525 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4526 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4528 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4531 We return the simplified tree or 0 if no optimization is possible. */
4534 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4536 /* If this is the "or" of two comparisons, we can do something if
4537 the comparisons are NE_EXPR. If this is the "and", we can do something
4538 if the comparisons are EQ_EXPR. I.e.,
4539 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4541 WANTED_CODE is this operation code. For single bit fields, we can
4542 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4543 comparison for one-bit fields. */
4545 enum tree_code wanted_code;
4546 enum tree_code lcode, rcode;
4547 tree ll_arg, lr_arg, rl_arg, rr_arg;
4548 tree ll_inner, lr_inner, rl_inner, rr_inner;
4549 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4550 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4551 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4552 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4553 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4554 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4555 enum machine_mode lnmode, rnmode;
4556 tree ll_mask, lr_mask, rl_mask, rr_mask;
4557 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4558 tree l_const, r_const;
4559 tree lntype, rntype, result;
4560 int first_bit, end_bit;
4563 /* Start by getting the comparison codes. Fail if anything is volatile.
4564 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4565 it were surrounded with a NE_EXPR. */
4567 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4570 lcode = TREE_CODE (lhs);
4571 rcode = TREE_CODE (rhs);
4573 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4575 lhs = build2 (NE_EXPR, truth_type, lhs,
4576 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4580 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4582 rhs = build2 (NE_EXPR, truth_type, rhs,
4583 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4587 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4588 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4591 ll_arg = TREE_OPERAND (lhs, 0);
4592 lr_arg = TREE_OPERAND (lhs, 1);
4593 rl_arg = TREE_OPERAND (rhs, 0);
4594 rr_arg = TREE_OPERAND (rhs, 1);
4596 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4597 if (simple_operand_p (ll_arg)
4598 && simple_operand_p (lr_arg))
4601 if (operand_equal_p (ll_arg, rl_arg, 0)
4602 && operand_equal_p (lr_arg, rr_arg, 0))
4604 result = combine_comparisons (code, lcode, rcode,
4605 truth_type, ll_arg, lr_arg);
4609 else if (operand_equal_p (ll_arg, rr_arg, 0)
4610 && operand_equal_p (lr_arg, rl_arg, 0))
4612 result = combine_comparisons (code, lcode,
4613 swap_tree_comparison (rcode),
4614 truth_type, ll_arg, lr_arg);
4620 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4621 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4623 /* If the RHS can be evaluated unconditionally and its operands are
4624 simple, it wins to evaluate the RHS unconditionally on machines
4625 with expensive branches. In this case, this isn't a comparison
4626 that can be merged. Avoid doing this if the RHS is a floating-point
4627 comparison since those can trap. */
4629 if (BRANCH_COST >= 2
4630 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4631 && simple_operand_p (rl_arg)
4632 && simple_operand_p (rr_arg))
4634 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4635 if (code == TRUTH_OR_EXPR
4636 && lcode == NE_EXPR && integer_zerop (lr_arg)
4637 && rcode == NE_EXPR && integer_zerop (rr_arg)
4638 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4639 return build2 (NE_EXPR, truth_type,
4640 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4642 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4644 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4645 if (code == TRUTH_AND_EXPR
4646 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4647 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4648 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4649 return build2 (EQ_EXPR, truth_type,
4650 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4652 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4654 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4655 return build2 (code, truth_type, lhs, rhs);
4658 /* See if the comparisons can be merged. Then get all the parameters for
4661 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4662 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4666 ll_inner = decode_field_reference (ll_arg,
4667 &ll_bitsize, &ll_bitpos, &ll_mode,
4668 &ll_unsignedp, &volatilep, &ll_mask,
4670 lr_inner = decode_field_reference (lr_arg,
4671 &lr_bitsize, &lr_bitpos, &lr_mode,
4672 &lr_unsignedp, &volatilep, &lr_mask,
4674 rl_inner = decode_field_reference (rl_arg,
4675 &rl_bitsize, &rl_bitpos, &rl_mode,
4676 &rl_unsignedp, &volatilep, &rl_mask,
4678 rr_inner = decode_field_reference (rr_arg,
4679 &rr_bitsize, &rr_bitpos, &rr_mode,
4680 &rr_unsignedp, &volatilep, &rr_mask,
4683 /* It must be true that the inner operation on the lhs of each
4684 comparison must be the same if we are to be able to do anything.
4685 Then see if we have constants. If not, the same must be true for
4687 if (volatilep || ll_inner == 0 || rl_inner == 0
4688 || ! operand_equal_p (ll_inner, rl_inner, 0))
4691 if (TREE_CODE (lr_arg) == INTEGER_CST
4692 && TREE_CODE (rr_arg) == INTEGER_CST)
4693 l_const = lr_arg, r_const = rr_arg;
4694 else if (lr_inner == 0 || rr_inner == 0
4695 || ! operand_equal_p (lr_inner, rr_inner, 0))
4698 l_const = r_const = 0;
4700 /* If either comparison code is not correct for our logical operation,
4701 fail. However, we can convert a one-bit comparison against zero into
4702 the opposite comparison against that bit being set in the field. */
4704 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4705 if (lcode != wanted_code)
4707 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4709 /* Make the left operand unsigned, since we are only interested
4710 in the value of one bit. Otherwise we are doing the wrong
4719 /* This is analogous to the code for l_const above. */
4720 if (rcode != wanted_code)
4722 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4731 /* After this point all optimizations will generate bit-field
4732 references, which we might not want. */
4733 if (! lang_hooks.can_use_bit_fields_p ())
4736 /* See if we can find a mode that contains both fields being compared on
4737 the left. If we can't, fail. Otherwise, update all constants and masks
4738 to be relative to a field of that size. */
4739 first_bit = MIN (ll_bitpos, rl_bitpos);
4740 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4741 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4742 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4744 if (lnmode == VOIDmode)
4747 lnbitsize = GET_MODE_BITSIZE (lnmode);
4748 lnbitpos = first_bit & ~ (lnbitsize - 1);
4749 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4750 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4752 if (BYTES_BIG_ENDIAN)
4754 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4755 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4758 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4759 size_int (xll_bitpos), 0);
4760 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4761 size_int (xrl_bitpos), 0);
4765 l_const = fold_convert (lntype, l_const);
4766 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4767 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4768 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4769 fold (build1 (BIT_NOT_EXPR,
4773 warning ("comparison is always %d", wanted_code == NE_EXPR);
4775 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4780 r_const = fold_convert (lntype, r_const);
4781 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4782 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4783 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4784 fold (build1 (BIT_NOT_EXPR,
4788 warning ("comparison is always %d", wanted_code == NE_EXPR);
4790 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4794 /* If the right sides are not constant, do the same for it. Also,
4795 disallow this optimization if a size or signedness mismatch occurs
4796 between the left and right sides. */
4799 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4800 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4801 /* Make sure the two fields on the right
4802 correspond to the left without being swapped. */
4803 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4806 first_bit = MIN (lr_bitpos, rr_bitpos);
4807 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4808 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4809 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4811 if (rnmode == VOIDmode)
4814 rnbitsize = GET_MODE_BITSIZE (rnmode);
4815 rnbitpos = first_bit & ~ (rnbitsize - 1);
4816 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4817 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4819 if (BYTES_BIG_ENDIAN)
4821 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4822 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4825 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4826 size_int (xlr_bitpos), 0);
4827 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4828 size_int (xrr_bitpos), 0);
4830 /* Make a mask that corresponds to both fields being compared.
4831 Do this for both items being compared. If the operands are the
4832 same size and the bits being compared are in the same position
4833 then we can do this by masking both and comparing the masked
4835 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4836 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4837 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4839 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4840 ll_unsignedp || rl_unsignedp);
4841 if (! all_ones_mask_p (ll_mask, lnbitsize))
4842 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4844 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4845 lr_unsignedp || rr_unsignedp);
4846 if (! all_ones_mask_p (lr_mask, rnbitsize))
4847 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4849 return build2 (wanted_code, truth_type, lhs, rhs);
4852 /* There is still another way we can do something: If both pairs of
4853 fields being compared are adjacent, we may be able to make a wider
4854 field containing them both.
4856 Note that we still must mask the lhs/rhs expressions. Furthermore,
4857 the mask must be shifted to account for the shift done by
4858 make_bit_field_ref. */
4859 if ((ll_bitsize + ll_bitpos == rl_bitpos
4860 && lr_bitsize + lr_bitpos == rr_bitpos)
4861 || (ll_bitpos == rl_bitpos + rl_bitsize
4862 && lr_bitpos == rr_bitpos + rr_bitsize))
4866 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4867 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4868 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4869 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4871 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4872 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4873 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4874 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4876 /* Convert to the smaller type before masking out unwanted bits. */
4878 if (lntype != rntype)
4880 if (lnbitsize > rnbitsize)
4882 lhs = fold_convert (rntype, lhs);
4883 ll_mask = fold_convert (rntype, ll_mask);
4886 else if (lnbitsize < rnbitsize)
4888 rhs = fold_convert (lntype, rhs);
4889 lr_mask = fold_convert (lntype, lr_mask);
4894 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4895 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4897 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4898 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4900 return build2 (wanted_code, truth_type, lhs, rhs);
4906 /* Handle the case of comparisons with constants. If there is something in
4907 common between the masks, those bits of the constants must be the same.
4908 If not, the condition is always false. Test for this to avoid generating
4909 incorrect code below. */
4910 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4911 if (! integer_zerop (result)
4912 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4913 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4915 if (wanted_code == NE_EXPR)
4917 warning ("%<or%> of unmatched not-equal tests is always 1");
4918 return constant_boolean_node (true, truth_type);
4922 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4923 return constant_boolean_node (false, truth_type);
4927 /* Construct the expression we will return. First get the component
4928 reference we will make. Unless the mask is all ones the width of
4929 that field, perform the mask operation. Then compare with the
4931 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4932 ll_unsignedp || rl_unsignedp);
4934 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4935 if (! all_ones_mask_p (ll_mask, lnbitsize))
4936 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4938 return build2 (wanted_code, truth_type, result,
4939 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4942 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* T is a comparison whose LHS (after stripping sign-preserving NOPs) is a
   MIN_EXPR or MAX_EXPR of INNER and an INTEGER_CST, and whose RHS is an
   INTEGER_CST.  Returns a simplified comparison directly on INNER, a
   constant boolean via omit_one_operand, or the original tree when no
   simplification applies (the bail-out return statement is elided in this
   listing).  */
4946 optimize_minmax_comparison (tree t)
4948 tree type = TREE_TYPE (t);
4949 tree arg0 = TREE_OPERAND (t, 0);
4950 enum tree_code op_code;
4951 tree comp_const = TREE_OPERAND (t, 1);
4953 int consts_equal, consts_lt;
4956 STRIP_SIGN_NOPS (arg0);
4958 op_code = TREE_CODE (arg0);
4959 minmax_const = TREE_OPERAND (arg0, 1);
/* Relation of the MIN/MAX constant to the comparison constant drives
   every case below.  */
4960 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4961 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4962 inner = TREE_OPERAND (arg0, 0);
4964 /* If something does not permit us to optimize, return the original tree. */
4965 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4966 || TREE_CODE (comp_const) != INTEGER_CST
4967 || TREE_CONSTANT_OVERFLOW (comp_const)
4968 || TREE_CODE (minmax_const) != INTEGER_CST
4969 || TREE_CONSTANT_OVERFLOW (minmax_const))
4972 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4973 and GT_EXPR, doing the rest with recursive calls using logical
4975 switch (TREE_CODE (t))
/* NE/LT/LE: handled by inverting the complementary comparison.  */
4977 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4979 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* GE: decomposed as (EQ || GT), each simplified recursively.  */
4983 fold (build2 (TRUTH_ORIF_EXPR, type,
4984 optimize_minmax_comparison
4985 (build2 (EQ_EXPR, type, arg0, comp_const)),
4986 optimize_minmax_comparison
4987 (build2 (GT_EXPR, type, arg0, comp_const))));
/* EQ_EXPR case.  The example comments below use 0 as the MIN/MAX
   constant for illustration; the code handles any INTEGER_CST through
   consts_equal / consts_lt.  */
4990 if (op_code == MAX_EXPR && consts_equal)
4991 /* MAX (X, 0) == 0 -> X <= 0 */
4992 return fold (build2 (LE_EXPR, type, inner, comp_const));
4994 else if (op_code == MAX_EXPR && consts_lt)
4995 /* MAX (X, 0) == 5 -> X == 5 */
4996 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4998 else if (op_code == MAX_EXPR)
4999 /* MAX (X, 0) == -1 -> false */
5000 return omit_one_operand (type, integer_zero_node, inner);
5002 else if (consts_equal)
5003 /* MIN (X, 0) == 0 -> X >= 0 */
5004 return fold (build2 (GE_EXPR, type, inner, comp_const));
5007 /* MIN (X, 0) == 5 -> false */
5008 return omit_one_operand (type, integer_zero_node, inner);
5011 /* MIN (X, 0) == -1 -> X == -1 */
5012 return fold (build2 (EQ_EXPR, type, inner, comp_const));
/* GT_EXPR case.  */
5015 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5016 /* MAX (X, 0) > 0 -> X > 0
5017 MAX (X, 0) > 5 -> X > 5 */
5018 return fold (build2 (GT_EXPR, type, inner, comp_const));
5020 else if (op_code == MAX_EXPR)
5021 /* MAX (X, 0) > -1 -> true */
5022 return omit_one_operand (type, integer_one_node, inner);
5024 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5025 /* MIN (X, 0) > 0 -> false
5026 MIN (X, 0) > 5 -> false */
5027 return omit_one_operand (type, integer_zero_node, inner);
5030 /* MIN (X, 0) > -1 -> X > -1 */
5031 return fold (build2 (GT_EXPR, type, inner, comp_const));
5038 /* T is an integer expression that is being multiplied, divided, or taken a
5039 modulus (CODE says which and what kind of divide or modulus) by a
5040 constant C. See if we can eliminate that operation by folding it with
5041 other operations already in T. WIDE_TYPE, if non-null, is a type that
5042 should be used for the computation if wider than our type.
5044 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5045 (X * 2) + (Y * 4). We must, however, be assured that either the original
5046 expression would not overflow or that overflow is undefined for the type
5047 in the language in question.
5049 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5050 the machine has a multiply-accumulate insn or that this is part of an
5051 addressing calculation.
5053 If we return a non-null expression, it is an equivalent form of the
5054 original computation, but need not be in the original type. */
/* Thin wrapper around extract_muldiv_1 that bounds recursion depth.
   NOTE(review): the depth counter increment/decrement and the early
   bail-out return are elided in this listing — presumably a static
   depth variable guards the extract_muldiv_1 call; verify against the
   full source.  */
5057 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5059 /* To avoid exponential search depth, refuse to allow recursion past
5060 three levels. Beyond that (1) it's highly unlikely that we'll find
5061 something interesting and (2) we've probably processed it before
5062 when we built the inner expression. */
5071 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: attempt to fold the operation CODE by
   constant C into the expression T, dispatching on T's tree code.
   CTYPE is WIDE_TYPE when that is strictly wider than T's type,
   otherwise T's own type; results are built in CTYPE.  Returns the
   simplified tree or (in elided paths) a null tree on failure.  */
5078 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5080 tree type = TREE_TYPE (t);
5081 enum tree_code tcode = TREE_CODE (t);
5082 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5083 > GET_MODE_SIZE (TYPE_MODE (type)))
5084 ? wide_type : type);
5086 int same_p = tcode == code;
5087 tree op0 = NULL_TREE, op1 = NULL_TREE;
5089 /* Don't deal with constants of zero here; they confuse the code below. */
5090 if (integer_zerop (c))
5093 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5094 op0 = TREE_OPERAND (t, 0);
5096 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5097 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5099 /* Note that we need not handle conditional operations here since fold
5100 already handles those cases. So just do arithmetic here. */
5104 /* For a constant, we can always simplify if we are a multiply
5105 or (for divide and modulus) if it is a multiple of our constant. */
5106 if (code == MULT_EXPR
5107 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5108 return const_binop (code, fold_convert (ctype, t),
5109 fold_convert (ctype, c), 0);
5112 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5113 /* If op0 is an expression ... */
5114 if ((COMPARISON_CLASS_P (op0)
5115 || UNARY_CLASS_P (op0)
5116 || BINARY_CLASS_P (op0)
5117 || EXPRESSION_CLASS_P (op0))
5118 /* ... and is unsigned, and its type is smaller than ctype,
5119 then we cannot pass through as widening. */
5120 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5121 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5122 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5123 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5124 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5125 /* ... or this is a truncation (t is narrower than op0),
5126 then we cannot pass through this narrowing. */
5127 || (GET_MODE_SIZE (TYPE_MODE (type))
5128 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5129 /* ... or signedness changes for division or modulus,
5130 then we cannot pass through this conversion. */
5131 || (code != MULT_EXPR
5132 && (TYPE_UNSIGNED (ctype)
5133 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5136 /* Pass the constant down and see if we can make a simplification. If
5137 we can, replace this expression with the inner simplification for
5138 possible later conversion to our or some other type. */
5139 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5140 && TREE_CODE (t2) == INTEGER_CST
5141 && ! TREE_CONSTANT_OVERFLOW (t2)
5142 && (0 != (t1 = extract_muldiv (op0, t2, code,
5144 ? ctype : NULL_TREE))))
5149 /* If widening the type changes it from signed to unsigned, then we
5150 must avoid building ABS_EXPR itself as unsigned. */
5151 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5153 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5154 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5156 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5157 return fold_convert (ctype, t1);
/* Otherwise distribute the operation into the unary operand.  */
5163 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5164 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5167 case MIN_EXPR: case MAX_EXPR:
5168 /* If widening the type changes the signedness, then we can't perform
5169 this optimization as that changes the result. */
5170 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5173 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5174 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5175 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying/dividing by a negative constant reverses the ordering,
   so MIN becomes MAX and vice versa.  */
5177 if (tree_int_cst_sgn (c) < 0)
5178 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5180 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5181 fold_convert (ctype, t2)));
5185 case LSHIFT_EXPR: case RSHIFT_EXPR:
5186 /* If the second operand is constant, this is a multiplication
5187 or floor division, by a power of two, so we can treat it that
5188 way unless the multiplier or divisor overflows. Signed
5189 left-shift overflow is implementation-defined rather than
5190 undefined in C90, so do not convert signed left shift into
5192 if (TREE_CODE (op1) == INTEGER_CST
5193 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5194 /* const_binop may not detect overflow correctly,
5195 so check for it explicitly here. */
5196 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5197 && TREE_INT_CST_HIGH (op1) == 0
5198 && 0 != (t1 = fold_convert (ctype,
5199 const_binop (LSHIFT_EXPR,
5202 && ! TREE_OVERFLOW (t1))
/* Recurse on the equivalent MULT_EXPR / FLOOR_DIV_EXPR form.  */
5203 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5204 ? MULT_EXPR : FLOOR_DIV_EXPR,
5205 ctype, fold_convert (ctype, op0), t1),
5206 c, code, wide_type);
5209 case PLUS_EXPR: case MINUS_EXPR:
5210 /* See if we can eliminate the operation on both sides. If we can, we
5211 can return a new PLUS or MINUS. If we can't, the only remaining
5212 cases where we can do anything are if the second operand is a
5214 t1 = extract_muldiv (op0, c, code, wide_type);
5215 t2 = extract_muldiv (op1, c, code, wide_type);
5216 if (t1 != 0 && t2 != 0
5217 && (code == MULT_EXPR
5218 /* If not multiplication, we can only do this if both operands
5219 are divisible by c. */
5220 || (multiple_of_p (ctype, op0, c)
5221 && multiple_of_p (ctype, op1, c))))
5222 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5223 fold_convert (ctype, t2)));
5225 /* If this was a subtraction, negate OP1 and set it to be an addition.
5226 This simplifies the logic below. */
5227 if (tcode == MINUS_EXPR)
5228 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5230 if (TREE_CODE (op1) != INTEGER_CST)
5233 /* If either OP1 or C are negative, this optimization is not safe for
5234 some of the division and remainder types while for others we need
5235 to change the code. */
5236 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5238 if (code == CEIL_DIV_EXPR)
5239 code = FLOOR_DIV_EXPR;
5240 else if (code == FLOOR_DIV_EXPR)
5241 code = CEIL_DIV_EXPR;
5242 else if (code != MULT_EXPR
5243 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5247 /* If it's a multiply or a division/modulus operation of a multiple
5248 of our constant, do the operation and verify it doesn't overflow. */
5249 if (code == MULT_EXPR
5250 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5252 op1 = const_binop (code, fold_convert (ctype, op1),
5253 fold_convert (ctype, c), 0);
5254 /* We allow the constant to overflow with wrapping semantics. */
5256 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5262 /* If we have an unsigned type that is not a sizetype, we cannot widen
5263 the operation since it will change the result if the original
5264 computation overflowed. */
5265 if (TYPE_UNSIGNED (ctype)
5266 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5270 /* If we were able to eliminate our operation from the first side,
5271 apply our operation to the second side and reform the PLUS. */
5272 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5273 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5275 /* The last case is if we are a multiply. In that case, we can
5276 apply the distributive law to commute the multiply and addition
5277 if the multiplication of the constants doesn't overflow. */
5278 if (code == MULT_EXPR)
5279 return fold (build2 (tcode, ctype,
5280 fold (build2 (code, ctype,
5281 fold_convert (ctype, op0),
5282 fold_convert (ctype, c))),
5288 /* We have a special case here if we are doing something like
5289 (C * 8) % 4 since we know that's zero. */
5290 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5291 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5292 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5293 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5294 return omit_one_operand (type, integer_zero_node, op0);
5296 /* ... fall through ... */
5298 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5299 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5300 /* If we can extract our operation from the LHS, do so and return a
5301 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5302 do something only if the second operand is a constant. */
5304 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5305 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5306 fold_convert (ctype, op1)));
5307 else if (tcode == MULT_EXPR && code == MULT_EXPR
5308 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5309 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5310 fold_convert (ctype, t1)));
5311 else if (TREE_CODE (op1) != INTEGER_CST)
5314 /* If these are the same operation types, we can associate them
5315 assuming no overflow. */
5317 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5318 fold_convert (ctype, c), 0))
5319 && ! TREE_OVERFLOW (t1))
5320 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5322 /* If these operations "cancel" each other, we have the main
5323 optimizations of this pass, which occur when either constant is a
5324 multiple of the other, in which case we replace this with either an
5325 operation or CODE or TCODE.
5327 If we have an unsigned type that is not a sizetype, we cannot do
5328 this since it will change the result if the original computation
5330 if ((! TYPE_UNSIGNED (ctype)
5331 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5333 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5334 || (tcode == MULT_EXPR
5335 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5336 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* OP1 a multiple of C: keep TCODE and scale OP1 down by C.  */
5338 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5339 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5340 fold_convert (ctype,
5341 const_binop (TRUNC_DIV_EXPR,
/* C a multiple of OP1: the ops cancel down to a single CODE.  */
5343 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5344 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5345 fold_convert (ctype,
5346 const_binop (TRUNC_DIV_EXPR,
5358 /* Return a node which has the indicated constant VALUE (either 0 or
5359 1), and is of the indicated TYPE. */
5362 constant_boolean_node (int value, tree type)
/* Reuse the shared singleton nodes for the common types; ask the
   front end for other BOOLEAN_TYPEs; otherwise build a fresh
   INTEGER_CST of TYPE.  */
5364 if (type == integer_type_node)
5365 return value ? integer_one_node : integer_zero_node;
5366 else if (type == boolean_type_node)
5367 return value ? boolean_true_node : boolean_false_node;
5368 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5369 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5370 : integer_zero_node);
5372 return build_int_cst (type, value);
5375 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5376 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5377 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5378 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5379 COND is the first argument to CODE; otherwise (as in the example
5380 given here), it is the second argument. TYPE is the type of the
5381 original expression. Return NULL_TREE if no simplification is
5385 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5386 tree cond, tree arg, int cond_first_p)
5388 tree test, true_value, false_value;
5389 tree lhs = NULL_TREE;
5390 tree rhs = NULL_TREE;
5392 /* This transformation is only worthwhile if we don't have to wrap
5393 arg in a SAVE_EXPR, and the operation can be simplified on at least
5394 one of the branches once it's pushed inside the COND_EXPR. */
5395 if (!TREE_CONSTANT (arg))
5398 if (TREE_CODE (cond) == COND_EXPR)
5400 test = TREE_OPERAND (cond, 0);
5401 true_value = TREE_OPERAND (cond, 1);
5402 false_value = TREE_OPERAND (cond, 2);
5403 /* If this operand throws an expression, then it does not make
5404 sense to try to perform a logical or arithmetic operation
/* A void-typed arm (e.g. a throw) cannot be an operand of CODE.  */
5406 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5408 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison: treat it as `cond ? 1 : 0'.  */
5413 tree testtype = TREE_TYPE (cond);
5415 true_value = constant_boolean_node (true, testtype);
5416 false_value = constant_boolean_node (false, testtype);
/* Apply CODE to each arm, honoring the operand order requested by
   COND_FIRST_P, then rebuild the COND_EXPR over the folded arms.  */
5420 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5421 : build2 (code, type, arg, true_value));
5423 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5424 : build2 (code, type, arg, false_value));
5426 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5427 return fold_convert (type, test);
5431 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5433 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5434 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5435 ADDEND is the same as X.
5437 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5438 and finite. The problematic cases are when X is zero, and its mode
5439 has signed zeros. In the case of rounding towards -infinity,
5440 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5441 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5444 fold_real_zero_addition_p (tree type, tree addend, int negate)
5446 if (!real_zerop (addend))
5449 /* Don't allow the fold with -fsignaling-nans. */
5450 if (HONOR_SNANS (TYPE_MODE (type)))
5453 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5454 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5457 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
/* NOTE(review): the statement under this condition is elided in this
   listing — presumably it flips NEGATE for a literal -0.0 addend;
   verify against the full source.  */
5458 if (TREE_CODE (addend) == REAL_CST
5459 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5462 /* The mode has signed zeros, and we have to honor their sign.
5463 In this situation, there is only one case we can return true for.
5464 X - 0 is the same as X unless rounding towards -infinity is
5466 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5469 /* Subroutine of fold() that checks comparisons of built-in math
5470 functions against real constants.
5472 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5473 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5474 is the type of the result and ARG0 and ARG1 are the operands of the
5475 comparison. ARG1 must be a TREE_REAL_CST.
5477 The function returns the constant folded tree if a simplification
5478 can be made, and NULL_TREE otherwise. */
5481 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5482 tree type, tree arg0, tree arg1)
/* Only the sqrt family is handled in the code visible here.  */
5486 if (BUILTIN_SQRT_P (fcode))
/* ARG is the argument of the sqrt call; comparisons on sqrt(ARG)
   are rewritten as comparisons on ARG itself.  */
5488 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5489 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5491 c = TREE_REAL_CST (arg1);
5492 if (REAL_VALUE_NEGATIVE (c))
5494 /* sqrt(x) < y is always false, if y is negative. */
5495 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5496 return omit_one_operand (type, integer_zero_node, arg);
5498 /* sqrt(x) > y is always true, if y is negative and we
5499 don't care about NaNs, i.e. negative values of x. */
5500 if (code == NE_EXPR || !HONOR_NANS (mode))
5501 return omit_one_operand (type, integer_one_node, arg);
5503 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5504 return fold (build2 (GE_EXPR, type, arg,
5505 build_real (TREE_TYPE (arg), dconst0)));
5507 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound (in the operand's mode) so the comparison can
   be performed on x directly.  */
5511 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5512 real_convert (&c2, mode, &c2);
5514 if (REAL_VALUE_ISINF (c2))
5516 /* sqrt(x) > y is x == +Inf, when y is very large. */
5517 if (HONOR_INFINITIES (mode))
5518 return fold (build2 (EQ_EXPR, type, arg,
5519 build_real (TREE_TYPE (arg), c2)));
5521 /* sqrt(x) > y is always false, when y is very large
5522 and we don't care about infinities. */
5523 return omit_one_operand (type, integer_zero_node, arg);
5526 /* sqrt(x) > c is the same as x > c*c. */
5527 return fold (build2 (code, type, arg,
5528 build_real (TREE_TYPE (arg), c2)));
5530 else if (code == LT_EXPR || code == LE_EXPR)
5534 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5535 real_convert (&c2, mode, &c2);
5537 if (REAL_VALUE_ISINF (c2))
5539 /* sqrt(x) < y is always true, when y is a very large
5540 value and we don't care about NaNs or Infinities. */
5541 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5542 return omit_one_operand (type, integer_one_node, arg);
5544 /* sqrt(x) < y is x != +Inf when y is very large and we
5545 don't care about NaNs. */
5546 if (! HONOR_NANS (mode))
5547 return fold (build2 (NE_EXPR, type, arg,
5548 build_real (TREE_TYPE (arg), c2)));
5550 /* sqrt(x) < y is x >= 0 when y is very large and we
5551 don't care about Infinities. */
5552 if (! HONOR_INFINITIES (mode))
5553 return fold (build2 (GE_EXPR, type, arg,
5554 build_real (TREE_TYPE (arg), dconst0)));
5556 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so it must be evaluated only once;
   bail out where a SAVE_EXPR would be invalid (global scope or
   a PLACEHOLDER_EXPR inside ARG).  */
5557 if (lang_hooks.decls.global_bindings_p () != 0
5558 || CONTAINS_PLACEHOLDER_P (arg))
5561 arg = save_expr (arg);
5562 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5563 fold (build2 (GE_EXPR, type, arg,
5564 build_real (TREE_TYPE (arg),
5566 fold (build2 (NE_EXPR, type, arg,
5567 build_real (TREE_TYPE (arg),
5571 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5572 if (! HONOR_NANS (mode))
5573 return fold (build2 (code, type, arg,
5574 build_real (TREE_TYPE (arg), c2)));
5576 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5577 if (lang_hooks.decls.global_bindings_p () == 0
5578 && ! CONTAINS_PLACEHOLDER_P (arg))
5580 arg = save_expr (arg);
5581 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5582 fold (build2 (GE_EXPR, type, arg,
5583 build_real (TREE_TYPE (arg),
5585 fold (build2 (code, type, arg,
5586 build_real (TREE_TYPE (arg),
5595 /* Subroutine of fold() that optimizes comparisons against Infinities,
5596 either +Inf or -Inf.
5598 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5599 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5600 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5602 The function returns the constant folded tree if a simplification
5603 can be made, and NULL_TREE otherwise. */
5606 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5608 enum machine_mode mode;
5609 REAL_VALUE_TYPE max;
5613 mode = TYPE_MODE (TREE_TYPE (arg0));
5615 /* For negative infinity swap the sense of the comparison. */
/* NEG also selects the sign passed to real_maxval below, so -Inf
   cases compare against -DBL_MAX rather than +DBL_MAX.  */
5616 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5618 code = swap_tree_comparison (code);
5623 /* x > +Inf is always false, if we ignore sNaNs. */
5624 if (HONOR_SNANS (mode))
5626 return omit_one_operand (type, integer_zero_node, arg0);
5629 /* x <= +Inf is always true, if we don't care about NaNs. */
5630 if (! HONOR_NANS (mode))
5631 return omit_one_operand (type, integer_one_node, arg0);
5633 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5634 if (lang_hooks.decls.global_bindings_p () == 0
5635 && ! CONTAINS_PLACEHOLDER_P (arg0))
5637 arg0 = save_expr (arg0);
5638 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5644 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5645 real_maxval (&max, neg, mode);
5646 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5647 arg0, build_real (TREE_TYPE (arg0), max)));
5650 /* x < +Inf is always equal to x <= DBL_MAX. */
5651 real_maxval (&max, neg, mode);
5652 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5653 arg0, build_real (TREE_TYPE (arg0), max)));
5656 /* x != +Inf is always equal to !(x > DBL_MAX). */
5657 real_maxval (&max, neg, mode);
5658 if (! HONOR_NANS (mode))
5659 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5660 arg0, build_real (TREE_TYPE (arg0), max)));
5662 /* The transformation below creates non-gimple code and thus is
5663 not appropriate if we are in gimple form. */
5667 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5668 arg0, build_real (TREE_TYPE (arg0), max)));
5669 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5678 /* Subroutine of fold() that optimizes comparisons of a division by
5679 a nonzero integer constant against an integer constant, i.e.
5682 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5683 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5684 are the operands of the comparison. ARG1 must be an INTEGER_CST
(the code below reads TREE_INT_CST_LOW/HIGH of ARG1; the original
"TREE_REAL_CST" wording was a copy-paste error from a neighboring
comment).
5686 The function returns the constant folded tree if a simplification
5687 can be made, and NULL_TREE otherwise. */
5690 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
/* LO and HI delimit the range of X (= ARG00) values for which
   X / ARG01 == ARG1 holds; the comparison is rewritten as a range
   check on X.  */
5692 tree prod, tmp, hi, lo;
5693 tree arg00 = TREE_OPERAND (arg0, 0);
5694 tree arg01 = TREE_OPERAND (arg0, 1);
5695 unsigned HOST_WIDE_INT lpart;
5696 HOST_WIDE_INT hpart;
5699 /* We have to do this the hard way to detect unsigned overflow.
5700 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5701 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5702 TREE_INT_CST_HIGH (arg01),
5703 TREE_INT_CST_LOW (arg1),
5704 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5705 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5706 prod = force_fit_type (prod, -1, overflow, false);
5708 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
/* Unsigned case: range is [prod, prod + (arg01 - 1)].  */
5710 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5713 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5714 overflow = add_double (TREE_INT_CST_LOW (prod),
5715 TREE_INT_CST_HIGH (prod),
5716 TREE_INT_CST_LOW (tmp),
5717 TREE_INT_CST_HIGH (tmp),
5719 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5720 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5721 TREE_CONSTANT_OVERFLOW (prod));
5723 else if (tree_int_cst_sgn (arg01) >= 0)
/* Signed, positive divisor: range endpoints depend on the sign
   of ARG1 because C division truncates toward zero.  */
5725 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5726 switch (tree_int_cst_sgn (arg1))
5729 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5734 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5739 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5749 /* A negative divisor reverses the relational operators. */
5750 code = swap_tree_comparison (code);
5752 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5753 switch (tree_int_cst_sgn (arg1))
5756 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5761 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5766 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Emit the comparison as a range check, simplifying when one or
   both endpoints overflowed the type (i.e. fell off its edge).  */
5778 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5779 return omit_one_operand (type, integer_zero_node, arg00);
5780 if (TREE_OVERFLOW (hi))
5781 return fold (build2 (GE_EXPR, type, arg00, lo));
5782 if (TREE_OVERFLOW (lo))
5783 return fold (build2 (LE_EXPR, type, arg00, hi));
5784 return build_range_check (type, arg00, 1, lo, hi);
5787 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5788 return omit_one_operand (type, integer_one_node, arg00);
5789 if (TREE_OVERFLOW (hi))
5790 return fold (build2 (LT_EXPR, type, arg00, lo));
5791 if (TREE_OVERFLOW (lo))
5792 return fold (build2 (GT_EXPR, type, arg00, hi));
5793 return build_range_check (type, arg00, 0, lo, hi);
5796 if (TREE_OVERFLOW (lo))
5797 return omit_one_operand (type, integer_zero_node, arg00);
5798 return fold (build2 (LT_EXPR, type, arg00, lo));
5801 if (TREE_OVERFLOW (hi))
5802 return omit_one_operand (type, integer_one_node, arg00);
5803 return fold (build2 (LE_EXPR, type, arg00, hi));
5806 if (TREE_OVERFLOW (hi))
5807 return omit_one_operand (type, integer_zero_node, arg00);
5808 return fold (build2 (GT_EXPR, type, arg00, hi));
5811 if (TREE_OVERFLOW (lo))
5812 return omit_one_operand (type, integer_one_node, arg00);
5813 return fold (build2 (GE_EXPR, type, arg00, lo));
5823 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5824 equality/inequality test, then return a simplified form of
5825 the test using shifts and logical operations. Otherwise return
5826 NULL. TYPE is the desired result type. */
5829 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
/* If the caller handed us a TRUTH_NOT_EXPR, look through it at the
   EQ/NE comparison inside and invert the comparison code to
   compensate for the removed negation.  */
5832 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5834 if (code == TRUTH_NOT_EXPR)
5836 code = TREE_CODE (arg0);
5837 if (code != NE_EXPR && code != EQ_EXPR)
5840 /* Extract the arguments of the EQ/NE. */
5841 arg1 = TREE_OPERAND (arg0, 1);
5842 arg0 = TREE_OPERAND (arg0, 0);
5844 /* This requires us to invert the code. */
5845 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
/* The optimization only applies to (A & C) ==/!= 0 where C is a
   power of two, i.e. a test of exactly one bit of A.  */
5848 /* If this is testing a single bit, we can optimize the test. */
5849 if ((code == NE_EXPR || code == EQ_EXPR)
5850 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5851 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5853 tree inner = TREE_OPERAND (arg0, 0);
5854 tree type = TREE_TYPE (arg0);
/* BITNUM is the index of the single set bit in the power-of-two
   mask C, counting from the least significant bit.  */
5855 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5856 enum machine_mode operand_mode = TYPE_MODE (type);
5858 tree signed_type, unsigned_type, intermediate_type;
/* First special case: when the tested bit is A's sign bit, the test
   collapses to a signed comparison against zero.  sign_bit_p returns
   a non-null operand when the mask matches the sign bit.  */
5861 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5862 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5863 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5864 if (arg00 != NULL_TREE
5865 /* This is only a win if casting to a signed type is cheap,
5866 i.e. when arg00's type is not a partial mode. */
5867 && TYPE_PRECISION (TREE_TYPE (arg00))
5868 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5870 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
/* (A & SIGNBIT) == 0 becomes A >= 0; (A & SIGNBIT) != 0 becomes
   A < 0, both computed in the signed variant of A's type.  */
5871 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5872 result_type, fold_convert (stype, arg00),
5873 fold_convert (stype, integer_zero_node)));
5876 /* Otherwise we have (A & C) != 0 where C is a single bit,
5877 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5878 Similarly for (A & C) == 0. */
5880 /* If INNER is a right shift of a constant and it plus BITNUM does
5881 not overflow, adjust BITNUM and INNER. */
5882 if (TREE_CODE (inner) == RSHIFT_EXPR
5883 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5884 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5885 && bitnum < TYPE_PRECISION (type)
5886 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5887 bitnum - TYPE_PRECISION (type)))
/* Fold the shift count into BITNUM and test the corresponding bit
   of the unshifted operand instead.  */
5889 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5890 inner = TREE_OPERAND (inner, 0);
5893 /* If we are going to be able to omit the AND below, we must do our
5894 operations as unsigned. If we must use the AND, we have a choice.
5895 Normally unsigned is faster, but for some machines signed is. */
5896 #ifdef LOAD_EXTEND_OP
/* NOTE(review): choose signed ops when loads are sign-extended on the
   target; the !flag_syntax_only guard presumably avoids querying the
   backend in syntax-only mode -- confirm.  */
5897 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5898 && !flag_syntax_only) ? 0 : 1;
5903 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5904 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5905 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5906 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0.  */
5909 inner = build2 (RSHIFT_EXPR, intermediate_type,
5910 inner, size_int (bitnum));
/* For the == 0 form, flip the low bit so the result is 1 exactly
   when the original bit was clear.  */
5912 if (code == EQ_EXPR)
5913 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5914 inner, integer_one_node));
5916 /* Put the AND last so it can combine with more things. */
5917 inner = build2 (BIT_AND_EXPR, intermediate_type,
5918 inner, integer_one_node);
5920 /* Make sure to return the proper type. */
5921 inner = fold_convert (result_type, inner);
5928 /* Check whether we are allowed to reorder operands arg0 and arg1,
5929 such that the evaluation of arg1 occurs before arg0. */
5932 reorder_operands_p (tree arg0, tree arg1)
/* When the language does not mandate an evaluation order, any
   reordering is permitted.  */
5934 if (! flag_evaluation_order)
/* A constant operand can be evaluated at any point, so moving it
   relative to the other operand cannot change observable behavior.  */
5936 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise reordering is safe only if neither operand has side
   effects whose relative order could be observed.  */
5938 return ! TREE_SIDE_EFFECTS (arg0)
5939 && ! TREE_SIDE_EFFECTS (arg1);
5942 /* Test whether it is preferable two swap two operands, ARG0 and
5943 ARG1, for example because ARG0 is an integer constant and ARG1
5944 isn't. If REORDER is true, only recommend swapping if we can
5945 evaluate the operands in reverse order. */
5948 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through sign-preserving conversions so the tests below see
   the underlying operands.  */
5950 STRIP_SIGN_NOPS (arg0);
5951 STRIP_SIGN_NOPS (arg1);
/* Canonical order puts constants second: for each constant kind,
   recommend a swap only when ARG0 is that kind of constant and ARG1
   is not.  Checked from most specific (INTEGER_CST) to the generic
   TREE_CONSTANT test.  */
5953 if (TREE_CODE (arg1) == INTEGER_CST)
5955 if (TREE_CODE (arg0) == INTEGER_CST)
5958 if (TREE_CODE (arg1) == REAL_CST)
5960 if (TREE_CODE (arg0) == REAL_CST)
5963 if (TREE_CODE (arg1) == COMPLEX_CST)
5965 if (TREE_CODE (arg0) == COMPLEX_CST)
5968 if (TREE_CONSTANT (arg1))
5970 if (TREE_CONSTANT (arg0))
/* With a mandated evaluation order, refuse to swap operands that
   have side effects when the caller asked for a reorder-safe answer.  */
5976 if (reorder && flag_evaluation_order
5977 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5985 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5986 for commutative and comparison operators. Ensuring a canonical
5987 form allows the optimizers to find additional redundancies without
5988 having to explicitly check for both orderings. */
5989 if (TREE_CODE (arg0) == SSA_NAME
5990 && TREE_CODE (arg1) == SSA_NAME
5991 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5997 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
5998 ARG0 is extended to a wider type. */
6001 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
/* See whether ARG0 is really a widened value; if get_unwidened
   returns ARG0 unchanged there is nothing to do.  */
6003 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6005 tree shorter_type, outer_type;
6009 if (arg0_unw == arg0)
6011 shorter_type = TREE_TYPE (arg0_unw);
/* Try to narrow ARG1 to the same shorter type.  */
6013 arg1_unw = get_unwidened (arg1, shorter_type);
/* Equality comparisons are signedness-agnostic; ordered comparisons
   additionally require that the widening preserved signedness.  The
   constant case also requires that ARG1 fits in the shorter type.  */
6017 /* If possible, express the comparison in the shorter mode. */
6018 if ((code == EQ_EXPR || code == NE_EXPR
6019 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6020 && (TREE_TYPE (arg1_unw) == shorter_type
6021 || (TREE_CODE (arg1_unw) == INTEGER_CST
6022 && TREE_CODE (shorter_type) == INTEGER_TYPE
6023 && int_fits_type_p (arg1_unw, shorter_type))))
6024 return fold (build (code, type, arg0_unw,
6025 fold_convert (shorter_type, arg1_unw)));
/* Beyond this point we can only reason about constant ARG1.  */
6027 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6030 /* If we are comparing with an integer that does not fit into the range
6031 of the shorter type, the result is known. */
6032 outer_type = TREE_TYPE (arg1_unw);
/* Compute the shorter type's representable range expressed in the
   wider OUTER_TYPE, then classify the constant against it.  */
6033 min = lower_bound_in_type (outer_type, shorter_type);
6034 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE: constant is greater than every value of the shorter type;
   BELOW: constant is smaller than every such value.  */
6036 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6038 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* The comparison result is then a constant; omit_one_operand keeps
   ARG0 for its side effects while yielding the known truth value.
   NOTE(review): the switch over CODE selecting among these returns is
   partly elided from this view.  */
6045 return omit_one_operand (type, integer_zero_node, arg0);
6050 return omit_one_operand (type, integer_one_node, arg0);
6056 return omit_one_operand (type, integer_one_node, arg0);
6058 return omit_one_operand (type, integer_zero_node, arg0);
6063 return omit_one_operand (type, integer_zero_node, arg0);
6065 return omit_one_operand (type, integer_one_node, arg0);
6074 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6075 ARG0 just the signedness is changed. */
6078 fold_sign_changed_comparison (enum tree_code code, tree type,
6079 tree arg0, tree arg1)
6081 tree arg0_inner, tmp;
6082 tree inner_type, outer_type;
/* Only a NOP_EXPR conversion of ARG0 can be a pure sign change.  */
6084 if (TREE_CODE (arg0) != NOP_EXPR)
6087 outer_type = TREE_TYPE (arg0);
6088 arg0_inner = TREE_OPERAND (arg0, 0);
6089 inner_type = TREE_TYPE (arg0_inner);
/* A sign change keeps the precision; any precision change means the
   conversion is a real widening/narrowing and must stay.  */
6091 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
/* ARG1 must be a constant we can re-express in INNER_TYPE, or a
   conversion whose operand already has INNER_TYPE.  */
6094 if (TREE_CODE (arg1) != INTEGER_CST
6095 && !(TREE_CODE (arg1) == NOP_EXPR
6096 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
/* NOTE(review): the continuation of this signedness condition is
   elided from this view -- confirm against the full source.  */
6099 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Rebuild the constant with the same bits in INNER_TYPE, preserving
   the original overflow flags.  */
6104 if (TREE_CODE (arg1) == INTEGER_CST)
6106 tmp = build_int_cst_wide (inner_type,
6107 TREE_INT_CST_LOW (arg1),
6108 TREE_INT_CST_HIGH (arg1));
6109 arg1 = force_fit_type (tmp, 0,
6110 TREE_OVERFLOW (arg1),
6111 TREE_CONSTANT_OVERFLOW (arg1));
6114 arg1 = fold_convert (inner_type, arg1);
/* Comparison now performed directly on the unconverted operand.  */
6116 return fold (build (code, type, arg0_inner, arg1));
6119 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6120 step of the array. TYPE is the type of the expression. ADDR is the address.
6121 MULT is the multiplicative expression. If the function succeeds, the new
6122 address expression is returned. Otherwise NULL_TREE is returned. */
6125 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6127 tree s, delta, step;
6128 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6129 tree ref = TREE_OPERAND (addr, 0), pref;
/* Split MULT into the candidate step S (the constant factor) and the
   index adjustment DELTA (the other factor).  */
6136 if (TREE_CODE (arg0) == INTEGER_CST)
6141 else if (TREE_CODE (arg1) == INTEGER_CST)
/* Walk down the reference chain looking for an ARRAY_REF whose
   element size matches S.  */
6149 for (;; ref = TREE_OPERAND (ref, 0))
6151 if (TREE_CODE (ref) == ARRAY_REF)
6153 step = array_ref_element_size (ref);
6155 if (TREE_CODE (step) != INTEGER_CST)
6158 itype = TREE_TYPE (step);
6160 /* If the type sizes do not match, we might run into problems
6161 when one of them would overflow. */
6162 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
/* The multiplier must equal the array's element size for the
   transformation &a[i] + s*d -> &a[i +/- d] to be valid.  */
6165 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6168 delta = fold_convert (itype, delta);
/* Give up once we run out of component references to look through.  */
6172 if (!handled_component_p (ref))
6176 /* We found the suitable array reference. So copy everything up to it,
6177 and replace the index. */
6179 pref = TREE_OPERAND (addr, 0);
6180 ret = copy_node (pref);
/* Copy each intermediate reference node so the original tree is not
   mutated; POS tracks the copy being extended.  */
6185 pref = TREE_OPERAND (pref, 0);
6186 TREE_OPERAND (pos, 0) = copy_node (pref);
6187 pos = TREE_OPERAND (pos, 0);
/* Apply CODE (the original PLUS/MINUS) to the array index itself.  */
6190 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6191 TREE_OPERAND (pos, 1),
/* Re-wrap the adjusted reference as an address of the desired TYPE.  */
6194 return build1 (ADDR_EXPR, type, ret);
6198 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6199 means A >= Y && A != MAX, but in this case we know that
6200 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6203 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6205 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, which must have the shape A < X or
   X > A.  */
6207 if (TREE_CODE (bound) == LT_EXPR)
6208 a = TREE_OPERAND (bound, 0);
6209 else if (TREE_CODE (bound) == GT_EXPR)
6210 a = TREE_OPERAND (bound, 1);
/* The transformation relies on integer/pointer wraparound reasoning;
   it does not apply to other types.  */
6214 typea = TREE_TYPE (a);
6215 if (!INTEGRAL_TYPE_P (typea)
6216 && !POINTER_TYPE_P (typea))
/* Extract A1 and Y from the inequality, which must have the shape
   Y < A1 or A1 > Y.  */
6219 if (TREE_CODE (ineq) == LT_EXPR)
6221 a1 = TREE_OPERAND (ineq, 1);
6222 y = TREE_OPERAND (ineq, 0);
6224 else if (TREE_CODE (ineq) == GT_EXPR)
6226 a1 = TREE_OPERAND (ineq, 0);
6227 y = TREE_OPERAND (ineq, 1);
/* Both occurrences of A must use the same type for the subtraction
   below to be meaningful.  */
6232 if (TREE_TYPE (a1) != typea)
/* The rewrite applies only when A1 is exactly A + 1.  */
6235 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6236 if (!integer_onep (diff))
/* A + 1 > Y together with the bound A < X becomes A >= Y.  */
6239 return fold (build2 (GE_EXPR, type, a, y));
6242 /* Perform constant folding and related simplification of EXPR.
6243 The related simplifications include x*1 => x, x*0 => 0, etc.,
6244 and application of the associative law.
6245 NOP_EXPR conversions may be removed freely (as long as we
6246 are careful not to change the type of the overall expression).
6247 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6248 but we can constant-fold them if they have constant operands. */
6250 #ifdef ENABLE_FOLD_CHECKING
6251 # define fold(x) fold_1 (x)
6252 static tree fold_1 (tree);
6258 const tree t = expr;
6259 const tree type = TREE_TYPE (expr);
6260 tree t1 = NULL_TREE;
6262 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6263 enum tree_code code = TREE_CODE (t);
6264 enum tree_code_class kind = TREE_CODE_CLASS (code);
6266 /* WINS will be nonzero when the switch is done
6267 if all operands are constant. */
6270 /* Return right away if a constant. */
6271 if (kind == tcc_constant)
6274 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6278 /* Special case for conversion ops that can have fixed point args. */
6279 arg0 = TREE_OPERAND (t, 0);
6281 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6283 STRIP_SIGN_NOPS (arg0);
6285 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6286 subop = TREE_REALPART (arg0);
6290 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6291 && TREE_CODE (subop) != REAL_CST)
6292 /* Note that TREE_CONSTANT isn't enough:
6293 static var addresses are constant but we can't
6294 do arithmetic on them. */
6297 else if (IS_EXPR_CODE_CLASS (kind))
6299 int len = TREE_CODE_LENGTH (code);
6301 for (i = 0; i < len; i++)
6303 tree op = TREE_OPERAND (t, i);
6307 continue; /* Valid for CALL_EXPR, at least. */
6309 /* Strip any conversions that don't change the mode. This is
6310 safe for every expression, except for a comparison expression
6311 because its signedness is derived from its operands. So, in
6312 the latter case, only strip conversions that don't change the
6315 Note that this is done as an internal manipulation within the
6316 constant folder, in order to find the simplest representation
6317 of the arguments so that their form can be studied. In any
6318 cases, the appropriate type conversions should be put back in
6319 the tree that will get out of the constant folder. */
6320 if (kind == tcc_comparison)
6321 STRIP_SIGN_NOPS (op);
6325 if (TREE_CODE (op) == COMPLEX_CST)
6326 subop = TREE_REALPART (op);
6330 if (TREE_CODE (subop) != INTEGER_CST
6331 && TREE_CODE (subop) != REAL_CST)
6332 /* Note that TREE_CONSTANT isn't enough:
6333 static var addresses are constant but we can't
6334 do arithmetic on them. */
6344 /* If this is a commutative operation, and ARG0 is a constant, move it
6345 to ARG1 to reduce the number of tests below. */
6346 if (commutative_tree_code (code)
6347 && tree_swap_operands_p (arg0, arg1, true))
6348 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6349 TREE_OPERAND (t, 0)));
6351 /* Now WINS is set as described above,
6352 ARG0 is the first operand of EXPR,
6353 and ARG1 is the second operand (if it has more than one operand).
6355 First check for cases where an arithmetic operation is applied to a
6356 compound, conditional, or comparison operation. Push the arithmetic
6357 operation inside the compound or conditional to see if any folding
6358 can then be done. Convert comparison to conditional for this purpose.
6359 This also optimizes non-constant cases that used to be done in
6362 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6363 one of the operands is a comparison and the other is a comparison, a
6364 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6365 code below would make the expression more complex. Change it to a
6366 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6367 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6369 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6370 || code == EQ_EXPR || code == NE_EXPR)
6371 && ((truth_value_p (TREE_CODE (arg0))
6372 && (truth_value_p (TREE_CODE (arg1))
6373 || (TREE_CODE (arg1) == BIT_AND_EXPR
6374 && integer_onep (TREE_OPERAND (arg1, 1)))))
6375 || (truth_value_p (TREE_CODE (arg1))
6376 && (truth_value_p (TREE_CODE (arg0))
6377 || (TREE_CODE (arg0) == BIT_AND_EXPR
6378 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6380 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6381 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6383 type, fold_convert (boolean_type_node, arg0),
6384 fold_convert (boolean_type_node, arg1)));
6386 if (code == EQ_EXPR)
6387 tem = invert_truthvalue (tem);
6392 if (TREE_CODE_CLASS (code) == tcc_unary)
6394 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6395 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6396 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6397 else if (TREE_CODE (arg0) == COND_EXPR)
6399 tree arg01 = TREE_OPERAND (arg0, 1);
6400 tree arg02 = TREE_OPERAND (arg0, 2);
6401 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6402 arg01 = fold (build1 (code, type, arg01));
6403 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6404 arg02 = fold (build1 (code, type, arg02));
6405 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6408 /* If this was a conversion, and all we did was to move into
6409 inside the COND_EXPR, bring it back out. But leave it if
6410 it is a conversion from integer to integer and the
6411 result precision is no wider than a word since such a
6412 conversion is cheap and may be optimized away by combine,
6413 while it couldn't if it were outside the COND_EXPR. Then return
6414 so we don't get into an infinite recursion loop taking the
6415 conversion out and then back in. */
6417 if ((code == NOP_EXPR || code == CONVERT_EXPR
6418 || code == NON_LVALUE_EXPR)
6419 && TREE_CODE (tem) == COND_EXPR
6420 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6421 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6422 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6423 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6424 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6425 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6426 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6428 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6429 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6430 || flag_syntax_only))
6431 tem = build1 (code, type,
6433 TREE_TYPE (TREE_OPERAND
6434 (TREE_OPERAND (tem, 1), 0)),
6435 TREE_OPERAND (tem, 0),
6436 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6437 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6440 else if (COMPARISON_CLASS_P (arg0))
6442 if (TREE_CODE (type) == BOOLEAN_TYPE)
6444 arg0 = copy_node (arg0);
6445 TREE_TYPE (arg0) = type;
6448 else if (TREE_CODE (type) != INTEGER_TYPE)
6449 return fold (build3 (COND_EXPR, type, arg0,
6450 fold (build1 (code, type,
6452 fold (build1 (code, type,
6453 integer_zero_node))));
6456 else if (TREE_CODE_CLASS (code) == tcc_comparison
6457 && TREE_CODE (arg0) == COMPOUND_EXPR)
6458 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6459 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6460 else if (TREE_CODE_CLASS (code) == tcc_comparison
6461 && TREE_CODE (arg1) == COMPOUND_EXPR)
6462 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6463 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6464 else if (TREE_CODE_CLASS (code) == tcc_binary
6465 || TREE_CODE_CLASS (code) == tcc_comparison)
6467 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6468 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6469 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6471 if (TREE_CODE (arg1) == COMPOUND_EXPR
6472 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6473 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6474 fold (build2 (code, type,
6475 arg0, TREE_OPERAND (arg1, 1))));
6477 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6479 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6480 /*cond_first_p=*/1);
6481 if (tem != NULL_TREE)
6485 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6487 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6488 /*cond_first_p=*/0);
6489 if (tem != NULL_TREE)
6497 return fold (DECL_INITIAL (t));
6502 case FIX_TRUNC_EXPR:
6504 case FIX_FLOOR_EXPR:
6505 case FIX_ROUND_EXPR:
6506 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6507 return TREE_OPERAND (t, 0);
6509 /* Handle cases of two conversions in a row. */
6510 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6511 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6513 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6514 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6515 int inside_int = INTEGRAL_TYPE_P (inside_type);
6516 int inside_ptr = POINTER_TYPE_P (inside_type);
6517 int inside_float = FLOAT_TYPE_P (inside_type);
6518 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6519 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6520 int inter_int = INTEGRAL_TYPE_P (inter_type);
6521 int inter_ptr = POINTER_TYPE_P (inter_type);
6522 int inter_float = FLOAT_TYPE_P (inter_type);
6523 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6524 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6525 int final_int = INTEGRAL_TYPE_P (type);
6526 int final_ptr = POINTER_TYPE_P (type);
6527 int final_float = FLOAT_TYPE_P (type);
6528 unsigned int final_prec = TYPE_PRECISION (type);
6529 int final_unsignedp = TYPE_UNSIGNED (type);
6531 /* In addition to the cases of two conversions in a row
6532 handled below, if we are converting something to its own
6533 type via an object of identical or wider precision, neither
6534 conversion is needed. */
6535 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6536 && ((inter_int && final_int) || (inter_float && final_float))
6537 && inter_prec >= final_prec)
6538 return fold (build1 (code, type,
6539 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6541 /* Likewise, if the intermediate and final types are either both
6542 float or both integer, we don't need the middle conversion if
6543 it is wider than the final type and doesn't change the signedness
6544 (for integers). Avoid this if the final type is a pointer
6545 since then we sometimes need the inner conversion. Likewise if
6546 the outer has a precision not equal to the size of its mode. */
6547 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6548 || (inter_float && inside_float))
6549 && inter_prec >= inside_prec
6550 && (inter_float || inter_unsignedp == inside_unsignedp)
6551 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6552 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6554 return fold (build1 (code, type,
6555 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6557 /* If we have a sign-extension of a zero-extended value, we can
6558 replace that by a single zero-extension. */
6559 if (inside_int && inter_int && final_int
6560 && inside_prec < inter_prec && inter_prec < final_prec
6561 && inside_unsignedp && !inter_unsignedp)
6562 return fold (build1 (code, type,
6563 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6565 /* Two conversions in a row are not needed unless:
6566 - some conversion is floating-point (overstrict for now), or
6567 - the intermediate type is narrower than both initial and
6569 - the intermediate type and innermost type differ in signedness,
6570 and the outermost type is wider than the intermediate, or
6571 - the initial type is a pointer type and the precisions of the
6572 intermediate and final types differ, or
6573 - the final type is a pointer type and the precisions of the
6574 initial and intermediate types differ. */
6575 if (! inside_float && ! inter_float && ! final_float
6576 && (inter_prec > inside_prec || inter_prec > final_prec)
6577 && ! (inside_int && inter_int
6578 && inter_unsignedp != inside_unsignedp
6579 && inter_prec < final_prec)
6580 && ((inter_unsignedp && inter_prec > inside_prec)
6581 == (final_unsignedp && final_prec > inter_prec))
6582 && ! (inside_ptr && inter_prec != final_prec)
6583 && ! (final_ptr && inside_prec != inter_prec)
6584 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6585 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6587 return fold (build1 (code, type,
6588 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6591 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6592 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6593 /* Detect assigning a bitfield. */
6594 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6595 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6597 /* Don't leave an assignment inside a conversion
6598 unless assigning a bitfield. */
6599 tree prev = TREE_OPERAND (t, 0);
6600 tem = copy_node (t);
6601 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6602 /* First do the assignment, then return converted constant. */
6603 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6604 TREE_NO_WARNING (tem) = 1;
6605 TREE_USED (tem) = 1;
6609 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6610 constants (if x has signed type, the sign bit cannot be set
6611 in c). This folds extension into the BIT_AND_EXPR. */
6612 if (INTEGRAL_TYPE_P (type)
6613 && TREE_CODE (type) != BOOLEAN_TYPE
6614 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6615 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6617 tree and = TREE_OPERAND (t, 0);
6618 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6621 if (TYPE_UNSIGNED (TREE_TYPE (and))
6622 || (TYPE_PRECISION (type)
6623 <= TYPE_PRECISION (TREE_TYPE (and))))
6625 else if (TYPE_PRECISION (TREE_TYPE (and1))
6626 <= HOST_BITS_PER_WIDE_INT
6627 && host_integerp (and1, 1))
6629 unsigned HOST_WIDE_INT cst;
6631 cst = tree_low_cst (and1, 1);
6632 cst &= (HOST_WIDE_INT) -1
6633 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6634 change = (cst == 0);
6635 #ifdef LOAD_EXTEND_OP
6637 && !flag_syntax_only
6638 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6641 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6642 and0 = fold_convert (uns, and0);
6643 and1 = fold_convert (uns, and1);
6648 return fold (build2 (BIT_AND_EXPR, type,
6649 fold_convert (type, and0),
6650 fold_convert (type, and1)));
6653 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6654 T2 being pointers to types of the same size. */
6655 if (POINTER_TYPE_P (TREE_TYPE (t))
6656 && BINARY_CLASS_P (arg0)
6657 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6658 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6660 tree arg00 = TREE_OPERAND (arg0, 0);
6661 tree t0 = TREE_TYPE (t);
6662 tree t1 = TREE_TYPE (arg00);
6663 tree tt0 = TREE_TYPE (t0);
6664 tree tt1 = TREE_TYPE (t1);
6665 tree s0 = TYPE_SIZE (tt0);
6666 tree s1 = TYPE_SIZE (tt1);
6668 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6669 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6670 TREE_OPERAND (arg0, 1));
6673 tem = fold_convert_const (code, type, arg0);
6674 return tem ? tem : t;
6676 case VIEW_CONVERT_EXPR:
6677 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6678 return build1 (VIEW_CONVERT_EXPR, type,
6679 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6683 if (TREE_CODE (arg0) == CONSTRUCTOR
6684 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6686 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6688 return TREE_VALUE (m);
6693 if (TREE_CONSTANT (t) != wins)
6695 tem = copy_node (t);
6696 TREE_CONSTANT (tem) = wins;
6697 TREE_INVARIANT (tem) = wins;
6703 if (negate_expr_p (arg0))
6704 return fold_convert (type, negate_expr (arg0));
6708 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6709 return fold_abs_const (arg0, type);
6710 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6711 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6712 /* Convert fabs((double)float) into (double)fabsf(float). */
6713 else if (TREE_CODE (arg0) == NOP_EXPR
6714 && TREE_CODE (type) == REAL_TYPE)
6716 tree targ0 = strip_float_extensions (arg0);
6718 return fold_convert (type, fold (build1 (ABS_EXPR,
6722 else if (tree_expr_nonnegative_p (arg0))
6727 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6728 return fold_convert (type, arg0);
6729 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6730 return build2 (COMPLEX_EXPR, type,
6731 TREE_OPERAND (arg0, 0),
6732 negate_expr (TREE_OPERAND (arg0, 1)));
6733 else if (TREE_CODE (arg0) == COMPLEX_CST)
6734 return build_complex (type, TREE_REALPART (arg0),
6735 negate_expr (TREE_IMAGPART (arg0)));
6736 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6737 return fold (build2 (TREE_CODE (arg0), type,
6738 fold (build1 (CONJ_EXPR, type,
6739 TREE_OPERAND (arg0, 0))),
6740 fold (build1 (CONJ_EXPR, type,
6741 TREE_OPERAND (arg0, 1)))));
6742 else if (TREE_CODE (arg0) == CONJ_EXPR)
6743 return TREE_OPERAND (arg0, 0);
6747 if (TREE_CODE (arg0) == INTEGER_CST)
6748 return fold_not_const (arg0, type);
6749 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6750 return TREE_OPERAND (arg0, 0);
6754 /* A + (-B) -> A - B */
6755 if (TREE_CODE (arg1) == NEGATE_EXPR)
6756 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6757 /* (-A) + B -> B - A */
6758 if (TREE_CODE (arg0) == NEGATE_EXPR
6759 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6760 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6761 if (! FLOAT_TYPE_P (type))
6763 if (integer_zerop (arg1))
6764 return non_lvalue (fold_convert (type, arg0));
6766 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6767 with a constant, and the two constants have no bits in common,
6768 we should treat this as a BIT_IOR_EXPR since this may produce more
6770 if (TREE_CODE (arg0) == BIT_AND_EXPR
6771 && TREE_CODE (arg1) == BIT_AND_EXPR
6772 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6773 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6774 && integer_zerop (const_binop (BIT_AND_EXPR,
6775 TREE_OPERAND (arg0, 1),
6776 TREE_OPERAND (arg1, 1), 0)))
6778 code = BIT_IOR_EXPR;
6782 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6783 (plus (plus (mult) (mult)) (foo)) so that we can
6784 take advantage of the factoring cases below. */
6785 if (((TREE_CODE (arg0) == PLUS_EXPR
6786 || TREE_CODE (arg0) == MINUS_EXPR)
6787 && TREE_CODE (arg1) == MULT_EXPR)
6788 || ((TREE_CODE (arg1) == PLUS_EXPR
6789 || TREE_CODE (arg1) == MINUS_EXPR)
6790 && TREE_CODE (arg0) == MULT_EXPR))
6792 tree parg0, parg1, parg, marg;
6793 enum tree_code pcode;
6795 if (TREE_CODE (arg1) == MULT_EXPR)
6796 parg = arg0, marg = arg1;
6798 parg = arg1, marg = arg0;
6799 pcode = TREE_CODE (parg);
6800 parg0 = TREE_OPERAND (parg, 0);
6801 parg1 = TREE_OPERAND (parg, 1);
6805 if (TREE_CODE (parg0) == MULT_EXPR
6806 && TREE_CODE (parg1) != MULT_EXPR)
6807 return fold (build2 (pcode, type,
6808 fold (build2 (PLUS_EXPR, type,
6809 fold_convert (type, parg0),
6810 fold_convert (type, marg))),
6811 fold_convert (type, parg1)));
6812 if (TREE_CODE (parg0) != MULT_EXPR
6813 && TREE_CODE (parg1) == MULT_EXPR)
6814 return fold (build2 (PLUS_EXPR, type,
6815 fold_convert (type, parg0),
6816 fold (build2 (pcode, type,
6817 fold_convert (type, marg),
6822 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6824 tree arg00, arg01, arg10, arg11;
6825 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6827 /* (A * C) + (B * C) -> (A+B) * C.
6828 We are most concerned about the case where C is a constant,
6829 but other combinations show up during loop reduction. Since
6830 it is not difficult, try all four possibilities. */
6832 arg00 = TREE_OPERAND (arg0, 0);
6833 arg01 = TREE_OPERAND (arg0, 1);
6834 arg10 = TREE_OPERAND (arg1, 0);
6835 arg11 = TREE_OPERAND (arg1, 1);
6838 if (operand_equal_p (arg01, arg11, 0))
6839 same = arg01, alt0 = arg00, alt1 = arg10;
6840 else if (operand_equal_p (arg00, arg10, 0))
6841 same = arg00, alt0 = arg01, alt1 = arg11;
6842 else if (operand_equal_p (arg00, arg11, 0))
6843 same = arg00, alt0 = arg01, alt1 = arg10;
6844 else if (operand_equal_p (arg01, arg10, 0))
6845 same = arg01, alt0 = arg00, alt1 = arg11;
6847 /* No identical multiplicands; see if we can find a common
6848 power-of-two factor in non-power-of-two multiplies. This
6849 can help in multi-dimensional array access. */
6850 else if (TREE_CODE (arg01) == INTEGER_CST
6851 && TREE_CODE (arg11) == INTEGER_CST
6852 && TREE_INT_CST_HIGH (arg01) == 0
6853 && TREE_INT_CST_HIGH (arg11) == 0)
6855 HOST_WIDE_INT int01, int11, tmp;
6856 int01 = TREE_INT_CST_LOW (arg01);
6857 int11 = TREE_INT_CST_LOW (arg11);
6859 /* Move min of absolute values to int11. */
6860 if ((int01 >= 0 ? int01 : -int01)
6861 < (int11 >= 0 ? int11 : -int11))
6863 tmp = int01, int01 = int11, int11 = tmp;
6864 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6865 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6868 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6870 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6871 build_int_cst (NULL_TREE,
6879 return fold (build2 (MULT_EXPR, type,
6880 fold (build2 (PLUS_EXPR, type,
6881 fold_convert (type, alt0),
6882 fold_convert (type, alt1))),
6886 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
6887 of the array. The loop optimizer sometimes produces this type of
6889 if (TREE_CODE (arg0) == ADDR_EXPR
6890 && TREE_CODE (arg1) == MULT_EXPR)
6892 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6896 else if (TREE_CODE (arg1) == ADDR_EXPR
6897 && TREE_CODE (arg0) == MULT_EXPR)
6899 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6906 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6907 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6908 return non_lvalue (fold_convert (type, arg0));
6910 /* Likewise if the operands are reversed. */
6911 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6912 return non_lvalue (fold_convert (type, arg1));
6914 /* Convert X + -C into X - C. */
6915 if (TREE_CODE (arg1) == REAL_CST
6916 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6918 tem = fold_negate_const (arg1, type);
6919 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6920 return fold (build2 (MINUS_EXPR, type,
6921 fold_convert (type, arg0),
6922 fold_convert (type, tem)));
6925 /* Convert x+x into x*2.0. */
6926 if (operand_equal_p (arg0, arg1, 0)
6927 && SCALAR_FLOAT_TYPE_P (type))
6928 return fold (build2 (MULT_EXPR, type, arg0,
6929 build_real (type, dconst2)));
6931 /* Convert x*c+x into x*(c+1). */
6932 if (flag_unsafe_math_optimizations
6933 && TREE_CODE (arg0) == MULT_EXPR
6934 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6935 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6940 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6941 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6942 return fold (build2 (MULT_EXPR, type, arg1,
6943 build_real (type, c)));
6946 /* Convert x+x*c into x*(c+1). */
6947 if (flag_unsafe_math_optimizations
6948 && TREE_CODE (arg1) == MULT_EXPR
6949 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6950 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6951 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6955 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6956 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6957 return fold (build2 (MULT_EXPR, type, arg0,
6958 build_real (type, c)));
6961 /* Convert x*c1+x*c2 into x*(c1+c2). */
6962 if (flag_unsafe_math_optimizations
6963 && TREE_CODE (arg0) == MULT_EXPR
6964 && TREE_CODE (arg1) == MULT_EXPR
6965 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6966 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6967 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6968 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6969 && operand_equal_p (TREE_OPERAND (arg0, 0),
6970 TREE_OPERAND (arg1, 0), 0))
6972 REAL_VALUE_TYPE c1, c2;
6974 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6975 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6976 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6977 return fold (build2 (MULT_EXPR, type,
6978 TREE_OPERAND (arg0, 0),
6979 build_real (type, c1)));
6981 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6982 if (flag_unsafe_math_optimizations
6983 && TREE_CODE (arg1) == PLUS_EXPR
6984 && TREE_CODE (arg0) != MULT_EXPR)
6986 tree tree10 = TREE_OPERAND (arg1, 0);
6987 tree tree11 = TREE_OPERAND (arg1, 1);
6988 if (TREE_CODE (tree11) == MULT_EXPR
6989 && TREE_CODE (tree10) == MULT_EXPR)
6992 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6993 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6996 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
6997 if (flag_unsafe_math_optimizations
6998 && TREE_CODE (arg0) == PLUS_EXPR
6999 && TREE_CODE (arg1) != MULT_EXPR)
7001 tree tree00 = TREE_OPERAND (arg0, 0);
7002 tree tree01 = TREE_OPERAND (arg0, 1);
7003 if (TREE_CODE (tree01) == MULT_EXPR
7004 && TREE_CODE (tree00) == MULT_EXPR)
7007 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
7008 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
7014 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7015 is a rotate of A by C1 bits. */
7016 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7017 is a rotate of A by B bits. */
7019 enum tree_code code0, code1;
7020 code0 = TREE_CODE (arg0);
7021 code1 = TREE_CODE (arg1);
7022 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7023 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7024 && operand_equal_p (TREE_OPERAND (arg0, 0),
7025 TREE_OPERAND (arg1, 0), 0)
7026 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7028 tree tree01, tree11;
7029 enum tree_code code01, code11;
7031 tree01 = TREE_OPERAND (arg0, 1);
7032 tree11 = TREE_OPERAND (arg1, 1);
7033 STRIP_NOPS (tree01);
7034 STRIP_NOPS (tree11);
7035 code01 = TREE_CODE (tree01);
7036 code11 = TREE_CODE (tree11);
7037 if (code01 == INTEGER_CST
7038 && code11 == INTEGER_CST
7039 && TREE_INT_CST_HIGH (tree01) == 0
7040 && TREE_INT_CST_HIGH (tree11) == 0
7041 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7042 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7043 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7044 code0 == LSHIFT_EXPR ? tree01 : tree11);
7045 else if (code11 == MINUS_EXPR)
7047 tree tree110, tree111;
7048 tree110 = TREE_OPERAND (tree11, 0);
7049 tree111 = TREE_OPERAND (tree11, 1);
7050 STRIP_NOPS (tree110);
7051 STRIP_NOPS (tree111);
7052 if (TREE_CODE (tree110) == INTEGER_CST
7053 && 0 == compare_tree_int (tree110,
7055 (TREE_TYPE (TREE_OPERAND
7057 && operand_equal_p (tree01, tree111, 0))
7058 return build2 ((code0 == LSHIFT_EXPR
7061 type, TREE_OPERAND (arg0, 0), tree01);
7063 else if (code01 == MINUS_EXPR)
7065 tree tree010, tree011;
7066 tree010 = TREE_OPERAND (tree01, 0);
7067 tree011 = TREE_OPERAND (tree01, 1);
7068 STRIP_NOPS (tree010);
7069 STRIP_NOPS (tree011);
7070 if (TREE_CODE (tree010) == INTEGER_CST
7071 && 0 == compare_tree_int (tree010,
7073 (TREE_TYPE (TREE_OPERAND
7075 && operand_equal_p (tree11, tree011, 0))
7076 return build2 ((code0 != LSHIFT_EXPR
7079 type, TREE_OPERAND (arg0, 0), tree11);
7085 /* In most languages, can't associate operations on floats through
7086 parentheses. Rather than remember where the parentheses were, we
7087 don't associate floats at all, unless the user has specified
7088 -funsafe-math-optimizations. */
7091 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7093 tree var0, con0, lit0, minus_lit0;
7094 tree var1, con1, lit1, minus_lit1;
7096 /* Split both trees into variables, constants, and literals. Then
7097 associate each group together, the constants with literals,
7098 then the result with variables. This increases the chances of
7099 literals being recombined later and of generating relocatable
7100 expressions for the sum of a constant and literal. */
7101 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7102 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7103 code == MINUS_EXPR);
7105 /* Only do something if we found more than two objects. Otherwise,
7106 nothing has changed and we risk infinite recursion. */
7107 if (2 < ((var0 != 0) + (var1 != 0)
7108 + (con0 != 0) + (con1 != 0)
7109 + (lit0 != 0) + (lit1 != 0)
7110 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7112 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7113 if (code == MINUS_EXPR)
7116 var0 = associate_trees (var0, var1, code, type);
7117 con0 = associate_trees (con0, con1, code, type);
7118 lit0 = associate_trees (lit0, lit1, code, type);
7119 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7121 /* Preserve the MINUS_EXPR if the negative part of the literal is
7122 greater than the positive part. Otherwise, the multiplicative
7123 folding code (i.e extract_muldiv) may be fooled in case
7124 unsigned constants are subtracted, like in the following
7125 example: ((X*2 + 4) - 8U)/2. */
7126 if (minus_lit0 && lit0)
7128 if (TREE_CODE (lit0) == INTEGER_CST
7129 && TREE_CODE (minus_lit0) == INTEGER_CST
7130 && tree_int_cst_lt (lit0, minus_lit0))
7132 minus_lit0 = associate_trees (minus_lit0, lit0,
7138 lit0 = associate_trees (lit0, minus_lit0,
7146 return fold_convert (type,
7147 associate_trees (var0, minus_lit0,
7151 con0 = associate_trees (con0, minus_lit0,
7153 return fold_convert (type,
7154 associate_trees (var0, con0,
7159 con0 = associate_trees (con0, lit0, code, type);
7160 return fold_convert (type, associate_trees (var0, con0,
7167 t1 = const_binop (code, arg0, arg1, 0);
7168 if (t1 != NULL_TREE)
7170 /* The return value should always have
7171 the same type as the original expression. */
7172 if (TREE_TYPE (t1) != type)
7173 t1 = fold_convert (type, t1);
7180 /* A - (-B) -> A + B */
7181 if (TREE_CODE (arg1) == NEGATE_EXPR)
7182 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7183 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7184 if (TREE_CODE (arg0) == NEGATE_EXPR
7185 && (FLOAT_TYPE_P (type)
7186 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7187 && negate_expr_p (arg1)
7188 && reorder_operands_p (arg0, arg1))
7189 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7190 TREE_OPERAND (arg0, 0)));
7192 if (! FLOAT_TYPE_P (type))
7194 if (! wins && integer_zerop (arg0))
7195 return negate_expr (fold_convert (type, arg1));
7196 if (integer_zerop (arg1))
7197 return non_lvalue (fold_convert (type, arg0));
7199 /* Fold A - (A & B) into ~B & A. */
7200 if (!TREE_SIDE_EFFECTS (arg0)
7201 && TREE_CODE (arg1) == BIT_AND_EXPR)
7203 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7204 return fold (build2 (BIT_AND_EXPR, type,
7205 fold (build1 (BIT_NOT_EXPR, type,
7206 TREE_OPERAND (arg1, 0))),
7208 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7209 return fold (build2 (BIT_AND_EXPR, type,
7210 fold (build1 (BIT_NOT_EXPR, type,
7211 TREE_OPERAND (arg1, 1))),
7215 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7216 any power of 2 minus 1. */
7217 if (TREE_CODE (arg0) == BIT_AND_EXPR
7218 && TREE_CODE (arg1) == BIT_AND_EXPR
7219 && operand_equal_p (TREE_OPERAND (arg0, 0),
7220 TREE_OPERAND (arg1, 0), 0))
7222 tree mask0 = TREE_OPERAND (arg0, 1);
7223 tree mask1 = TREE_OPERAND (arg1, 1);
7224 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7226 if (operand_equal_p (tem, mask1, 0))
7228 tem = fold (build2 (BIT_XOR_EXPR, type,
7229 TREE_OPERAND (arg0, 0), mask1));
7230 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7235 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7236 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7237 return non_lvalue (fold_convert (type, arg0));
7239 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7240 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7241 (-ARG1 + ARG0) reduces to -ARG1. */
7242 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7243 return negate_expr (fold_convert (type, arg1));
7245 /* Fold &x - &x. This can happen from &x.foo - &x.
7246 This is unsafe for certain floats even in non-IEEE formats.
7247 In IEEE, it is unsafe because it does wrong for NaNs.
7248 Also note that operand_equal_p is always false if an operand
7251 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7252 && operand_equal_p (arg0, arg1, 0))
7253 return fold_convert (type, integer_zero_node);
7255 /* A - B -> A + (-B) if B is easily negatable. */
7256 if (!wins && negate_expr_p (arg1)
7257 && ((FLOAT_TYPE_P (type)
7258 /* Avoid this transformation if B is a positive REAL_CST. */
7259 && (TREE_CODE (arg1) != REAL_CST
7260 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7261 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7262 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7264 /* Try folding difference of addresses. */
7268 if ((TREE_CODE (arg0) == ADDR_EXPR
7269 || TREE_CODE (arg1) == ADDR_EXPR)
7270 && ptr_difference_const (arg0, arg1, &diff))
7271 return build_int_cst_type (type, diff);
7274 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7275 of the array. Loop optimizer sometimes produce this type of
7277 if (TREE_CODE (arg0) == ADDR_EXPR
7278 && TREE_CODE (arg1) == MULT_EXPR)
7280 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7285 if (TREE_CODE (arg0) == MULT_EXPR
7286 && TREE_CODE (arg1) == MULT_EXPR
7287 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7289 /* (A * C) - (B * C) -> (A-B) * C. */
7290 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7291 TREE_OPERAND (arg1, 1), 0))
7292 return fold (build2 (MULT_EXPR, type,
7293 fold (build2 (MINUS_EXPR, type,
7294 TREE_OPERAND (arg0, 0),
7295 TREE_OPERAND (arg1, 0))),
7296 TREE_OPERAND (arg0, 1)));
7297 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7298 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7299 TREE_OPERAND (arg1, 0), 0))
7300 return fold (build2 (MULT_EXPR, type,
7301 TREE_OPERAND (arg0, 0),
7302 fold (build2 (MINUS_EXPR, type,
7303 TREE_OPERAND (arg0, 1),
7304 TREE_OPERAND (arg1, 1)))));
7310 /* (-A) * (-B) -> A * B */
7311 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7312 return fold (build2 (MULT_EXPR, type,
7313 TREE_OPERAND (arg0, 0),
7314 negate_expr (arg1)));
7315 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7316 return fold (build2 (MULT_EXPR, type,
7318 TREE_OPERAND (arg1, 0)));
7320 if (! FLOAT_TYPE_P (type))
7322 if (integer_zerop (arg1))
7323 return omit_one_operand (type, arg1, arg0);
7324 if (integer_onep (arg1))
7325 return non_lvalue (fold_convert (type, arg0));
7327 /* (a * (1 << b)) is (a << b) */
7328 if (TREE_CODE (arg1) == LSHIFT_EXPR
7329 && integer_onep (TREE_OPERAND (arg1, 0)))
7330 return fold (build2 (LSHIFT_EXPR, type, arg0,
7331 TREE_OPERAND (arg1, 1)));
7332 if (TREE_CODE (arg0) == LSHIFT_EXPR
7333 && integer_onep (TREE_OPERAND (arg0, 0)))
7334 return fold (build2 (LSHIFT_EXPR, type, arg1,
7335 TREE_OPERAND (arg0, 1)));
7337 if (TREE_CODE (arg1) == INTEGER_CST
7338 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7339 fold_convert (type, arg1),
7341 return fold_convert (type, tem);
7346 /* Maybe fold x * 0 to 0. The expressions aren't the same
7347 when x is NaN, since x * 0 is also NaN. Nor are they the
7348 same in modes with signed zeros, since multiplying a
7349 negative value by 0 gives -0, not +0. */
7350 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7351 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7352 && real_zerop (arg1))
7353 return omit_one_operand (type, arg1, arg0);
7354 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7355 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7356 && real_onep (arg1))
7357 return non_lvalue (fold_convert (type, arg0));
7359 /* Transform x * -1.0 into -x. */
7360 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7361 && real_minus_onep (arg1))
7362 return fold_convert (type, negate_expr (arg0));
7364 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7365 if (flag_unsafe_math_optimizations
7366 && TREE_CODE (arg0) == RDIV_EXPR
7367 && TREE_CODE (arg1) == REAL_CST
7368 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7370 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7373 return fold (build2 (RDIV_EXPR, type, tem,
7374 TREE_OPERAND (arg0, 1)));
7377 if (flag_unsafe_math_optimizations)
7379 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7380 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7382 /* Optimizations of root(...)*root(...). */
7383 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7385 tree rootfn, arg, arglist;
7386 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7387 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7389 /* Optimize sqrt(x)*sqrt(x) as x. */
7390 if (BUILTIN_SQRT_P (fcode0)
7391 && operand_equal_p (arg00, arg10, 0)
7392 && ! HONOR_SNANS (TYPE_MODE (type)))
7395 /* Optimize root(x)*root(y) as root(x*y). */
7396 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7397 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7398 arglist = build_tree_list (NULL_TREE, arg);
7399 return build_function_call_expr (rootfn, arglist);
7402 /* Optimize expN(x)*expN(y) as expN(x+y). */
7403 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7405 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7406 tree arg = build2 (PLUS_EXPR, type,
7407 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7408 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7409 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7410 return build_function_call_expr (expfn, arglist);
7413 /* Optimizations of pow(...)*pow(...). */
7414 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7415 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7416 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7418 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7419 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7421 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7422 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7425 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7426 if (operand_equal_p (arg01, arg11, 0))
7428 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7429 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7430 tree arglist = tree_cons (NULL_TREE, fold (arg),
7431 build_tree_list (NULL_TREE,
7433 return build_function_call_expr (powfn, arglist);
7436 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7437 if (operand_equal_p (arg00, arg10, 0))
7439 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7440 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7441 tree arglist = tree_cons (NULL_TREE, arg00,
7442 build_tree_list (NULL_TREE,
7444 return build_function_call_expr (powfn, arglist);
7448 /* Optimize tan(x)*cos(x) as sin(x). */
7449 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7450 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7451 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7452 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7453 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7454 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7455 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7456 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7458 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7460 if (sinfn != NULL_TREE)
7461 return build_function_call_expr (sinfn,
7462 TREE_OPERAND (arg0, 1));
7465 /* Optimize x*pow(x,c) as pow(x,c+1). */
7466 if (fcode1 == BUILT_IN_POW
7467 || fcode1 == BUILT_IN_POWF
7468 || fcode1 == BUILT_IN_POWL)
7470 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7471 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7473 if (TREE_CODE (arg11) == REAL_CST
7474 && ! TREE_CONSTANT_OVERFLOW (arg11)
7475 && operand_equal_p (arg0, arg10, 0))
7477 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7481 c = TREE_REAL_CST (arg11);
7482 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7483 arg = build_real (type, c);
7484 arglist = build_tree_list (NULL_TREE, arg);
7485 arglist = tree_cons (NULL_TREE, arg0, arglist);
7486 return build_function_call_expr (powfn, arglist);
7490 /* Optimize pow(x,c)*x as pow(x,c+1). */
7491 if (fcode0 == BUILT_IN_POW
7492 || fcode0 == BUILT_IN_POWF
7493 || fcode0 == BUILT_IN_POWL)
7495 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7496 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7498 if (TREE_CODE (arg01) == REAL_CST
7499 && ! TREE_CONSTANT_OVERFLOW (arg01)
7500 && operand_equal_p (arg1, arg00, 0))
7502 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7506 c = TREE_REAL_CST (arg01);
7507 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7508 arg = build_real (type, c);
7509 arglist = build_tree_list (NULL_TREE, arg);
7510 arglist = tree_cons (NULL_TREE, arg1, arglist);
7511 return build_function_call_expr (powfn, arglist);
7515 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7517 && operand_equal_p (arg0, arg1, 0))
7519 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7523 tree arg = build_real (type, dconst2);
7524 tree arglist = build_tree_list (NULL_TREE, arg);
7525 arglist = tree_cons (NULL_TREE, arg0, arglist);
7526 return build_function_call_expr (powfn, arglist);
7535 if (integer_all_onesp (arg1))
7536 return omit_one_operand (type, arg1, arg0);
7537 if (integer_zerop (arg1))
7538 return non_lvalue (fold_convert (type, arg0));
7539 if (operand_equal_p (arg0, arg1, 0))
7540 return non_lvalue (fold_convert (type, arg0));
7543 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7546 t1 = build_int_cst (type, -1);
7547 t1 = force_fit_type (t1, 0, false, false);
7548 return omit_one_operand (type, t1, arg1);
7552 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7555 t1 = build_int_cst (type, -1);
7556 t1 = force_fit_type (t1, 0, false, false);
7557 return omit_one_operand (type, t1, arg0);
7560 t1 = distribute_bit_expr (code, type, arg0, arg1);
7561 if (t1 != NULL_TREE)
7564 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7566 This results in more efficient code for machines without a NAND
7567 instruction. Combine will canonicalize to the first form
7568 which will allow use of NAND instructions provided by the
7569 backend if they exist. */
7570 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7571 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7573 return fold (build1 (BIT_NOT_EXPR, type,
7574 build2 (BIT_AND_EXPR, type,
7575 TREE_OPERAND (arg0, 0),
7576 TREE_OPERAND (arg1, 0))));
7579 /* See if this can be simplified into a rotate first. If that
7580 is unsuccessful continue in the association code. */
7584 if (integer_zerop (arg1))
7585 return non_lvalue (fold_convert (type, arg0));
7586 if (integer_all_onesp (arg1))
7587 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7588 if (operand_equal_p (arg0, arg1, 0))
7589 return omit_one_operand (type, integer_zero_node, arg0);
7592 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7593 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7595 t1 = build_int_cst (type, -1);
7596 t1 = force_fit_type (t1, 0, false, false);
7597 return omit_one_operand (type, t1, arg1);
7601 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7602 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7604 t1 = build_int_cst (type, -1);
7605 t1 = force_fit_type (t1, 0, false, false);
7606 return omit_one_operand (type, t1, arg0);
7609 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7610 with a constant, and the two constants have no bits in common,
7611 we should treat this as a BIT_IOR_EXPR since this may produce more
7613 if (TREE_CODE (arg0) == BIT_AND_EXPR
7614 && TREE_CODE (arg1) == BIT_AND_EXPR
7615 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7616 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7617 && integer_zerop (const_binop (BIT_AND_EXPR,
7618 TREE_OPERAND (arg0, 1),
7619 TREE_OPERAND (arg1, 1), 0)))
7621 code = BIT_IOR_EXPR;
7625 /* See if this can be simplified into a rotate first. If that
7626 is unsuccessful continue in the association code. */
7630 if (integer_all_onesp (arg1))
7631 return non_lvalue (fold_convert (type, arg0));
7632 if (integer_zerop (arg1))
7633 return omit_one_operand (type, arg1, arg0);
7634 if (operand_equal_p (arg0, arg1, 0))
7635 return non_lvalue (fold_convert (type, arg0));
7637 /* ~X & X is always zero. */
7638 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7639 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7640 return omit_one_operand (type, integer_zero_node, arg1);
7642 /* X & ~X is always zero. */
7643 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7644 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7645 return omit_one_operand (type, integer_zero_node, arg0);
7647 t1 = distribute_bit_expr (code, type, arg0, arg1);
7648 if (t1 != NULL_TREE)
7650 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7651 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7652 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7655 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7657 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7658 && (~TREE_INT_CST_LOW (arg1)
7659 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7660 return fold_convert (type, TREE_OPERAND (arg0, 0));
7663 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7665 This results in more efficient code for machines without a NOR
7666 instruction. Combine will canonicalize to the first form
7667 which will allow use of NOR instructions provided by the
7668 backend if they exist. */
7669 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7670 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7672 return fold (build1 (BIT_NOT_EXPR, type,
7673 build2 (BIT_IOR_EXPR, type,
7674 TREE_OPERAND (arg0, 0),
7675 TREE_OPERAND (arg1, 0))));
7681 /* Don't touch a floating-point divide by zero unless the mode
7682 of the constant can represent infinity. */
7683 if (TREE_CODE (arg1) == REAL_CST
7684 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7685 && real_zerop (arg1))
7688 /* (-A) / (-B) -> A / B */
7689 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7690 return fold (build2 (RDIV_EXPR, type,
7691 TREE_OPERAND (arg0, 0),
7692 negate_expr (arg1)));
7693 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7694 return fold (build2 (RDIV_EXPR, type,
7696 TREE_OPERAND (arg1, 0)));
7698 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7699 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7700 && real_onep (arg1))
7701 return non_lvalue (fold_convert (type, arg0));
7703 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7704 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7705 && real_minus_onep (arg1))
7706 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7708 /* If ARG1 is a constant, we can convert this to a multiply by the
7709 reciprocal. This does not have the same rounding properties,
7710 so only do this if -funsafe-math-optimizations. We can actually
7711 always safely do it if ARG1 is a power of two, but it's hard to
7712 tell if it is or not in a portable manner. */
7713 if (TREE_CODE (arg1) == REAL_CST)
7715 if (flag_unsafe_math_optimizations
7716 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7718 return fold (build2 (MULT_EXPR, type, arg0, tem));
7719 /* Find the reciprocal if optimizing and the result is exact. */
7723 r = TREE_REAL_CST (arg1);
7724 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7726 tem = build_real (type, r);
7727 return fold (build2 (MULT_EXPR, type, arg0, tem));
7731 /* Convert A/B/C to A/(B*C). */
7732 if (flag_unsafe_math_optimizations
7733 && TREE_CODE (arg0) == RDIV_EXPR)
7734 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7735 fold (build2 (MULT_EXPR, type,
7736 TREE_OPERAND (arg0, 1), arg1))));
7738 /* Convert A/(B/C) to (A/B)*C. */
7739 if (flag_unsafe_math_optimizations
7740 && TREE_CODE (arg1) == RDIV_EXPR)
7741 return fold (build2 (MULT_EXPR, type,
7742 fold (build2 (RDIV_EXPR, type, arg0,
7743 TREE_OPERAND (arg1, 0))),
7744 TREE_OPERAND (arg1, 1)));
7746 /* Convert C1/(X*C2) into (C1/C2)/X. */
7747 if (flag_unsafe_math_optimizations
7748 && TREE_CODE (arg1) == MULT_EXPR
7749 && TREE_CODE (arg0) == REAL_CST
7750 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7752 tree tem = const_binop (RDIV_EXPR, arg0,
7753 TREE_OPERAND (arg1, 1), 0);
7755 return fold (build2 (RDIV_EXPR, type, tem,
7756 TREE_OPERAND (arg1, 0)));
7759 if (flag_unsafe_math_optimizations)
7761 enum built_in_function fcode = builtin_mathfn_code (arg1);
7762 /* Optimize x/expN(y) into x*expN(-y). */
7763 if (BUILTIN_EXPONENT_P (fcode))
7765 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7766 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7767 tree arglist = build_tree_list (NULL_TREE,
7768 fold_convert (type, arg));
7769 arg1 = build_function_call_expr (expfn, arglist);
7770 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7773 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7774 if (fcode == BUILT_IN_POW
7775 || fcode == BUILT_IN_POWF
7776 || fcode == BUILT_IN_POWL)
7778 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7779 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7780 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7781 tree neg11 = fold_convert (type, negate_expr (arg11));
7782 tree arglist = tree_cons(NULL_TREE, arg10,
7783 build_tree_list (NULL_TREE, neg11));
7784 arg1 = build_function_call_expr (powfn, arglist);
7785 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7789 if (flag_unsafe_math_optimizations)
7791 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7792 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7794 /* Optimize sin(x)/cos(x) as tan(x). */
7795 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7796 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7797 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7798 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7799 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7801 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7803 if (tanfn != NULL_TREE)
7804 return build_function_call_expr (tanfn,
7805 TREE_OPERAND (arg0, 1));
7808 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7809 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7810 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7811 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7812 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7813 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7815 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7817 if (tanfn != NULL_TREE)
7819 tree tmp = TREE_OPERAND (arg0, 1);
7820 tmp = build_function_call_expr (tanfn, tmp);
7821 return fold (build2 (RDIV_EXPR, type,
7822 build_real (type, dconst1), tmp));
7826 /* Optimize pow(x,c)/x as pow(x,c-1). */
7827 if (fcode0 == BUILT_IN_POW
7828 || fcode0 == BUILT_IN_POWF
7829 || fcode0 == BUILT_IN_POWL)
7831 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7832 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7833 if (TREE_CODE (arg01) == REAL_CST
7834 && ! TREE_CONSTANT_OVERFLOW (arg01)
7835 && operand_equal_p (arg1, arg00, 0))
7837 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7841 c = TREE_REAL_CST (arg01);
7842 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7843 arg = build_real (type, c);
7844 arglist = build_tree_list (NULL_TREE, arg);
7845 arglist = tree_cons (NULL_TREE, arg1, arglist);
7846 return build_function_call_expr (powfn, arglist);
7852 case TRUNC_DIV_EXPR:
7853 case ROUND_DIV_EXPR:
7854 case FLOOR_DIV_EXPR:
7856 case EXACT_DIV_EXPR:
7857 if (integer_onep (arg1))
7858 return non_lvalue (fold_convert (type, arg0));
7859 if (integer_zerop (arg1))
7862 if (!TYPE_UNSIGNED (type)
7863 && TREE_CODE (arg1) == INTEGER_CST
7864 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7865 && TREE_INT_CST_HIGH (arg1) == -1)
7866 return fold_convert (type, negate_expr (arg0));
7868 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7869 operation, EXACT_DIV_EXPR.
7871 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7872 At one time others generated faster code, it's not clear if they do
7873 after the last round to changes to the DIV code in expmed.c. */
7874 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7875 && multiple_of_p (type, arg0, arg1))
7876 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7878 if (TREE_CODE (arg1) == INTEGER_CST
7879 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7881 return fold_convert (type, tem);
7886 case FLOOR_MOD_EXPR:
7887 case ROUND_MOD_EXPR:
7888 case TRUNC_MOD_EXPR:
7889 if (integer_onep (arg1))
7890 return omit_one_operand (type, integer_zero_node, arg0);
7891 if (integer_zerop (arg1))
7894 /* X % -1 is zero. */
7895 if (!TYPE_UNSIGNED (type)
7896 && TREE_CODE (arg1) == INTEGER_CST
7897 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7898 && TREE_INT_CST_HIGH (arg1) == -1)
7899 return omit_one_operand (type, integer_zero_node, arg0);
7901 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7902 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7903 if (code == TRUNC_MOD_EXPR
7904 && TYPE_UNSIGNED (type)
7905 && integer_pow2p (arg1))
7907 unsigned HOST_WIDE_INT high, low;
7911 l = tree_log2 (arg1);
7912 if (l >= HOST_BITS_PER_WIDE_INT)
7914 high = ((unsigned HOST_WIDE_INT) 1
7915 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7921 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7924 mask = build_int_cst_wide (type, low, high);
7925 return fold (build2 (BIT_AND_EXPR, type,
7926 fold_convert (type, arg0), mask));
7929 /* X % -C is the same as X % C. */
7930 if (code == TRUNC_MOD_EXPR
7931 && !TYPE_UNSIGNED (type)
7932 && TREE_CODE (arg1) == INTEGER_CST
7933 && TREE_INT_CST_HIGH (arg1) < 0
7935 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7936 && !sign_bit_p (arg1, arg1))
7937 return fold (build2 (code, type, fold_convert (type, arg0),
7938 fold_convert (type, negate_expr (arg1))));
7940 /* X % -Y is the same as X % Y. */
7941 if (code == TRUNC_MOD_EXPR
7942 && !TYPE_UNSIGNED (type)
7943 && TREE_CODE (arg1) == NEGATE_EXPR
7945 return fold (build2 (code, type, fold_convert (type, arg0),
7946 fold_convert (type, TREE_OPERAND (arg1, 0))));
7948 if (TREE_CODE (arg1) == INTEGER_CST
7949 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7951 return fold_convert (type, tem);
7957 if (integer_all_onesp (arg0))
7958 return omit_one_operand (type, arg0, arg1);
7962 /* Optimize -1 >> x for arithmetic right shifts. */
7963 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7964 return omit_one_operand (type, arg0, arg1);
7965 /* ... fall through ... */
7969 if (integer_zerop (arg1))
7970 return non_lvalue (fold_convert (type, arg0));
7971 if (integer_zerop (arg0))
7972 return omit_one_operand (type, arg0, arg1);
7974 /* Since negative shift count is not well-defined,
7975 don't try to compute it in the compiler. */
7976 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7978 /* Rewrite an LROTATE_EXPR by a constant into an
7979 RROTATE_EXPR by a new constant. */
7980 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7982 tree tem = build_int_cst (NULL_TREE,
7983 GET_MODE_BITSIZE (TYPE_MODE (type)));
7984 tem = fold_convert (TREE_TYPE (arg1), tem);
7985 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7986 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7989 /* If we have a rotate of a bit operation with the rotate count and
7990 the second operand of the bit operation both constant,
7991 permute the two operations. */
7992 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7993 && (TREE_CODE (arg0) == BIT_AND_EXPR
7994 || TREE_CODE (arg0) == BIT_IOR_EXPR
7995 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7996 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7997 return fold (build2 (TREE_CODE (arg0), type,
7998 fold (build2 (code, type,
7999 TREE_OPERAND (arg0, 0), arg1)),
8000 fold (build2 (code, type,
8001 TREE_OPERAND (arg0, 1), arg1))));
8003 /* Two consecutive rotates adding up to the width of the mode can
8005 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8006 && TREE_CODE (arg0) == RROTATE_EXPR
8007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8008 && TREE_INT_CST_HIGH (arg1) == 0
8009 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8010 && ((TREE_INT_CST_LOW (arg1)
8011 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8012 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8013 return TREE_OPERAND (arg0, 0);
8018 if (operand_equal_p (arg0, arg1, 0))
8019 return omit_one_operand (type, arg0, arg1);
8020 if (INTEGRAL_TYPE_P (type)
8021 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8022 return omit_one_operand (type, arg1, arg0);
8026 if (operand_equal_p (arg0, arg1, 0))
8027 return omit_one_operand (type, arg0, arg1);
8028 if (INTEGRAL_TYPE_P (type)
8029 && TYPE_MAX_VALUE (type)
8030 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8031 return omit_one_operand (type, arg1, arg0);
8034 case TRUTH_NOT_EXPR:
8035 /* The argument to invert_truthvalue must have Boolean type. */
8036 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8037 arg0 = fold_convert (boolean_type_node, arg0);
8039 /* Note that the operand of this must be an int
8040 and its values must be 0 or 1.
8041 ("true" is a fixed value perhaps depending on the language,
8042 but we don't handle values other than 1 correctly yet.) */
8043 tem = invert_truthvalue (arg0);
8044 /* Avoid infinite recursion. */
8045 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8047 tem = fold_single_bit_test (code, arg0, arg1, type);
8052 return fold_convert (type, tem);
8054 case TRUTH_ANDIF_EXPR:
8055 /* Note that the operands of this must be ints
8056 and their values must be 0 or 1.
8057 ("true" is a fixed value perhaps depending on the language.) */
8058 /* If first arg is constant zero, return it. */
8059 if (integer_zerop (arg0))
8060 return fold_convert (type, arg0);
8061 case TRUTH_AND_EXPR:
8062 /* If either arg is constant true, drop it. */
8063 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8064 return non_lvalue (fold_convert (type, arg1));
8065 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8066 /* Preserve sequence points. */
8067 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8068 return non_lvalue (fold_convert (type, arg0));
8069 /* If second arg is constant zero, result is zero, but first arg
8070 must be evaluated. */
8071 if (integer_zerop (arg1))
8072 return omit_one_operand (type, arg1, arg0);
8073 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8074 case will be handled here. */
8075 if (integer_zerop (arg0))
8076 return omit_one_operand (type, arg0, arg1);
8078 /* !X && X is always false. */
8079 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8081 return omit_one_operand (type, integer_zero_node, arg1);
8082 /* X && !X is always false. */
8083 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8084 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8085 return omit_one_operand (type, integer_zero_node, arg0);
8087 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8088 means A >= Y && A != MAX, but in this case we know that
8091 if (!TREE_SIDE_EFFECTS (arg0)
8092 && !TREE_SIDE_EFFECTS (arg1))
8094 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8096 return fold (build2 (code, type, tem, arg1));
8098 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8100 return fold (build2 (code, type, arg0, tem));
8104 /* We only do these simplifications if we are optimizing. */
8108 /* Check for things like (A || B) && (A || C). We can convert this
8109 to A || (B && C). Note that either operator can be any of the four
8110 truth and/or operations and the transformation will still be
8111 valid. Also note that we only care about order for the
8112 ANDIF and ORIF operators. If B contains side effects, this
8113 might change the truth-value of A. */
8114 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8115 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8116 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8117 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8118 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8119 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8121 tree a00 = TREE_OPERAND (arg0, 0);
8122 tree a01 = TREE_OPERAND (arg0, 1);
8123 tree a10 = TREE_OPERAND (arg1, 0);
8124 tree a11 = TREE_OPERAND (arg1, 1);
8125 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8126 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8127 && (code == TRUTH_AND_EXPR
8128 || code == TRUTH_OR_EXPR));
8130 if (operand_equal_p (a00, a10, 0))
8131 return fold (build2 (TREE_CODE (arg0), type, a00,
8132 fold (build2 (code, type, a01, a11))));
8133 else if (commutative && operand_equal_p (a00, a11, 0))
8134 return fold (build2 (TREE_CODE (arg0), type, a00,
8135 fold (build2 (code, type, a01, a10))));
8136 else if (commutative && operand_equal_p (a01, a10, 0))
8137 return fold (build2 (TREE_CODE (arg0), type, a01,
8138 fold (build2 (code, type, a00, a11))));
8140 /* This case if tricky because we must either have commutative
8141 operators or else A10 must not have side-effects. */
8143 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8144 && operand_equal_p (a01, a11, 0))
8145 return fold (build2 (TREE_CODE (arg0), type,
8146 fold (build2 (code, type, a00, a10)),
8150 /* See if we can build a range comparison. */
8151 if (0 != (tem = fold_range_test (t)))
8154 /* Check for the possibility of merging component references. If our
8155 lhs is another similar operation, try to merge its rhs with our
8156 rhs. Then try to merge our lhs and rhs. */
8157 if (TREE_CODE (arg0) == code
8158 && 0 != (tem = fold_truthop (code, type,
8159 TREE_OPERAND (arg0, 1), arg1)))
8160 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8162 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8167 case TRUTH_ORIF_EXPR:
8168 /* Note that the operands of this must be ints
8169 and their values must be 0 or true.
8170 ("true" is a fixed value perhaps depending on the language.) */
8171 /* If first arg is constant true, return it. */
8172 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8173 return fold_convert (type, arg0);
8175 /* If either arg is constant zero, drop it. */
8176 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8177 return non_lvalue (fold_convert (type, arg1));
8178 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8179 /* Preserve sequence points. */
8180 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8181 return non_lvalue (fold_convert (type, arg0));
8182 /* If second arg is constant true, result is true, but we must
8183 evaluate first arg. */
8184 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8185 return omit_one_operand (type, arg1, arg0);
8186 /* Likewise for first arg, but note this only occurs here for
8188 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8189 return omit_one_operand (type, arg0, arg1);
8191 /* !X || X is always true. */
8192 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8193 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8194 return omit_one_operand (type, integer_one_node, arg1);
8195 /* X || !X is always true. */
8196 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8197 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8198 return omit_one_operand (type, integer_one_node, arg0);
8202 case TRUTH_XOR_EXPR:
8203 /* If the second arg is constant zero, drop it. */
8204 if (integer_zerop (arg1))
8205 return non_lvalue (fold_convert (type, arg0));
8206 /* If the second arg is constant true, this is a logical inversion. */
8207 if (integer_onep (arg1))
8208 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8209 /* Identical arguments cancel to zero. */
8210 if (operand_equal_p (arg0, arg1, 0))
8211 return omit_one_operand (type, integer_zero_node, arg0);
8213 /* !X ^ X is always true. */
8214 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8215 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8216 return omit_one_operand (type, integer_one_node, arg1);
8218 /* X ^ !X is always true. */
8219 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8221 return omit_one_operand (type, integer_one_node, arg0);
8231 /* If one arg is a real or integer constant, put it last. */
8232 if (tree_swap_operands_p (arg0, arg1, true))
8233 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8235 /* If this is an equality comparison of the address of a non-weak
8236 object against zero, then we know the result. */
8237 if ((code == EQ_EXPR || code == NE_EXPR)
8238 && TREE_CODE (arg0) == ADDR_EXPR
8239 && DECL_P (TREE_OPERAND (arg0, 0))
8240 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8241 && integer_zerop (arg1))
8242 return constant_boolean_node (code != EQ_EXPR, type);
8244 /* If this is an equality comparison of the address of two non-weak,
8245 unaliased symbols neither of which are extern (since we do not
8246 have access to attributes for externs), then we know the result. */
8247 if ((code == EQ_EXPR || code == NE_EXPR)
8248 && TREE_CODE (arg0) == ADDR_EXPR
8249 && DECL_P (TREE_OPERAND (arg0, 0))
8250 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8251 && ! lookup_attribute ("alias",
8252 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8253 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8254 && TREE_CODE (arg1) == ADDR_EXPR
8255 && DECL_P (TREE_OPERAND (arg1, 0))
8256 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8257 && ! lookup_attribute ("alias",
8258 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8259 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8260 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8261 ? code == EQ_EXPR : code != EQ_EXPR,
8264 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8266 tree targ0 = strip_float_extensions (arg0);
8267 tree targ1 = strip_float_extensions (arg1);
8268 tree newtype = TREE_TYPE (targ0);
8270 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8271 newtype = TREE_TYPE (targ1);
8273 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8274 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8275 return fold (build2 (code, type, fold_convert (newtype, targ0),
8276 fold_convert (newtype, targ1)));
8278 /* (-a) CMP (-b) -> b CMP a */
8279 if (TREE_CODE (arg0) == NEGATE_EXPR
8280 && TREE_CODE (arg1) == NEGATE_EXPR)
8281 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8282 TREE_OPERAND (arg0, 0)));
8284 if (TREE_CODE (arg1) == REAL_CST)
8286 REAL_VALUE_TYPE cst;
8287 cst = TREE_REAL_CST (arg1);
8289 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8290 if (TREE_CODE (arg0) == NEGATE_EXPR)
8292 fold (build2 (swap_tree_comparison (code), type,
8293 TREE_OPERAND (arg0, 0),
8294 build_real (TREE_TYPE (arg1),
8295 REAL_VALUE_NEGATE (cst))));
8297 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8298 /* a CMP (-0) -> a CMP 0 */
8299 if (REAL_VALUE_MINUS_ZERO (cst))
8300 return fold (build2 (code, type, arg0,
8301 build_real (TREE_TYPE (arg1), dconst0)));
8303 /* x != NaN is always true, other ops are always false. */
8304 if (REAL_VALUE_ISNAN (cst)
8305 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8307 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8308 return omit_one_operand (type, tem, arg0);
8311 /* Fold comparisons against infinity. */
8312 if (REAL_VALUE_ISINF (cst))
8314 tem = fold_inf_compare (code, type, arg0, arg1);
8315 if (tem != NULL_TREE)
8320 /* If this is a comparison of a real constant with a PLUS_EXPR
8321 or a MINUS_EXPR of a real constant, we can convert it into a
8322 comparison with a revised real constant as long as no overflow
8323 occurs when unsafe_math_optimizations are enabled. */
8324 if (flag_unsafe_math_optimizations
8325 && TREE_CODE (arg1) == REAL_CST
8326 && (TREE_CODE (arg0) == PLUS_EXPR
8327 || TREE_CODE (arg0) == MINUS_EXPR)
8328 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8329 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8330 ? MINUS_EXPR : PLUS_EXPR,
8331 arg1, TREE_OPERAND (arg0, 1), 0))
8332 && ! TREE_CONSTANT_OVERFLOW (tem))
8333 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8335 /* Likewise, we can simplify a comparison of a real constant with
8336 a MINUS_EXPR whose first operand is also a real constant, i.e.
8337 (c1 - x) < c2 becomes x > c1-c2. */
8338 if (flag_unsafe_math_optimizations
8339 && TREE_CODE (arg1) == REAL_CST
8340 && TREE_CODE (arg0) == MINUS_EXPR
8341 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8342 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8344 && ! TREE_CONSTANT_OVERFLOW (tem))
8345 return fold (build2 (swap_tree_comparison (code), type,
8346 TREE_OPERAND (arg0, 1), tem));
8348 /* Fold comparisons against built-in math functions. */
8349 if (TREE_CODE (arg1) == REAL_CST
8350 && flag_unsafe_math_optimizations
8351 && ! flag_errno_math)
8353 enum built_in_function fcode = builtin_mathfn_code (arg0);
8355 if (fcode != END_BUILTINS)
8357 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8358 if (tem != NULL_TREE)
8364 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8365 if (TREE_CONSTANT (arg1)
8366 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8367 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8368 /* This optimization is invalid for ordered comparisons
8369 if CONST+INCR overflows or if foo+incr might overflow.
8370 This optimization is invalid for floating point due to rounding.
8371 For pointer types we assume overflow doesn't happen. */
8372 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8373 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8374 && (code == EQ_EXPR || code == NE_EXPR))))
8376 tree varop, newconst;
8378 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8380 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8381 arg1, TREE_OPERAND (arg0, 1)));
8382 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8383 TREE_OPERAND (arg0, 0),
8384 TREE_OPERAND (arg0, 1));
8388 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8389 arg1, TREE_OPERAND (arg0, 1)));
8390 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8391 TREE_OPERAND (arg0, 0),
8392 TREE_OPERAND (arg0, 1));
8396 /* If VAROP is a reference to a bitfield, we must mask
8397 the constant by the width of the field. */
8398 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8399 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8400 && host_integerp (DECL_SIZE (TREE_OPERAND
8401 (TREE_OPERAND (varop, 0), 1)), 1))
8403 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8404 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8405 tree folded_compare, shift;
8407 /* First check whether the comparison would come out
8408 always the same. If we don't do that we would
8409 change the meaning with the masking. */
8410 folded_compare = fold (build2 (code, type,
8411 TREE_OPERAND (varop, 0), arg1));
8412 if (integer_zerop (folded_compare)
8413 || integer_onep (folded_compare))
8414 return omit_one_operand (type, folded_compare, varop);
8416 shift = build_int_cst (NULL_TREE,
8417 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8418 shift = fold_convert (TREE_TYPE (varop), shift);
8419 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8421 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8425 return fold (build2 (code, type, varop, newconst));
8428 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8429 This transformation affects the cases which are handled in later
8430 optimizations involving comparisons with non-negative constants. */
8431 if (TREE_CODE (arg1) == INTEGER_CST
8432 && TREE_CODE (arg0) != INTEGER_CST
8433 && tree_int_cst_sgn (arg1) > 0)
8438 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8439 return fold (build2 (GT_EXPR, type, arg0, arg1));
8442 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8443 return fold (build2 (LE_EXPR, type, arg0, arg1));
8450 /* Comparisons with the highest or lowest possible integer of
8451 the specified size will have known values.
8453 This is quite similar to fold_relational_hi_lo, however,
8454 attempts to share the code have been nothing but trouble. */
8456 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8458 if (TREE_CODE (arg1) == INTEGER_CST
8459 && ! TREE_CONSTANT_OVERFLOW (arg1)
8460 && width <= HOST_BITS_PER_WIDE_INT
8461 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8462 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8464 unsigned HOST_WIDE_INT signed_max;
8465 unsigned HOST_WIDE_INT max, min;
8467 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8469 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8471 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8477 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8480 if (TREE_INT_CST_HIGH (arg1) == 0
8481 && TREE_INT_CST_LOW (arg1) == max)
8485 return omit_one_operand (type, integer_zero_node, arg0);
8488 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8491 return omit_one_operand (type, integer_one_node, arg0);
8494 return fold (build2 (NE_EXPR, type, arg0, arg1));
8496 /* The GE_EXPR and LT_EXPR cases above are not normally
8497 reached because of previous transformations. */
8502 else if (TREE_INT_CST_HIGH (arg1) == 0
8503 && TREE_INT_CST_LOW (arg1) == max - 1)
8507 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8508 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8510 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8511 return fold (build2 (NE_EXPR, type, arg0, arg1));
8515 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8516 && TREE_INT_CST_LOW (arg1) == min)
8520 return omit_one_operand (type, integer_zero_node, arg0);
8523 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8526 return omit_one_operand (type, integer_one_node, arg0);
8529 return fold (build2 (NE_EXPR, type, arg0, arg1));
8534 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8535 && TREE_INT_CST_LOW (arg1) == min + 1)
8539 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8540 return fold (build2 (NE_EXPR, type, arg0, arg1));
8542 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8543 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8548 else if (!in_gimple_form
8549 && TREE_INT_CST_HIGH (arg1) == 0
8550 && TREE_INT_CST_LOW (arg1) == signed_max
8551 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8552 /* signed_type does not work on pointer types. */
8553 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8555 /* The following case also applies to X < signed_max+1
8556 and X >= signed_max+1 because previous transformations. */
8557 if (code == LE_EXPR || code == GT_EXPR)
8560 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8561 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8563 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8564 type, fold_convert (st0, arg0),
8565 fold_convert (st1, integer_zero_node)));
8571 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8572 a MINUS_EXPR of a constant, we can convert it into a comparison with
8573 a revised constant as long as no overflow occurs. */
8574 if ((code == EQ_EXPR || code == NE_EXPR)
8575 && TREE_CODE (arg1) == INTEGER_CST
8576 && (TREE_CODE (arg0) == PLUS_EXPR
8577 || TREE_CODE (arg0) == MINUS_EXPR)
8578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8579 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8580 ? MINUS_EXPR : PLUS_EXPR,
8581 arg1, TREE_OPERAND (arg0, 1), 0))
8582 && ! TREE_CONSTANT_OVERFLOW (tem))
8583 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8585 /* Similarly for a NEGATE_EXPR. */
8586 else if ((code == EQ_EXPR || code == NE_EXPR)
8587 && TREE_CODE (arg0) == NEGATE_EXPR
8588 && TREE_CODE (arg1) == INTEGER_CST
8589 && 0 != (tem = negate_expr (arg1))
8590 && TREE_CODE (tem) == INTEGER_CST
8591 && ! TREE_CONSTANT_OVERFLOW (tem))
8592 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8594 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8595 for !=. Don't do this for ordered comparisons due to overflow. */
8596 else if ((code == NE_EXPR || code == EQ_EXPR)
8597 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8598 return fold (build2 (code, type,
8599 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8601 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8602 && TREE_CODE (arg0) == NOP_EXPR)
8604 /* If we are widening one operand of an integer comparison,
8605 see if the other operand is similarly being widened. Perhaps we
8606 can do the comparison in the narrower type. */
8607 tem = fold_widened_comparison (code, type, arg0, arg1);
8611 /* Or if we are changing signedness. */
8612 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8617 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8618 constant, we can simplify it. */
8619 else if (TREE_CODE (arg1) == INTEGER_CST
8620 && (TREE_CODE (arg0) == MIN_EXPR
8621 || TREE_CODE (arg0) == MAX_EXPR)
8622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8623 return optimize_minmax_comparison (t);
8625 /* If we are comparing an ABS_EXPR with a constant, we can
8626 convert all the cases into explicit comparisons, but they may
8627 well not be faster than doing the ABS and one comparison.
8628 But ABS (X) <= C is a range comparison, which becomes a subtraction
8629 and a comparison, and is probably faster. */
8630 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8631 && TREE_CODE (arg0) == ABS_EXPR
8632 && ! TREE_SIDE_EFFECTS (arg0)
8633 && (0 != (tem = negate_expr (arg1)))
8634 && TREE_CODE (tem) == INTEGER_CST
8635 && ! TREE_CONSTANT_OVERFLOW (tem))
8636 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8637 build2 (GE_EXPR, type,
8638 TREE_OPERAND (arg0, 0), tem),
8639 build2 (LE_EXPR, type,
8640 TREE_OPERAND (arg0, 0), arg1)));
8642 /* If this is an EQ or NE comparison with zero and ARG0 is
8643 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8644 two operations, but the latter can be done in one less insn
8645 on machines that have only two-operand insns or on which a
8646 constant cannot be the first operand. */
8647 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8648 && TREE_CODE (arg0) == BIT_AND_EXPR)
8650 tree arg00 = TREE_OPERAND (arg0, 0);
8651 tree arg01 = TREE_OPERAND (arg0, 1);
8652 if (TREE_CODE (arg00) == LSHIFT_EXPR
8653 && integer_onep (TREE_OPERAND (arg00, 0)))
8655 fold (build2 (code, type,
8656 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8657 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8658 arg01, TREE_OPERAND (arg00, 1)),
8659 fold_convert (TREE_TYPE (arg0),
8662 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8663 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8665 fold (build2 (code, type,
8666 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8667 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8668 arg00, TREE_OPERAND (arg01, 1)),
8669 fold_convert (TREE_TYPE (arg0),
8674 /* If this is an NE or EQ comparison of zero against the result of a
8675 signed MOD operation whose second operand is a power of 2, make
8676 the MOD operation unsigned since it is simpler and equivalent. */
8677 if ((code == NE_EXPR || code == EQ_EXPR)
8678 && integer_zerop (arg1)
8679 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8680 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8681 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8682 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8683 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8684 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8686 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8687 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8688 fold_convert (newtype,
8689 TREE_OPERAND (arg0, 0)),
8690 fold_convert (newtype,
8691 TREE_OPERAND (arg0, 1))));
8693 return fold (build2 (code, type, newmod,
8694 fold_convert (newtype, arg1)));
8697 /* If this is an NE comparison of zero with an AND of one, remove the
8698 comparison since the AND will give the correct value. */
8699 if (code == NE_EXPR && integer_zerop (arg1)
8700 && TREE_CODE (arg0) == BIT_AND_EXPR
8701 && integer_onep (TREE_OPERAND (arg0, 1)))
8702 return fold_convert (type, arg0);
8704 /* If we have (A & C) == C where C is a power of 2, convert this into
8705 (A & C) != 0. Similarly for NE_EXPR. */
8706 if ((code == EQ_EXPR || code == NE_EXPR)
8707 && TREE_CODE (arg0) == BIT_AND_EXPR
8708 && integer_pow2p (TREE_OPERAND (arg0, 1))
8709 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8710 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8711 arg0, fold_convert (TREE_TYPE (arg0),
8712 integer_zero_node)));
8714 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8715 2, then fold the expression into shifts and logical operations. */
8716 tem = fold_single_bit_test (code, arg0, arg1, type);
8720 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8721 Similarly for NE_EXPR. */
8722 if ((code == EQ_EXPR || code == NE_EXPR)
8723 && TREE_CODE (arg0) == BIT_AND_EXPR
8724 && TREE_CODE (arg1) == INTEGER_CST
8725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8727 tree notc = fold (build1 (BIT_NOT_EXPR,
8728 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8729 TREE_OPERAND (arg0, 1)));
8730 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8732 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8733 if (integer_nonzerop (dandnotc))
8734 return omit_one_operand (type, rslt, arg0);
8737 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8738 Similarly for NE_EXPR. */
8739 if ((code == EQ_EXPR || code == NE_EXPR)
8740 && TREE_CODE (arg0) == BIT_IOR_EXPR
8741 && TREE_CODE (arg1) == INTEGER_CST
8742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8744 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8745 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8746 TREE_OPERAND (arg0, 1), notd));
8747 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8748 if (integer_nonzerop (candnotd))
8749 return omit_one_operand (type, rslt, arg0);
8752 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8753 and similarly for >= into !=. */
8754 if ((code == LT_EXPR || code == GE_EXPR)
8755 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8756 && TREE_CODE (arg1) == LSHIFT_EXPR
8757 && integer_onep (TREE_OPERAND (arg1, 0)))
8758 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8759 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8760 TREE_OPERAND (arg1, 1)),
8761 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8763 else if ((code == LT_EXPR || code == GE_EXPR)
8764 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8765 && (TREE_CODE (arg1) == NOP_EXPR
8766 || TREE_CODE (arg1) == CONVERT_EXPR)
8767 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8768 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8770 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8771 fold_convert (TREE_TYPE (arg0),
8772 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8773 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8775 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8777 /* Simplify comparison of something with itself. (For IEEE
8778 floating-point, we can only do some of these simplifications.) */
8779 if (operand_equal_p (arg0, arg1, 0))
8784 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8785 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8786 return constant_boolean_node (1, type);
8791 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8792 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8793 return constant_boolean_node (1, type);
8794 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8797 /* For NE, we can only do this simplification if integer
8798 or we don't honor IEEE floating point NaNs. */
8799 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8800 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8802 /* ... fall through ... */
8805 return constant_boolean_node (0, type);
8811 /* If we are comparing an expression that just has comparisons
8812 of two integer values, arithmetic expressions of those comparisons,
8813 and constants, we can simplify it. There are only three cases
8814 to check: the two values can either be equal, the first can be
8815 greater, or the second can be greater. Fold the expression for
8816 those three values. Since each value must be 0 or 1, we have
8817 eight possibilities, each of which corresponds to the constant 0
8818 or 1 or one of the six possible comparisons.
8820 This handles common cases like (a > b) == 0 but also handles
8821 expressions like ((x > y) - (y > x)) > 0, which supposedly
8822 occur in macroized code. */
8824 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8826 tree cval1 = 0, cval2 = 0;
8829 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8830 /* Don't handle degenerate cases here; they should already
8831 have been handled anyway. */
8832 && cval1 != 0 && cval2 != 0
8833 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8834 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8835 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8836 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8837 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8838 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8839 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8841 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8842 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8844 /* We can't just pass T to eval_subst in case cval1 or cval2
8845 was the same as ARG1. */
8848 = fold (build2 (code, type,
8849 eval_subst (arg0, cval1, maxval,
8853 = fold (build2 (code, type,
8854 eval_subst (arg0, cval1, maxval,
8858 = fold (build2 (code, type,
8859 eval_subst (arg0, cval1, minval,
8863 /* All three of these results should be 0 or 1. Confirm they
8864 are. Then use those values to select the proper code
8867 if ((integer_zerop (high_result)
8868 || integer_onep (high_result))
8869 && (integer_zerop (equal_result)
8870 || integer_onep (equal_result))
8871 && (integer_zerop (low_result)
8872 || integer_onep (low_result)))
8874 /* Make a 3-bit mask with the high-order bit being the
8875 value for `>', the next for '=', and the low for '<'. */
8876 switch ((integer_onep (high_result) * 4)
8877 + (integer_onep (equal_result) * 2)
8878 + integer_onep (low_result))
8882 return omit_one_operand (type, integer_zero_node, arg0);
8903 return omit_one_operand (type, integer_one_node, arg0);
8906 tem = build2 (code, type, cval1, cval2);
8908 return save_expr (tem);
8915 /* If this is a comparison of a field, we may be able to simplify it. */
8916 if (((TREE_CODE (arg0) == COMPONENT_REF
8917 && lang_hooks.can_use_bit_fields_p ())
8918 || TREE_CODE (arg0) == BIT_FIELD_REF)
8919 && (code == EQ_EXPR || code == NE_EXPR)
8920 /* Handle the constant case even without -O
8921 to make sure the warnings are given. */
8922 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8924 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8929 /* If this is a comparison of complex values and either or both sides
8930 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8931 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8932 This may prevent needless evaluations. */
8933 if ((code == EQ_EXPR || code == NE_EXPR)
8934 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8935 && (TREE_CODE (arg0) == COMPLEX_EXPR
8936 || TREE_CODE (arg1) == COMPLEX_EXPR
8937 || TREE_CODE (arg0) == COMPLEX_CST
8938 || TREE_CODE (arg1) == COMPLEX_CST))
8940 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8941 tree real0, imag0, real1, imag1;
8943 arg0 = save_expr (arg0);
8944 arg1 = save_expr (arg1);
8945 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8946 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8947 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8948 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8950 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8953 fold (build2 (code, type, real0, real1)),
8954 fold (build2 (code, type, imag0, imag1))));
8957 /* Optimize comparisons of strlen vs zero to a compare of the
8958 first character of the string vs zero. To wit,
8959 strlen(ptr) == 0 => *ptr == 0
8960 strlen(ptr) != 0 => *ptr != 0
8961 Other cases should reduce to one of these two (or a constant)
8962 due to the return value of strlen being unsigned. */
8963 if ((code == EQ_EXPR || code == NE_EXPR)
8964 && integer_zerop (arg1)
8965 && TREE_CODE (arg0) == CALL_EXPR)
8967 tree fndecl = get_callee_fndecl (arg0);
8971 && DECL_BUILT_IN (fndecl)
8972 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8973 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8974 && (arglist = TREE_OPERAND (arg0, 1))
8975 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8976 && ! TREE_CHAIN (arglist))
8977 return fold (build2 (code, type,
8978 build1 (INDIRECT_REF, char_type_node,
8979 TREE_VALUE (arglist)),
8980 fold_convert (char_type_node,
8981 integer_zero_node)));
8984 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8985 into a single range test. */
8986 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8987 && TREE_CODE (arg1) == INTEGER_CST
8988 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8989 && !integer_zerop (TREE_OPERAND (arg0, 1))
8990 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8991 && !TREE_OVERFLOW (arg1))
8993 t1 = fold_div_compare (code, type, arg0, arg1);
8994 if (t1 != NULL_TREE)
8998 if ((code == EQ_EXPR || code == NE_EXPR)
8999 && !TREE_SIDE_EFFECTS (arg0)
9000 && integer_zerop (arg1)
9001 && tree_expr_nonzero_p (arg0))
9002 return constant_boolean_node (code==NE_EXPR, type);
9004 t1 = fold_relational_const (code, type, arg0, arg1);
9005 return t1 == NULL_TREE ? t : t1;
9007 case UNORDERED_EXPR:
9015 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9017 t1 = fold_relational_const (code, type, arg0, arg1);
9018 if (t1 != NULL_TREE)
9022 /* If the first operand is NaN, the result is constant. */
9023 if (TREE_CODE (arg0) == REAL_CST
9024 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9025 && (code != LTGT_EXPR || ! flag_trapping_math))
9027 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9030 return omit_one_operand (type, t1, arg1);
9033 /* If the second operand is NaN, the result is constant. */
9034 if (TREE_CODE (arg1) == REAL_CST
9035 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9036 && (code != LTGT_EXPR || ! flag_trapping_math))
9038 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9041 return omit_one_operand (type, t1, arg0);
9044 /* Simplify unordered comparison of something with itself. */
9045 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9046 && operand_equal_p (arg0, arg1, 0))
9047 return constant_boolean_node (1, type);
9049 if (code == LTGT_EXPR
9050 && !flag_trapping_math
9051 && operand_equal_p (arg0, arg1, 0))
9052 return constant_boolean_node (0, type);
9054 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9056 tree targ0 = strip_float_extensions (arg0);
9057 tree targ1 = strip_float_extensions (arg1);
9058 tree newtype = TREE_TYPE (targ0);
9060 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9061 newtype = TREE_TYPE (targ1);
9063 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9064 return fold (build2 (code, type, fold_convert (newtype, targ0),
9065 fold_convert (newtype, targ1)));
9071 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9072 so all simple results must be passed through pedantic_non_lvalue. */
9073 if (TREE_CODE (arg0) == INTEGER_CST)
9075 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9076 /* Only optimize constant conditions when the selected branch
9077 has the same type as the COND_EXPR. This avoids optimizing
9078 away "c ? x : throw", where the throw has a void type. */
9079 if (! VOID_TYPE_P (TREE_TYPE (tem))
9080 || VOID_TYPE_P (type))
9081 return pedantic_non_lvalue (tem);
9084 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9085 return pedantic_omit_one_operand (type, arg1, arg0);
9087 /* If we have A op B ? A : C, we may be able to convert this to a
9088 simpler expression, depending on the operation and the values
9089 of B and C. Signed zeros prevent all of these transformations,
9090 for reasons given above each one.
9092 Also try swapping the arguments and inverting the conditional. */
9093 if (COMPARISON_CLASS_P (arg0)
9094 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9095 arg1, TREE_OPERAND (arg0, 1))
9096 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9098 tem = fold_cond_expr_with_comparison (type, arg0,
9099 TREE_OPERAND (t, 1),
9100 TREE_OPERAND (t, 2));
9105 if (COMPARISON_CLASS_P (arg0)
9106 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9107 TREE_OPERAND (t, 2),
9108 TREE_OPERAND (arg0, 1))
9109 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9111 tem = invert_truthvalue (arg0);
9112 if (COMPARISON_CLASS_P (tem))
9114 tem = fold_cond_expr_with_comparison (type, tem,
9115 TREE_OPERAND (t, 2),
9116 TREE_OPERAND (t, 1));
9122 /* If the second operand is simpler than the third, swap them
9123 since that produces better jump optimization results. */
9124 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9125 TREE_OPERAND (t, 2), false))
9127 /* See if this can be inverted. If it can't, possibly because
9128 it was a floating-point inequality comparison, don't do
9130 tem = invert_truthvalue (arg0);
9132 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9133 return fold (build3 (code, type, tem,
9134 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9137 /* Convert A ? 1 : 0 to simply A. */
9138 if (integer_onep (TREE_OPERAND (t, 1))
9139 && integer_zerop (TREE_OPERAND (t, 2))
9140 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9141 call to fold will try to move the conversion inside
9142 a COND, which will recurse. In that case, the COND_EXPR
9143 is probably the best choice, so leave it alone. */
9144 && type == TREE_TYPE (arg0))
9145 return pedantic_non_lvalue (arg0);
9147 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9148 over COND_EXPR in cases such as floating point comparisons. */
9149 if (integer_zerop (TREE_OPERAND (t, 1))
9150 && integer_onep (TREE_OPERAND (t, 2))
9151 && truth_value_p (TREE_CODE (arg0)))
9152 return pedantic_non_lvalue (fold_convert (type,
9153 invert_truthvalue (arg0)));
9155 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9156 if (TREE_CODE (arg0) == LT_EXPR
9157 && integer_zerop (TREE_OPERAND (arg0, 1))
9158 && integer_zerop (TREE_OPERAND (t, 2))
9159 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9160 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9161 TREE_TYPE (tem), tem, arg1)));
9163 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9164 already handled above. */
9165 if (TREE_CODE (arg0) == BIT_AND_EXPR
9166 && integer_onep (TREE_OPERAND (arg0, 1))
9167 && integer_zerop (TREE_OPERAND (t, 2))
9168 && integer_pow2p (arg1))
9170 tree tem = TREE_OPERAND (arg0, 0);
9172 if (TREE_CODE (tem) == RSHIFT_EXPR
9173 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9174 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9175 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9176 return fold (build2 (BIT_AND_EXPR, type,
9177 TREE_OPERAND (tem, 0), arg1));
9180 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9181 is probably obsolete because the first operand should be a
9182 truth value (that's why we have the two cases above), but let's
9183 leave it in until we can confirm this for all front-ends. */
9184 if (integer_zerop (TREE_OPERAND (t, 2))
9185 && TREE_CODE (arg0) == NE_EXPR
9186 && integer_zerop (TREE_OPERAND (arg0, 1))
9187 && integer_pow2p (arg1)
9188 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9189 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9190 arg1, OEP_ONLY_CONST))
9191 return pedantic_non_lvalue (fold_convert (type,
9192 TREE_OPERAND (arg0, 0)));
9194 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9195 if (integer_zerop (TREE_OPERAND (t, 2))
9196 && truth_value_p (TREE_CODE (arg0))
9197 && truth_value_p (TREE_CODE (arg1)))
9198 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9200 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9201 if (integer_onep (TREE_OPERAND (t, 2))
9202 && truth_value_p (TREE_CODE (arg0))
9203 && truth_value_p (TREE_CODE (arg1)))
9205 /* Only perform transformation if ARG0 is easily inverted. */
9206 tem = invert_truthvalue (arg0);
9207 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9208 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9211 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9212 if (integer_zerop (arg1)
9213 && truth_value_p (TREE_CODE (arg0))
9214 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9216 /* Only perform transformation if ARG0 is easily inverted. */
9217 tem = invert_truthvalue (arg0);
9218 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9219 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9220 TREE_OPERAND (t, 2)));
9223 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9224 if (integer_onep (arg1)
9225 && truth_value_p (TREE_CODE (arg0))
9226 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9227 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9228 TREE_OPERAND (t, 2)));
9233 /* When pedantic, a compound expression can be neither an lvalue
9234 nor an integer constant expression. */
9235 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9237 /* Don't let (0, 0) be null pointer constant. */
9238 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9239 : fold_convert (type, arg1);
9240 return pedantic_non_lvalue (tem);
9244 return build_complex (type, arg0, arg1);
9248 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9250 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9251 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9252 TREE_OPERAND (arg0, 1));
9253 else if (TREE_CODE (arg0) == COMPLEX_CST)
9254 return TREE_REALPART (arg0);
9255 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9256 return fold (build2 (TREE_CODE (arg0), type,
9257 fold (build1 (REALPART_EXPR, type,
9258 TREE_OPERAND (arg0, 0))),
9259 fold (build1 (REALPART_EXPR, type,
9260 TREE_OPERAND (arg0, 1)))));
9264 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9265 return fold_convert (type, integer_zero_node);
9266 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9267 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9268 TREE_OPERAND (arg0, 0));
9269 else if (TREE_CODE (arg0) == COMPLEX_CST)
9270 return TREE_IMAGPART (arg0);
9271 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9272 return fold (build2 (TREE_CODE (arg0), type,
9273 fold (build1 (IMAGPART_EXPR, type,
9274 TREE_OPERAND (arg0, 0))),
9275 fold (build1 (IMAGPART_EXPR, type,
9276 TREE_OPERAND (arg0, 1)))));
9280 /* Check for a built-in function. */
9281 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9282 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9284 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9286 tree tmp = fold_builtin (t, false);
9294 } /* switch (code) */
9297 #ifdef ENABLE_FOLD_CHECKING
9300 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9301 static void fold_check_failed (tree, tree);
9302 void print_fold_checksum (tree);
9304 /* When --enable-checking=fold, compute a digest of expr before
9305 and after actual fold call to see if fold did not accidentally
9306 change original expr. */
/* NOTE(review): the `fold' signature and the local declarations (the md5
   context CTX, the pointer hash table HT and the result RET) fall in a gap
   of this listing; the code below is the body of the checking wrapper that
   delegates the real work to fold_1.  */
9313 unsigned char checksum_before[16], checksum_after[16];
/* Digest EXPR before folding.  */
9316 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9317 md5_init_ctx (&ctx);
9318 fold_checksum_tree (expr, &ctx, ht);
9319 md5_finish_ctx (&ctx, checksum_before);
/* Perform the actual folding.  */
9322 ret = fold_1 (expr);
/* Digest EXPR again; fold_1 must never modify its argument in place.  */
9324 md5_init_ctx (&ctx);
9325 fold_checksum_tree (expr, &ctx, ht);
9326 md5_finish_ctx (&ctx, checksum_after);
/* Any difference means fold_1 mutated the original tree -- a checking
   failure.  */
9329 if (memcmp (checksum_before, checksum_after, 16))
9330 fold_check_failed (expr, ret);
/* Debugging aid: print the md5 digest of tree EXPR to stderr as 32 hex
   digits followed by a newline.  */
9336 print_fold_checksum (tree expr)
9339 unsigned char checksum[16], cnt;
/* Compute the digest with the same routine the fold checker uses.  */
9342 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9343 md5_init_ctx (&ctx);
9344 fold_checksum_tree (expr, &ctx, ht);
9345 md5_finish_ctx (&ctx, checksum);
/* Emit the 16 digest bytes in hexadecimal.  */
9347 for (cnt = 0; cnt < 16; ++cnt)
9348 fprintf (stderr, "%02x", checksum[cnt]);
9349 putc ('\n', stderr);
/* Report that fold modified the tree it was handed; under
   --enable-checking=fold this is a hard internal compiler error.
   EXPR and RET are currently unused by the diagnostic itself.  */
9353 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9355 internal_error ("fold check: original tree changed by fold");
/* Fold the bytes of tree node EXPR -- and, recursively, of the trees it
   references -- into md5 context CTX.  HT is a pointer hash table used to
   visit each node at most once, breaking cycles in the tree graph.
   Fields that fold is allowed to modify (assembler names, cached type
   values) are masked out by hashing a scrubbed copy instead.  */
9359 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9362 enum tree_code code;
9363 char buf[sizeof (struct tree_decl)];
/* BUF must be large enough to hold a copy of any node we scrub below.  */
9366 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9367 <= sizeof (struct tree_decl))
9368 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
/* Skip nodes we have already visited.  */
9371 slot = htab_find_slot (ht, expr, INSERT);
9375 code = TREE_CODE (expr);
9376 if (TREE_CODE_CLASS (code) == tcc_declaration
9377 && DECL_ASSEMBLER_NAME_SET_P (expr))
9379 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9380 memcpy (buf, expr, tree_size (expr))
9382 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9384 else if (TREE_CODE_CLASS (code) == tcc_type
9385 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9386 || TYPE_CACHED_VALUES_P (expr)))
9388 /* Allow these fields to be modified. */
9389 memcpy (buf, expr, tree_size (expr));
9391 TYPE_POINTER_TO (expr) = NULL;
9392 TYPE_REFERENCE_TO (expr) = NULL;
9393 TYPE_CACHED_VALUES_P (expr) = 0;
9394 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node itself, then its type, then (for nodes that are neither
   types nor declarations) its TREE_CHAIN.  */
9396 md5_process_bytes (expr, tree_size (expr), ctx);
9397 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9398 if (TREE_CODE_CLASS (code) != tcc_type
9399 && TREE_CODE_CLASS (code) != tcc_declaration)
9400 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Recurse into the fields specific to each tree-code class.  */
9401 switch (TREE_CODE_CLASS (code))
9407 md5_process_bytes (TREE_STRING_POINTER (expr),
9408 TREE_STRING_LENGTH (expr), ctx);
9411 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9412 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9415 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9421 case tcc_exceptional:
9425 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9426 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9429 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9430 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* Expression-like nodes: hash every operand.  */
9436 case tcc_expression:
9438 case tcc_comparison:
9442 len = TREE_CODE_LENGTH (code);
9443 for (i = 0; i < len; ++i)
9444 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9446 case tcc_declaration:
9447 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9448 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9449 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9450 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9451 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9452 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9453 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9454 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9455 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9456 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9457 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9460 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9461 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9462 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9463 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9464 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9465 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
/* Only integral and scalar-float types carry min/max values here.  */
9466 if (INTEGRAL_TYPE_P (expr)
9467 || SCALAR_FLOAT_TYPE_P (expr))
9469 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9470 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9472 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9473 if (TREE_CODE (expr) == RECORD_TYPE
9474 || TREE_CODE (expr) == UNION_TYPE
9475 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9476 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9477 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9486 /* Perform constant folding and related simplification of initializer
9487 expression EXPR. This behaves identically to "fold" but ignores
9488 potential run-time traps and exceptions that fold must preserve. */
9491 fold_initializer (tree expr)
/* Save the trap-sensitivity flags...  */
9493 int saved_signaling_nans = flag_signaling_nans;
9494 int saved_trapping_math = flag_trapping_math;
9495 int saved_trapv = flag_trapv;
/* ...and clear them, since an initializer is evaluated at compile time
   and cannot trap at run time.  */
9498 flag_signaling_nans = 0;
9499 flag_trapping_math = 0;
9502 result = fold (expr);
/* Restore the caller's flag settings before returning.  */
9504 flag_signaling_nans = saved_signaling_nans;
9505 flag_trapping_math = saved_trapping_math;
9506 flag_trapv = saved_trapv;
9511 /* Determine if first argument is a multiple of second argument. Return 0 if
9512 it is not, or we cannot easily determine it to be.
9514 An example of the sort of thing we care about (at this point; this routine
9515 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9516 fold cases do now) is discovering that
9518 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9524 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9526 This code also handles discovering that
9528 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9530 is a multiple of 8 so we don't have to worry about dealing with a
9533 Note that we *look* inside a SAVE_EXPR only to determine how it was
9534 calculated; it is not safe for fold to do much of anything else with the
9535 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9536 at run time. For example, the latter example above *cannot* be implemented
9537 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9538 evaluation time of the original SAVE_EXPR is not necessarily the same at
9539 the time the new expression is evaluated. The only optimization of this
9540 sort that would be valid is changing
9542 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9546 SAVE_EXPR (I) * SAVE_EXPR (J)
9548 (where the same SAVE_EXPR (J) is used in the original and the
9549 transformed version). */
/* Return nonzero if TOP, an expression of integer type TYPE, is known to
   be an exact multiple of BOTTOM; return 0 if it is not or if we cannot
   easily tell.  See the long comment above for examples and for why we
   may only *look* inside SAVE_EXPRs.  */
9552 multiple_of_p (tree type, tree top, tree bottom)
/* An expression is trivially a multiple of itself.  */
9554 if (operand_equal_p (top, bottom, 0))
/* Only plain integer types are handled.  */
9557 if (TREE_CODE (type) != INTEGER_TYPE)
9560 switch (TREE_CODE (top))
9563 /* Bitwise and provides a power of two multiple. If the mask is
9564 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
9565 if (!integer_pow2p (bottom))
/* Either operand being a multiple suffices here.  */
9570 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9571 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* Both operands must be multiples for the result to be one
   (presumably the PLUS/MINUS cases; the case labels fall in a gap
   of this listing -- confirm against the full source).  */
9575 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9576 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A left shift by constant N is a multiplication by 1 << N; rewrite
   it as such and recurse.  */
9579 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST
9583 op1 = TREE_OPERAND (top, 1);
9584 /* const_binop may not detect overflow correctly,
9585 so check for it explicitly here. */
9586 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9587 > TREE_INT_CST_LOW (op1)
9588 && TREE_INT_CST_HIGH (op1) == 0
9589 && 0 != (t1 = fold_convert (type,
9590 const_binop (LSHIFT_EXPR,
9593 && ! TREE_OVERFLOW (t1))
9594 return multiple_of_p (type, t1, bottom);
9599 /* Can't handle conversions from non-integral or wider integral type. */
9600 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9601 || (TYPE_PRECISION (type)
9602 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9605 /* .. fall through ... */
9608 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant case: decide with TRUNC_MOD_EXPR, but refuse when an
   unsigned interpretation would misread a negative constant.  */
9611 if (TREE_CODE (bottom) != INTEGER_CST
9612 || (TYPE_UNSIGNED (type)
9613 && (tree_int_cst_sgn (top) < 0
9614 || tree_int_cst_sgn (bottom) < 0)))
9616 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9624 /* Return true if `t' is known to be non-negative. */
/* Return true if expression T is known to be non-negative; return false
   when we cannot tell (a conservative answer, not a proof that T is
   negative).  Dispatches on TREE_CODE (T); note that many case labels
   fall in gaps of this listing.  */
9627 tree_expr_nonnegative_p (tree t)
9629 switch (TREE_CODE (t))
/* Constants: inspect the sign directly.  */
9635 return tree_int_cst_sgn (t) >= 0;
9638 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, nonneg + nonneg is nonneg (no wraparound).  */
9641 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9642 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9643 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9645 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9646 both unsigned and at least 2 bits shorter than the result. */
9647 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9648 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9649 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9651 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9652 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9653 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9654 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9656 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9657 TYPE_PRECISION (inner2)) + 1;
9658 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
9664 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9666 /* x * x for floating point x is always non-negative. */
9667 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9669 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9670 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9673 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9674 both unsigned and their total bits is shorter than the result. */
9675 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9676 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9677 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9679 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9680 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9681 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9682 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9683 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9684 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonneg / nonneg is nonneg.  */
9688 case TRUNC_DIV_EXPR:
9690 case FLOOR_DIV_EXPR:
9691 case ROUND_DIV_EXPR:
9692 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9693 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: the sign follows the dividend.  */
9695 case TRUNC_MOD_EXPR:
9697 case FLOOR_MOD_EXPR:
9698 case ROUND_MOD_EXPR:
9699 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9702 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9703 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9706 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9707 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9710 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9711 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: examine inner and outer types.  */
9715 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9716 tree outer_type = TREE_TYPE (t);
9718 if (TREE_CODE (outer_type) == REAL_TYPE)
9720 if (TREE_CODE (inner_type) == REAL_TYPE)
9721 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9722 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9724 if (TYPE_UNSIGNED (inner_type))
9726 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9729 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9731 if (TREE_CODE (inner_type) == REAL_TYPE)
9732 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
/* A widening conversion of an unsigned value cannot go negative.  */
9733 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9734 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9735 && TYPE_UNSIGNED (inner_type);
/* A conditional is nonneg when both arms are.  */
9741 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9742 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9744 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9746 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9747 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9749 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9750 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9752 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9754 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9756 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9757 case NON_LVALUE_EXPR:
9758 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9760 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* TARGET_EXPR: look at what the initializer stores into the slot.  */
9764 tree temp = TARGET_EXPR_SLOT (t);
9765 t = TARGET_EXPR_INITIAL (t);
9767 /* If the initializer is non-void, then it's a normal expression
9768 that will be assigned to the slot. */
9769 if (!VOID_TYPE_P (t))
9770 return tree_expr_nonnegative_p (t);
9772 /* Otherwise, the initializer sets the slot in some way. One common
9773 way is an assignment statement at the end of the initializer. */
9776 if (TREE_CODE (t) == BIND_EXPR)
9777 t = expr_last (BIND_EXPR_BODY (t));
9778 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9779 || TREE_CODE (t) == TRY_CATCH_EXPR)
9780 t = expr_last (TREE_OPERAND (t, 0));
9781 else if (TREE_CODE (t) == STATEMENT_LIST)
9786 if (TREE_CODE (t) == MODIFY_EXPR
9787 && TREE_OPERAND (t, 0) == temp)
9788 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Calls to known builtins whose results are provably non-negative.  */
9795 tree fndecl = get_callee_fndecl (t);
9796 tree arglist = TREE_OPERAND (t, 1);
9798 && DECL_BUILT_IN (fndecl)
9799 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9800 switch (DECL_FUNCTION_CODE (fndecl))
9802 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9803 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9804 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9805 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9807 CASE_BUILTIN_F (BUILT_IN_ACOS)
9808 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9809 CASE_BUILTIN_F (BUILT_IN_CABS)
9810 CASE_BUILTIN_F (BUILT_IN_COSH)
9811 CASE_BUILTIN_F (BUILT_IN_ERFC)
9812 CASE_BUILTIN_F (BUILT_IN_EXP)
9813 CASE_BUILTIN_F (BUILT_IN_EXP10)
9814 CASE_BUILTIN_F (BUILT_IN_EXP2)
9815 CASE_BUILTIN_F (BUILT_IN_FABS)
9816 CASE_BUILTIN_F (BUILT_IN_FDIM)
9817 CASE_BUILTIN_F (BUILT_IN_FREXP)
9818 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9819 CASE_BUILTIN_F (BUILT_IN_POW10)
9820 CASE_BUILTIN_I (BUILT_IN_FFS)
9821 CASE_BUILTIN_I (BUILT_IN_PARITY)
9822 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9826 CASE_BUILTIN_F (BUILT_IN_SQRT)
9827 /* sqrt(-0.0) is -0.0. */
9828 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9830 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9832 CASE_BUILTIN_F (BUILT_IN_ASINH)
9833 CASE_BUILTIN_F (BUILT_IN_ATAN)
9834 CASE_BUILTIN_F (BUILT_IN_ATANH)
9835 CASE_BUILTIN_F (BUILT_IN_CBRT)
9836 CASE_BUILTIN_F (BUILT_IN_CEIL)
9837 CASE_BUILTIN_F (BUILT_IN_ERF)
9838 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9839 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9840 CASE_BUILTIN_F (BUILT_IN_FMOD)
9841 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9842 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9843 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9844 CASE_BUILTIN_F (BUILT_IN_LRINT)
9845 CASE_BUILTIN_F (BUILT_IN_LROUND)
9846 CASE_BUILTIN_F (BUILT_IN_MODF)
9847 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9848 CASE_BUILTIN_F (BUILT_IN_POW)
9849 CASE_BUILTIN_F (BUILT_IN_RINT)
9850 CASE_BUILTIN_F (BUILT_IN_ROUND)
9851 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9852 CASE_BUILTIN_F (BUILT_IN_SINH)
9853 CASE_BUILTIN_F (BUILT_IN_TANH)
9854 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9855 /* True if the 1st argument is nonnegative. */
9856 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9858 CASE_BUILTIN_F (BUILT_IN_FMAX)
9859 /* True if the 1st OR 2nd arguments are nonnegative. */
9860 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9861 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9863 CASE_BUILTIN_F (BUILT_IN_FMIN)
9864 /* True if the 1st AND 2nd arguments are nonnegative. */
9865 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9866 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9868 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9869 /* True if the 2nd argument is nonnegative. */
9870 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9874 #undef CASE_BUILTIN_F
9875 #undef CASE_BUILTIN_I
9879 /* ... fall through ... */
9882 if (truth_value_p (TREE_CODE (t)))
9883 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9887 /* We don't know sign of `t', so be conservative and return false. */
9891 /* Return true when T is an address and is known to be nonzero.
9892 For floating point we further ensure that T is not denormal.
9893 Similar logic is present in nonzero_address in rtlanal.h. */
/* Return true when T is known to be nonzero (for pointers: a non-null,
   non-weak address).  False means "unknown", not "zero".  See the
   comment above; similar logic lives in nonzero_address in rtlanal.  */
9896 tree_expr_nonzero_p (tree t)
9898 tree type = TREE_TYPE (t);
9900 /* Doing something useful for floating point would need more work. */
9901 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9904 switch (TREE_CODE (t))
/* Negation of a nonzero value is nonzero when signed overflow is
   undefined (no wrap to zero possible).  */
9907 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9908 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9911 /* We used to test for !integer_zerop here. This does not work correctly
9912 if TREE_CONSTANT_OVERFLOW (t). */
9913 return (TREE_INT_CST_LOW (t) != 0
9914 || TREE_INT_CST_HIGH (t) != 0);
/* Addition.  */
9917 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9919 /* With the presence of negative values it is hard
9920 to say something. */
9921 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9922 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9924 /* One of operands must be positive and the other non-negative. */
9925 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9926 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Multiplication: nonzero * nonzero is nonzero absent wraparound.  */
9931 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9933 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9934 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions: safe only when no truncation can occur.  */
9940 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9941 tree outer_type = TREE_TYPE (t);
9943 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9944 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Taking an address: non-weak decls and constants are never null.  */
9950 tree base = get_base_address (TREE_OPERAND (t, 0));
9955 /* Weak declarations may link to NULL. */
9957 return !DECL_WEAK (base);
9959 /* Constants are never weak. */
9960 if (CONSTANT_CLASS_P (base))
/* A conditional is nonzero when both arms are.  */
9967 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9968 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9971 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9972 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9975 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9977 /* When both operands are nonzero, then MAX must be too. */
9978 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9981 /* MAX where operand 0 is positive is positive. */
9982 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9984 /* MAX where operand 1 is positive is positive. */
9985 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9986 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9993 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9996 case NON_LVALUE_EXPR:
9997 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10000 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10001 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10009 /* See if we are applying CODE, a relational to the highest or lowest
10010 possible integer of TYPE. If so, then the result is a compile
10014 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
/* On entry *CODE_P, *OP0_P and *OP1_P describe the comparison; the
   function may canonicalize them in place (adjusting the constant by
   one) or return a folded constant result outright.  NULL_TREE is
   presumably returned when nothing folds — TODO confirm against the
   missing tail of the function.  */
10019 enum tree_code code = *code_p;
10020 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only handle single-word integer or pointer constants that did not
   overflow when they were computed.  */
10022 if (TREE_CODE (op1) == INTEGER_CST
10023 && ! TREE_CONSTANT_OVERFLOW (op1)
10024 && width <= HOST_BITS_PER_WIDE_INT
10025 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10026 || POINTER_TYPE_P (TREE_TYPE (op1))))
10028 unsigned HOST_WIDE_INT signed_max;
10029 unsigned HOST_WIDE_INT max, min;
/* Largest representable signed value for this bit width.  */
10031 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10033 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10035 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10041 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 is the type's maximum value: comparisons decided by the extreme
   fold directly to constant 0 or 1 (OP0 still evaluated for side
   effects via omit_one_operand).  */
10044 if (TREE_INT_CST_HIGH (op1) == 0
10045 && TREE_INT_CST_LOW (op1) == max)
10049 return omit_one_operand (type, integer_zero_node, op0);
10055 return omit_one_operand (type, integer_one_node, op0);
10061 /* The GE_EXPR and LT_EXPR cases above are not normally
10062 reached because of previous transformations. */
/* OP1 is MAX-1: bump the constant by one so the comparison is against
   the extreme value handled above.  */
10067 else if (TREE_INT_CST_HIGH (op1) == 0
10068 && TREE_INT_CST_LOW (op1) == max - 1)
10073 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10077 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* OP1 is the type's minimum value: again the comparison is decided.  */
10082 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10083 && TREE_INT_CST_LOW (op1) == min)
10087 return omit_one_operand (type, integer_zero_node, op0);
10094 return omit_one_operand (type, integer_one_node, op0);
/* OP1 is MIN+1: decrement the constant toward the extreme.  */
10103 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10104 && TREE_INT_CST_LOW (op1) == min + 1)
10109 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10113 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned OP0 compared against signed_max: rewrite as a sign test in
   the corresponding signed type.  */
10119 else if (TREE_INT_CST_HIGH (op1) == 0
10120 && TREE_INT_CST_LOW (op1) == signed_max
10121 && TYPE_UNSIGNED (TREE_TYPE (op1))
10122 /* signed_type does not work on pointer types. */
10123 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10125 /* The following case also applies to X < signed_max+1
10126 and X >= signed_max+1 because previous transformations. */
10127 if (code == LE_EXPR || code == GT_EXPR)
10129 tree st0, st1, exp, retval;
10130 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10131 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
/* (unsigned)X <= signed_max  becomes  (signed)X >= 0, and
   (unsigned)X >  signed_max  becomes  (signed)X <  0.  */
10133 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10135 fold_convert (st0, op0),
10136 fold_convert (st1, integer_zero_node));
/* Try to fold the rewritten comparison to a constant.  */
10138 retval = fold_binary_to_constant (TREE_CODE (exp),
10140 TREE_OPERAND (exp, 0),
10141 TREE_OPERAND (exp, 1));
10143 /* If we are in gimple form, then returning EXP would create
10144 non-gimple expressions. Clearing it is safe and insures
10145 we do not allow a non-gimple expression to escape. */
10146 if (in_gimple_form)
10149 return (retval ? retval : exp);
10158 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10159 attempt to fold the expression to a constant without modifying TYPE,
10162 If the expression could be simplified to a constant, then return
10163 the constant. If the expression would not be simplified to a
10164 constant, then return NULL_TREE.
10166 Note this is primarily designed to be called after gimplification
10167 of the tree structures and when at least one operand is a constant.
10168 As a result of those simplifying assumptions this routine is far
10169 simpler than the generic fold routine. */
10172 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10179 /* If this is a commutative operation, and ARG0 is a constant, move it
10180 to ARG1 to reduce the number of tests below. */
10181 if (commutative_tree_code (code)
10182 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10189 /* If either operand is a complex type, extract its real component. */
10190 if (TREE_CODE (op0) == COMPLEX_CST)
10191 subop0 = TREE_REALPART (op0);
10195 if (TREE_CODE (op1) == COMPLEX_CST)
10196 subop1 = TREE_REALPART (op1);
10200 /* Note if either argument is not a real or integer constant.
10201 With a few exceptions, simplification is limited to cases
10202 where both arguments are constants. */
10203 if ((TREE_CODE (subop0) != INTEGER_CST
10204 && TREE_CODE (subop0) != REAL_CST)
10205 || (TREE_CODE (subop1) != INTEGER_CST
10206 && TREE_CODE (subop1) != REAL_CST))
10212 /* (plus (address) (const_int)) is a constant. */
10213 if (TREE_CODE (op0) == PLUS_EXPR
10214 && TREE_CODE (op1) == INTEGER_CST
10215 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10216 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10217 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10219 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
/* Fold (addr + C1) + C2 into addr + (C1 + C2).  */
10221 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10222 const_binop (PLUS_EXPR, op1,
10223 TREE_OPERAND (op0, 1), 0));
10231 /* Both arguments are constants. Simplify. */
10232 tem = const_binop (code, op0, op1, 0);
10233 if (tem != NULL_TREE)
10235 /* The return value should always have the same type as
10236 the original expression. */
10237 if (TREE_TYPE (tem) != type)
10238 tem = fold_convert (type, tem);
10245 /* Fold &x - &x. This can happen from &x.foo - &x.
10246 This is unsafe for certain floats even in non-IEEE formats.
10247 In IEEE, it is unsafe because it does wrong for NaNs.
10248 Also note that operand_equal_p is always false if an
10249 operand is volatile. */
10250 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10251 return fold_convert (type, integer_zero_node);
10257 /* Special case multiplication or bitwise AND where one argument
/* X * 0 or X & 0 is 0 (X still evaluated for side effects).  */
10259 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10260 return omit_one_operand (type, op1, op0);
/* X * 0.0 folds only when NaNs and signed zeros cannot change it.  */
10262 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10263 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10264 && real_zerop (op1))
10265 return omit_one_operand (type, op1, op0);
10270 /* Special case when we know the result will be all ones. */
10271 if (integer_all_onesp (op1))
10272 return omit_one_operand (type, op1, op0);
/* Integer division and modulus cases.  */
10276 case TRUNC_DIV_EXPR:
10277 case ROUND_DIV_EXPR:
10278 case FLOOR_DIV_EXPR:
10279 case CEIL_DIV_EXPR:
10280 case EXACT_DIV_EXPR:
10281 case TRUNC_MOD_EXPR:
10282 case ROUND_MOD_EXPR:
10283 case FLOOR_MOD_EXPR:
10284 case CEIL_MOD_EXPR:
10286 /* Division by zero is undefined. */
10287 if (integer_zerop (op1))
/* Real division by zero likewise cannot be folded unless the mode
   has infinities.  */
10290 if (TREE_CODE (op1) == REAL_CST
10291 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10292 && real_zerop (op1))
/* MIN against the type's minimum (and MAX against its maximum) is
   the constant itself.  */
10298 if (INTEGRAL_TYPE_P (type)
10299 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10300 return omit_one_operand (type, op1, op0);
10305 if (INTEGRAL_TYPE_P (type)
10306 && TYPE_MAX_VALUE (type)
10307 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10308 return omit_one_operand (type, op1, op0);
10313 /* Optimize -1 >> x for arithmetic right shifts. */
10314 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10315 return omit_one_operand (type, op0, op1);
10316 /* ... fall through ... */
/* 0 shifted by any amount is still 0.  */
10319 if (integer_zerop (op0))
10320 return omit_one_operand (type, op0, op1);
10322 /* Since negative shift count is not well-defined, don't
10323 try to compute it in the compiler. */
10324 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10331 /* -1 rotated either direction by any amount is still -1. */
10332 if (integer_all_onesp (op0))
10333 return omit_one_operand (type, op0, op1);
10335 /* 0 rotated either direction by any amount is still zero. */
10336 if (integer_zerop (op0))
10337 return omit_one_operand (type, op0, op1);
10343 return build_complex (type, op0, op1);
10352 /* If one arg is a real or integer constant, put it last. */
10353 if ((TREE_CODE (op0) == INTEGER_CST
10354 && TREE_CODE (op1) != INTEGER_CST)
10355 || (TREE_CODE (op0) == REAL_CST
/* NOTE(review): the next line re-tests OP0, making this disjunct
   always false; by symmetry with the INTEGER_CST test above it should
   almost certainly read TREE_CODE (op1) != REAL_CST, so a REAL_CST
   first operand is currently never swapped to the right.  */
10356 && TREE_CODE (op0) != REAL_CST))
10363 code = swap_tree_comparison (code);
10366 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10367 This transformation affects the cases which are handled in later
10368 optimizations involving comparisons with non-negative constants. */
10369 if (TREE_CODE (op1) == INTEGER_CST
10370 && TREE_CODE (op0) != INTEGER_CST
10371 && tree_int_cst_sgn (op1) > 0)
10377 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10382 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Try folding comparisons against the type's extreme values.  */
10390 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10394 /* Fall through. */
10397 case UNORDERED_EXPR:
10407 return fold_relational_const (code, type, op0, op1);
10410 /* This could probably be handled. */
10413 case TRUTH_AND_EXPR:
10414 /* If second arg is constant zero, result is zero, but first arg
10415 must be evaluated. */
10416 if (integer_zerop (op1))
10417 return omit_one_operand (type, op1, op0);
10418 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10419 case will be handled here. */
10420 if (integer_zerop (op0))
10421 return omit_one_operand (type, op0, op1);
/* Both operands are constants and neither is zero (handled above),
   so the conjunction is true.  */
10422 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10423 return constant_boolean_node (true, type);
10426 case TRUTH_OR_EXPR:
10427 /* If second arg is constant true, result is true, but we must
10428 evaluate first arg. */
10429 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10430 return omit_one_operand (type, op1, op0);
10431 /* Likewise for first arg, but note this only occurs here for
10433 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10434 return omit_one_operand (type, op0, op1);
/* Both operands are constants and neither is nonzero (handled
   above), so the disjunction is false.  */
10435 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10436 return constant_boolean_node (false, type);
10439 case TRUTH_XOR_EXPR:
10440 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
/* XOR of the operands' truth values.  */
10442 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10443 return constant_boolean_node (x, type);
10452 /* Given the components of a unary expression CODE, TYPE and OP0,
10453 attempt to fold the expression to a constant without modifying
10456 If the expression could be simplified to a constant, then return
10457 the constant. If the expression would not be simplified to a
10458 constant, then return NULL_TREE.
10460 Note this is primarily designed to be called after gimplification
10461 of the tree structures and when op0 is a constant. As a result
10462 of those simplifying assumptions this routine is far simpler than
10463 the generic fold routine. */
10466 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10468 /* Make sure we have a suitable constant argument. */
10469 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
/* For conversions, judge suitability by the real part of a complex
   constant.  */
10473 if (TREE_CODE (op0) == COMPLEX_CST)
10474 subop = TREE_REALPART (op0);
10478 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Conversion and float-to-integer truncation cases.  */
10487 case FIX_TRUNC_EXPR:
10488 case FIX_FLOOR_EXPR:
10489 case FIX_CEIL_EXPR:
10490 return fold_convert_const (code, type, op0);
/* Negation of a numeric constant.  */
10493 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10494 return fold_negate_const (op0, type);
/* Absolute value of a numeric constant.  */
10499 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10500 return fold_abs_const (op0, type);
/* Bitwise complement of an integer constant.  */
10505 if (TREE_CODE (op0) == INTEGER_CST)
10506 return fold_not_const (op0, type);
10510 case REALPART_EXPR:
10511 if (TREE_CODE (op0) == COMPLEX_CST)
10512 return TREE_REALPART (op0);
10516 case IMAGPART_EXPR:
10517 if (TREE_CODE (op0) == COMPLEX_CST)
10518 return TREE_IMAGPART (op0);
/* Complex conjugate: negate the imaginary part.  */
10523 if (TREE_CODE (op0) == COMPLEX_CST
10524 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10525 return build_complex (type, TREE_REALPART (op0),
10526 negate_expr (TREE_IMAGPART (op0)));
10534 /* If EXP represents referencing an element in a constant string
10535 (either via pointer arithmetic or array indexing), return the
10536 tree representing the value accessed, otherwise return NULL. */
10539 fold_read_from_constant_string (tree exp)
10541 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10543 tree exp1 = TREE_OPERAND (exp, 0);
/* *(str + index) form: extract the string and index from the
   pointer expression.  */
10547 if (TREE_CODE (exp) == INDIRECT_REF)
10548 string = string_constant (exp1, &index);
/* str[index] form: normalize the index against the array's lower
   bound.  */
10551 tree low_bound = array_ref_low_bound (exp);
10552 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10554 /* Optimize the special-case of a zero lower bound.
10556 We convert the low_bound to sizetype to avoid some problems
10557 with constant folding. (E.g. suppose the lower bound is 1,
10558 and its mode is QI. Without the conversion, (ARRAY
10559 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10560 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10561 if (! integer_zerop (low_bound))
10562 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Only fold in-bounds reads of single-byte-element STRING_CSTs with
   a constant index.  */;
10568 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10569 && TREE_CODE (string) == STRING_CST
10570 && TREE_CODE (index) == INTEGER_CST
10571 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10572 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10574 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10575 return fold_convert (TREE_TYPE (exp),
10576 build_int_cst (NULL_TREE,
10577 (TREE_STRING_POINTER (string)
10578 [TREE_INT_CST_LOW (index)])));
10583 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10584 an integer constant or real constant.
10586 TYPE is the type of the result. */
10589 fold_negate_const (tree arg0, tree type)
10591 tree t = NULL_TREE;
10593 switch (TREE_CODE (arg0))
/* Integer case: negate the double-word value and record overflow.  */
10597 unsigned HOST_WIDE_INT low;
10598 HOST_WIDE_INT high;
10599 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10600 TREE_INT_CST_HIGH (arg0),
10602 t = build_int_cst_wide (type, low, high);
/* Overflow is only meaningful for signed types; propagate ARG0's
   existing overflow flags as well.  */
10603 t = force_fit_type (t, 1,
10604 (overflow | TREE_OVERFLOW (arg0))
10605 && !TYPE_UNSIGNED (type),
10606 TREE_CONSTANT_OVERFLOW (arg0));
/* Real case: flip the sign of the REAL_VALUE.  */
10611 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10615 gcc_unreachable ();
10621 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10622 an integer constant or real constant.
10624 TYPE is the type of the result. */
10627 fold_abs_const (tree arg0, tree type)
10629 tree t = NULL_TREE;
10631 switch (TREE_CODE (arg0))
10634 /* If the value is unsigned, then the absolute value is
10635 the same as the ordinary value. */
10636 if (TYPE_UNSIGNED (type))
10638 /* Similarly, if the value is non-negative. */
10639 else if (INT_CST_LT (integer_minus_one_node, arg0))
10641 /* If the value is negative, then the absolute value is
/* Negate the double-word value, tracking overflow (|TYPE_MIN| does
   not fit in a signed type).  */
10645 unsigned HOST_WIDE_INT low;
10646 HOST_WIDE_INT high;
10647 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10648 TREE_INT_CST_HIGH (arg0),
10650 t = build_int_cst_wide (type, low, high);
10651 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10652 TREE_CONSTANT_OVERFLOW (arg0));
/* Real case: clear the sign if negative.  */
10657 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10658 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10664 gcc_unreachable ();
10670 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10671 constant. TYPE is the type of the result. */
10674 fold_not_const (tree arg0, tree type)
10676 tree t = NULL_TREE;
10678 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Complement both halves of the double-word constant, then force the
   result to fit TYPE, propagating ARG0's overflow flags.  */
10680 t = build_int_cst_wide (type,
10681 ~ TREE_INT_CST_LOW (arg0),
10682 ~ TREE_INT_CST_HIGH (arg0));
10683 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10684 TREE_CONSTANT_OVERFLOW (arg0));
10689 /* Given CODE, a relational operator, the target type, TYPE and two
10690 constant operands OP0 and OP1, return the result of the
10691 relational operation. If the result is not a compile time
10692 constant, then return NULL_TREE. */
10695 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10697 int result, invert;
10699 /* From here on, the only cases we handle are when the result is
10700 known to be a constant. */
10702 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10704 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10705 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10707 /* Handle the cases where either operand is a NaN. */
10708 if (real_isnan (c0) || real_isnan (c1))
10718 case UNORDERED_EXPR:
/* Trapping comparisons with NaN cannot be folded away, since the
   runtime comparison may raise an exception.  */
10732 if (flag_trapping_math)
10738 gcc_unreachable ();
10741 return constant_boolean_node (result, type);
/* Neither operand is NaN: the real comparison is well defined.  */
10744 return constant_boolean_node (real_compare (code, c0, c1), type);
10747 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10749 To compute GT, swap the arguments and do LT.
10750 To compute GE, do LT and invert the result.
10751 To compute LE, swap the arguments, do LT and invert the result.
10752 To compute NE, do EQ and invert the result.
10754 Therefore, the code below must handle only EQ and LT. */
10756 if (code == LE_EXPR || code == GT_EXPR)
10761 code = swap_tree_comparison (code);
10764 /* Note that it is safe to invert for real values here because we
10765 have already handled the one case that it matters. */
10768 if (code == NE_EXPR || code == GE_EXPR)
10771 code = invert_tree_comparison (code, false);
10774 /* Compute a result for LT or EQ if args permit;
10775 Otherwise return T. */
10776 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10778 if (code == EQ_EXPR)
10779 result = tree_int_cst_equal (op0, op1);
/* Pick the signed or unsigned LT primitive based on the operand
   type.  */
10780 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10781 result = INT_CST_LT_UNSIGNED (op0, op1);
10783 result = INT_CST_LT (op0, op1);
10790 return constant_boolean_node (result, type);
10793 /* Build an expression for a cleanup point containing EXPR with type TYPE.
10794 Don't build a cleanup point expression for EXPR which doesn't have side
10798 fold_build_cleanup_point_expr (tree type, tree expr)
10800 /* If the expression does not have side effects then we don't have to wrap
10801 it with a cleanup point expression. */
10802 if (!TREE_SIDE_EFFECTS (expr))
10805 /* If the expression is a return, check to see if the expression inside the
10806 return has no side effects or the right hand side of the modify expression
10807 inside the return. If either don't have side effects set we don't need to
10808 wrap the expression in a cleanup point expression. Note we don't check the
10809 left hand side of the modify because it should always be a return decl. */
10810 if (TREE_CODE (expr) == RETURN_EXPR)
10812 tree op = TREE_OPERAND (expr, 0);
10813 if (!op || !TREE_SIDE_EFFECTS (op))
/* Descend into the RHS of the MODIFY_EXPR inside the return.  */
10815 op = TREE_OPERAND (op, 1);
10816 if (!TREE_SIDE_EFFECTS (op))
10820 return build1 (CLEANUP_POINT_EXPR, type, expr);
10823 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10824 avoid confusing the gimplify process. */
10827 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10829 /* The size of the object is not relevant when talking about its address. */
10830 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10831 t = TREE_OPERAND (t, 0);
10833 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
10834 if (TREE_CODE (t) == INDIRECT_REF
10835 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p is just p, possibly converted to the requested pointer type.  */
10837 t = TREE_OPERAND (t, 0);
10838 if (TREE_TYPE (t) != ptrtype)
10839 t = build1 (NOP_EXPR, ptrtype, t);
/* Taking the address of T makes its base object addressable.  */
10845 while (handled_component_p (base))
10846 base = TREE_OPERAND (base, 0);
10848 TREE_ADDRESSABLE (base) = 1;
10850 t = build1 (ADDR_EXPR, ptrtype, t);
/* Build an ADDR_EXPR for T using the natural pointer-to-T type.  */
10857 build_fold_addr_expr (tree t)
10859 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10862 /* Builds an expression for an indirection through T, simplifying some
10866 build_fold_indirect_ref (tree t)
10868 tree type = TREE_TYPE (TREE_TYPE (t));
10873 if (TREE_CODE (sub) == ADDR_EXPR)
10875 tree op = TREE_OPERAND (sub, 0);
10876 tree optype = TREE_TYPE (op);
/* *&foo => foo, when the types are compatible.  */
10878 if (lang_hooks.types_compatible_p (type, optype))
10880 /* *(foo *)&fooarray => fooarray[0] */
10881 else if (TREE_CODE (optype) == ARRAY_TYPE
10882 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10883 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10886 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10887 subtype = TREE_TYPE (sub);
10888 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10889 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10891 sub = build_fold_indirect_ref (sub);
10892 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
/* No simplification applies: build the plain indirection.  */
10895 return build1 (INDIRECT_REF, type, t);
10898 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10899 whose result is ignored. The type of the returned tree need not be
10900 the same as the original expression. */
10903 fold_ignored_result (tree t)
/* An expression with no side effects contributes nothing when its
   value is ignored.  */
10905 if (!TREE_SIDE_EFFECTS (t))
10906 return integer_zero_node;
10909 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary-like nodes: the side effect must be in the operand.  */
10912 t = TREE_OPERAND (t, 0);
10916 case tcc_comparison:
/* Keep only the operand that carries side effects.  */
10917 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10918 t = TREE_OPERAND (t, 0);
10919 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10920 t = TREE_OPERAND (t, 1);
10925 case tcc_expression:
10926 switch (TREE_CODE (t))
10928 case COMPOUND_EXPR:
/* Both halves have side effects: nothing to strip.  */
10929 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10931 t = TREE_OPERAND (t, 0);
/* Conditional: only strippable when neither arm has side effects.  */
10935 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10936 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10938 t = TREE_OPERAND (t, 0);
10951 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10952 This can only be applied to objects of a sizetype. */
10955 round_up (tree value, int divisor)
10957 tree div = NULL_TREE;
10959 gcc_assert (divisor > 0);
10963 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10964 have to do anything. Only do this when we are not given a const,
10965 because in that case, this check is more expensive than just
10967 if (TREE_CODE (value) != INTEGER_CST)
10969 div = build_int_cst (TREE_TYPE (value), divisor);
10971 if (multiple_of_p (TREE_TYPE (value), value, div))
10975 /* If divisor is a power of two, simplify this to bit manipulation. */
/* divisor == (divisor & -divisor) holds exactly for powers of two.  */
10976 if (divisor == (divisor & -divisor))
/* (value + divisor-1) & -divisor rounds up without a division.  */
10980 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10981 value = size_binop (PLUS_EXPR, value, t);
10982 t = build_int_cst (TREE_TYPE (value), -divisor);
10983 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide then multiply back.  */
10988 div = build_int_cst (TREE_TYPE (value), divisor);
10989 value = size_binop (CEIL_DIV_EXPR, value, div);
10990 value = size_binop (MULT_EXPR, value, div);
10996 /* Likewise, but round down. */
10999 round_down (tree value, int divisor)
11001 tree div = NULL_TREE;
11003 gcc_assert (divisor > 0);
11007 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11008 have to do anything. Only do this when we are not given a const,
11009 because in that case, this check is more expensive than just
11011 if (TREE_CODE (value) != INTEGER_CST)
11013 div = build_int_cst (TREE_TYPE (value), divisor);
11015 if (multiple_of_p (TREE_TYPE (value), value, div))
11019 /* If divisor is a power of two, simplify this to bit manipulation. */
/* divisor == (divisor & -divisor) holds exactly for powers of two.  */
11020 if (divisor == (divisor & -divisor))
/* value & -divisor clears the low bits, rounding down.  */
11024 t = build_int_cst (TREE_TYPE (value), -divisor);
11025 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: floor-divide then multiply back.  */
11030 div = build_int_cst (TREE_TYPE (value), divisor);
11031 value = size_binop (FLOOR_DIV_EXPR, value, div);
11032 value = size_binop (MULT_EXPR, value, div);
11038 /* Returns the pointer to the base of the object addressed by EXP and
11039 extracts the information about the offset of the access, storing it
11040 to PBITPOS and POFFSET. */
11043 split_address_to_core_and_offset (tree exp,
11044 HOST_WIDE_INT *pbitpos, tree *poffset)
11047 enum machine_mode mode;
11048 int unsignedp, volatilep;
11049 HOST_WIDE_INT bitsize;
/* &object: let get_inner_reference split the reference into its base
   plus constant bit position and variable offset.  */
11051 if (TREE_CODE (exp) == ADDR_EXPR)
11053 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11054 poffset, &mode, &unsignedp, &volatilep,
/* &*p: the core is just p itself.  */
11057 if (TREE_CODE (core) == INDIRECT_REF)
11058 core = TREE_OPERAND (core, 0);
/* Non-ADDR_EXPR case: no variable offset component.  */
11064 *poffset = NULL_TREE;
11070 /* Returns true if addresses of E1 and E2 differ by a constant, false
11071 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11074 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11077 HOST_WIDE_INT bitpos1, bitpos2;
11078 tree toffset1, toffset2, tdiff, type;
11080 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11081 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11083 if (bitpos1 % BITS_PER_UNIT != 0
11084 || bitpos2 % BITS_PER_UNIT != 0
11085 || !operand_equal_p (core1, core2, 0))
11088 if (toffset1 && toffset2)
11090 type = TREE_TYPE (toffset1);
11091 if (type != TREE_TYPE (toffset2))
11092 toffset2 = fold_convert (type, toffset2);
11094 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11095 if (!host_integerp (tdiff, 0))
11098 *diff = tree_low_cst (tdiff, 0);
11100 else if (toffset1 || toffset2)
11102 /* If only one of the offsets is non-constant, the difference cannot
11109 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;