/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "coretypes.h"
#include "langhooks.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

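/* Worked example (illustrative): with 8-bit quantities, a = 0x70 and
   b = 0x30 are both positive, but sum = 0xA0 is negative.  Then
   ~(a ^ b) = 0xBF and (a ^ sum) = 0xD0 both have the sign bit set, so
   their AND is negative and the macro yields nonzero, correctly
   reporting the signed overflow.  */
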
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

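/* Worked example (illustrative): on a host where HOST_BITS_PER_WIDE_INT
   is 32, BASE is 0x10000, and the word 0x89ABCDEF decomposes as
   LOWPART = 0xCDEF and HIGHPART = 0x89AB; indeed
   0xCDEF + 0x89AB * 0x10000 == 0x89ABCDEF.  */
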
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

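/* Worked example (illustrative): with 32-bit HOST_WIDE_INT,
   encode (words, 0x89ABCDEF, 0x01234567) stores
   { 0xCDEF, 0x89AB, 0x4567, 0x0123 }, and decode on that array
   recovers *low == 0x89ABCDEF and *hi == 0x01234567.  */
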
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}

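/* Worked example (illustrative): forcing the value 0x1FF into an
   unsigned 8-bit type clears the bits above bit 7 and yields 0xFF; in
   a signed 8-bit type the surviving bits are then sign extended,
   giving -1.  With OVERFLOWABLE > 0, only the signed (sign-extended)
   case marks the result with TREE_OVERFLOW.  */
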
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

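/* Note (illustrative): the carry out of the low word is recovered as
   (l < l1), since unsigned wraparound makes the low-word sum smaller
   than an addend exactly when a carry occurred.  For example, adding
   an all-ones low word to 1 gives l == 0, so l < l1 and 1 is carried
   into the high word.  */
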
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

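/* Note (illustrative): the signed product fits in the low doubleword
   exactly when prod[4] through prod[7] are the sign extension of the
   low half, hence the final test that the top half is all zeros for a
   nonnegative result or all ones for a negative one.  */
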
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

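/* Note (illustrative): the expression
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 moves the high bits
   of L1 into *HV in two steps so that no single shift ever equals the
   host word width, which would be undefined behavior when COUNT is 0.  */
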
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

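/* Note (illustrative): a rotation by COUNT within PREC bits is the OR
   of a shift by COUNT and a logical shift the other way by
   PREC - COUNT, since the two shifts deposit disjoint bit ranges.  */
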
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

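/* Worked example (illustrative): dividing -7 by 2 gives quotient -3
   and remainder -1 for TRUNC_DIV_EXPR, -4 and 1 for FLOOR_DIV_EXPR,
   -3 and -1 for CEIL_DIV_EXPR, and -4 and 1 for ROUND_DIV_EXPR, since
   -3.5 rounds away from zero on ties.  */
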
/* Return true if built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)

/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

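/* Worked example (illustrative): for a signed 32-bit type the function
   returns false only for -2147483648, the single value whose negation
   does not fit the type, and true for every other constant.  */
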
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

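/* Worked example (illustrative): splitting IN = x + 4 with CODE ==
   PLUS_EXPR yields *LITP == 4 and returns x; splitting x - 4 instead
   sets *MINUS_LITP to 4, because the literal was subtracted.  */
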
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

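/* Worked example (illustrative): folding 2147483647 + 1 for a signed
   32-bit type makes add_double report signed overflow, so
   force_fit_type wraps the value to -2147483648 and sets both
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW on the result.  */
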
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

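/* Note (illustrative): the MULT_EXPR and RDIV_EXPR cases above are the
   textbook complex formulas (a+bi)(c+di) = (ac-bd) + (ad+bc)i and
   (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c*c + d*d), with each part
   folded by recursive calls to const_binop.  */
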
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

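/* Worked example (illustrative): for sizetype constants arg0 == 4 and
   arg1 == 12, size_diffop returns the ssizetype constant -8, computed
   as -(12 - 4) so that the unsigned subtraction never wraps.  */
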
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make a new constant with the new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

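/* Worked example (illustrative): converting the REAL_CST 1e30 to a
   signed 32-bit type saturates to 2147483647 and sets TREE_OVERFLOW,
   while a NaN converts to zero, again with the overflow flag set.  */
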
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  tree t;

  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
    {
      /* We make a copy of ARG1 so that we don't modify an
         existing constant tree.  */
      t = copy_node (arg1);
      TREE_TYPE (t) = type;
      return t;
    }

  t = build_real (type,
                  real_value_truncate (TYPE_MODE (type),
                                       TREE_REAL_CST (arg1)));

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold (build1 (NOP_EXPR, type, arg));

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold (build1 (FLOAT_EXPR, type, arg));

        case REAL_TYPE:
          return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                               type, arg));

        case COMPLEX_TYPE:
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
              }

            arg = save_expr (arg);
            rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
            ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold (build1 (NOP_EXPR, type, arg));

    case VOID_TYPE:
      return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));

    default:
      gcc_unreachable ();
    }
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_RANGE_REF:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return x;
  }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS
   flag as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
                         truth_type, ll_arg, lr_arg));
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   have no side effects, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

2442 if (flags & OEP_ONLY_CONST)
2445 /* Define macros to test an operand from arg0 and arg1 for equality and a
2446 variant that allows null and views null as being different from any
2447 non-null value. In the latter case, if either is null, then both
2448 must be; otherwise, do the normal comparison. */
2449 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2450 TREE_OPERAND (arg1, N), flags)
2452 #define OP_SAME_WITH_NULL(N) \
2453 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2454 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
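/* For instance, when comparing two ARRAY_REFs below, the optional
   operands 2 and 3 may be absent: OP_SAME_WITH_NULL (2) treats
   "both null" as equal and "exactly one null" as unequal, and
   otherwise falls back to the ordinary OP_SAME comparison. */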
2456 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2459 /* Two conversions are equal only if signedness and modes match. */
2460 switch (TREE_CODE (arg0))
2465 case FIX_TRUNC_EXPR:
2466 case FIX_FLOOR_EXPR:
2467 case FIX_ROUND_EXPR:
2468 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2469 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2479 case tcc_comparison:
2481 if (OP_SAME (0) && OP_SAME (1))
2484 /* For commutative ops, allow the other order. */
2485 return (commutative_tree_code (TREE_CODE (arg0))
2486 && operand_equal_p (TREE_OPERAND (arg0, 0),
2487 TREE_OPERAND (arg1, 1), flags)
2488 && operand_equal_p (TREE_OPERAND (arg0, 1),
2489 TREE_OPERAND (arg1, 0), flags));
2492 /* If either of the pointer (or reference) expressions we are
2493 dereferencing contain a side effect, these cannot be equal. */
2494 if (TREE_SIDE_EFFECTS (arg0)
2495 || TREE_SIDE_EFFECTS (arg1))
2498 switch (TREE_CODE (arg0))
2501 case ALIGN_INDIRECT_REF:
2502 case MISALIGNED_INDIRECT_REF:
2508 case ARRAY_RANGE_REF:
2509 /* Operands 2 and 3 may be null. */
2512 && OP_SAME_WITH_NULL (2)
2513 && OP_SAME_WITH_NULL (3));
2516 /* Handle operand 2 the same as for ARRAY_REF. */
2517 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2520 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2526 case tcc_expression:
2527 switch (TREE_CODE (arg0))
2530 case TRUTH_NOT_EXPR:
2533 case TRUTH_ANDIF_EXPR:
2534 case TRUTH_ORIF_EXPR:
2535 return OP_SAME (0) && OP_SAME (1);
2537 case TRUTH_AND_EXPR:
2539 case TRUTH_XOR_EXPR:
2540 if (OP_SAME (0) && OP_SAME (1))
2543 /* Otherwise take into account this is a commutative operation. */
2544 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2545 TREE_OPERAND (arg1, 1), flags)
2546 && operand_equal_p (TREE_OPERAND (arg0, 1),
2547 TREE_OPERAND (arg1, 0), flags));
2550 /* If the CALL_EXPRs call different functions, then they
2551 clearly cannot be equal. */
2556 unsigned int cef = call_expr_flags (arg0);
2557 if (flags & OEP_PURE_SAME)
2558 cef &= ECF_CONST | ECF_PURE;
2565 /* Now see if all the arguments are the same. operand_equal_p
2566 does not handle TREE_LIST, so we walk the operands here
2567 feeding them to operand_equal_p. */
2568 arg0 = TREE_OPERAND (arg0, 1);
2569 arg1 = TREE_OPERAND (arg1, 1);
2570 while (arg0 && arg1)
2572 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2576 arg0 = TREE_CHAIN (arg0);
2577 arg1 = TREE_CHAIN (arg1);
2580 /* If we get here and both argument lists are exhausted
2581 then the CALL_EXPRs are equal. */
2582 return ! (arg0 || arg1);
2588 case tcc_declaration:
2589 /* Consider __builtin_sqrt equal to sqrt. */
2590 return (TREE_CODE (arg0) == FUNCTION_DECL
2591 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2592 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2593 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2600 #undef OP_SAME_WITH_NULL
2603 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2604 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2606 When in doubt, return 0. */
2609 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2611 int unsignedp1, unsignedpo;
2612 tree primarg0, primarg1, primother;
2613 unsigned int correct_width;
2615 if (operand_equal_p (arg0, arg1, 0))
2618 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2619 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2622 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2623 and see if the inner values are the same. This removes any
2624 signedness comparison, which doesn't matter here. */
2625 primarg0 = arg0, primarg1 = arg1;
2626 STRIP_NOPS (primarg0);
2627 STRIP_NOPS (primarg1);
2628 if (operand_equal_p (primarg0, primarg1, 0))
2631 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2632 actual comparison operand, ARG0.
2634 First throw away any conversions to wider types
2635 already present in the operands. */
2637 primarg1 = get_narrower (arg1, &unsignedp1);
2638 primother = get_narrower (other, &unsignedpo);
2640 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2641 if (unsignedp1 == unsignedpo
2642 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2643 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2645 tree type = TREE_TYPE (arg0);
2647 /* Make sure shorter operand is extended the right way
2648 to match the longer operand. */
2649 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2650 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2652 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2659 /* See if ARG is an expression that is either a comparison or is performing
2660 arithmetic on comparisons. The comparisons must only be comparing
2661 two different values, which will be stored in *CVAL1 and *CVAL2; if
2662 they are nonzero it means that some operands have already been found.
2663 No variables may be used anywhere else in the expression except in the
2664 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2665 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2667 If this is true, return 1. Otherwise, return zero. */
2670 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2672 enum tree_code code = TREE_CODE (arg);
2673 enum tree_code_class class = TREE_CODE_CLASS (code);
2675 /* We can handle some of the tcc_expression cases here. */
2676 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2678 else if (class == tcc_expression
2679 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2680 || code == COMPOUND_EXPR))
2683 else if (class == tcc_expression && code == SAVE_EXPR
2684 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2686 /* If we've already found a CVAL1 or CVAL2, this expression is
2687 too complex to handle. */
2688 if (*cval1 || *cval2)
2698 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2701 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2702 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2703 cval1, cval2, save_p));
2708 case tcc_expression:
2709 if (code == COND_EXPR)
2710 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2711 cval1, cval2, save_p)
2712 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2713 cval1, cval2, save_p)
2714 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2715 cval1, cval2, save_p));
2718 case tcc_comparison:
2719 /* First see if we can handle the first operand, then the second. For
2720 the second operand, we know *CVAL1 can't be zero. It must be that
2721 one side of the comparison is each of the values; test for the
2722 case where this isn't true by failing if the two operands
2723 are the same. */
2725 if (operand_equal_p (TREE_OPERAND (arg, 0),
2726 TREE_OPERAND (arg, 1), 0))
2730 *cval1 = TREE_OPERAND (arg, 0);
2731 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2733 else if (*cval2 == 0)
2734 *cval2 = TREE_OPERAND (arg, 0);
2735 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2740 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2742 else if (*cval2 == 0)
2743 *cval2 = TREE_OPERAND (arg, 1);
2744 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2756 /* ARG is a tree that is known to contain just arithmetic operations and
2757 comparisons. Evaluate the operations in the tree substituting NEW0 for
2758 any occurrence of OLD0 as an operand of a comparison and likewise for
2759 NEW1 and OLD1. */
2762 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2764 tree type = TREE_TYPE (arg);
2765 enum tree_code code = TREE_CODE (arg);
2766 enum tree_code_class class = TREE_CODE_CLASS (code);
2768 /* We can handle some of the tcc_expression cases here. */
2769 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2771 else if (class == tcc_expression
2772 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2778 return fold (build1 (code, type,
2779 eval_subst (TREE_OPERAND (arg, 0),
2780 old0, new0, old1, new1)));
2783 return fold (build2 (code, type,
2784 eval_subst (TREE_OPERAND (arg, 0),
2785 old0, new0, old1, new1),
2786 eval_subst (TREE_OPERAND (arg, 1),
2787 old0, new0, old1, new1)));
2789 case tcc_expression:
2793 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2796 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2799 return fold (build3 (code, type,
2800 eval_subst (TREE_OPERAND (arg, 0),
2801 old0, new0, old1, new1),
2802 eval_subst (TREE_OPERAND (arg, 1),
2803 old0, new0, old1, new1),
2804 eval_subst (TREE_OPERAND (arg, 2),
2805 old0, new0, old1, new1)));
2809 /* Fall through - ??? */
2811 case tcc_comparison:
2813 tree arg0 = TREE_OPERAND (arg, 0);
2814 tree arg1 = TREE_OPERAND (arg, 1);
2816 /* We need to check both for exact equality and tree equality. The
2817 former will be true if the operand has a side-effect. In that
2818 case, we know the operand occurred exactly once. */
2820 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2822 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2825 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2827 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2830 return fold (build2 (code, type, arg0, arg1));
2838 /* Return a tree for the case when the result of an expression is RESULT
2839 converted to TYPE and OMITTED was previously an operand of the expression
2840 but is now not needed (e.g., we folded OMITTED * 0).
2842 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2843 the conversion of RESULT to TYPE. */
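/* For example, when OMITTED is a call f () whose product was folded
   away (f () * 0), the result is the COMPOUND_EXPR (f (), 0): the
   call is still evaluated for its side effects, but the value of the
   whole expression is the constant. */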
2846 omit_one_operand (tree type, tree result, tree omitted)
2848 tree t = fold_convert (type, result);
2850 if (TREE_SIDE_EFFECTS (omitted))
2851 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2853 return non_lvalue (t);
2856 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2859 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2861 tree t = fold_convert (type, result);
2863 if (TREE_SIDE_EFFECTS (omitted))
2864 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2866 return pedantic_non_lvalue (t);
2869 /* Return a tree for the case when the result of an expression is RESULT
2870 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2871 of the expression but are now not needed.
2873 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2874 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2875 evaluated before OMITTED2. Otherwise, if neither has side effects,
2876 just do the conversion of RESULT to TYPE. */
2879 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2881 tree t = fold_convert (type, result);
2883 if (TREE_SIDE_EFFECTS (omitted2))
2884 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2885 if (TREE_SIDE_EFFECTS (omitted1))
2886 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2888 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2892 /* Return a simplified tree node for the truth-negation of ARG. This
2893 never alters ARG itself. We assume that ARG is an operation that
2894 returns a truth value (0 or 1).
2896 FIXME: one would think we would fold the result, but it causes
2897 problems with the dominator optimizer. */
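/* For example, an integer comparison a < b inverts to a >= b, while a
   floating-point a < b under -ftrapping-math is merely wrapped as
   !(a < b), since the plain inverse could change which operand pairs
   (e.g. NaNs) raise an exception. */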
2899 invert_truthvalue (tree arg)
2901 tree type = TREE_TYPE (arg);
2902 enum tree_code code = TREE_CODE (arg);
2904 if (code == ERROR_MARK)
2907 /* If this is a comparison, we can simply invert it, except for
2908 floating-point non-equality comparisons, in which case we just
2909 enclose a TRUTH_NOT_EXPR around what we have. */
2911 if (TREE_CODE_CLASS (code) == tcc_comparison)
2913 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2914 if (FLOAT_TYPE_P (op_type)
2915 && flag_trapping_math
2916 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2917 && code != NE_EXPR && code != EQ_EXPR)
2918 return build1 (TRUTH_NOT_EXPR, type, arg);
2921 code = invert_tree_comparison (code,
2922 HONOR_NANS (TYPE_MODE (op_type)));
2923 if (code == ERROR_MARK)
2924 return build1 (TRUTH_NOT_EXPR, type, arg);
2926 return build2 (code, type,
2927 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2934 return fold_convert (type,
2935 build_int_cst (NULL_TREE, integer_zerop (arg)));
2937 case TRUTH_AND_EXPR:
2938 return build2 (TRUTH_OR_EXPR, type,
2939 invert_truthvalue (TREE_OPERAND (arg, 0)),
2940 invert_truthvalue (TREE_OPERAND (arg, 1)));
2943 return build2 (TRUTH_AND_EXPR, type,
2944 invert_truthvalue (TREE_OPERAND (arg, 0)),
2945 invert_truthvalue (TREE_OPERAND (arg, 1)));
2947 case TRUTH_XOR_EXPR:
2948 /* Here we can invert either operand. We invert the first operand
2949 unless the second operand is a TRUTH_NOT_EXPR in which case our
2950 result is the XOR of the first operand with the inside of the
2951 negation of the second operand. */
2953 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2954 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2955 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2957 return build2 (TRUTH_XOR_EXPR, type,
2958 invert_truthvalue (TREE_OPERAND (arg, 0)),
2959 TREE_OPERAND (arg, 1));
2961 case TRUTH_ANDIF_EXPR:
2962 return build2 (TRUTH_ORIF_EXPR, type,
2963 invert_truthvalue (TREE_OPERAND (arg, 0)),
2964 invert_truthvalue (TREE_OPERAND (arg, 1)));
2966 case TRUTH_ORIF_EXPR:
2967 return build2 (TRUTH_ANDIF_EXPR, type,
2968 invert_truthvalue (TREE_OPERAND (arg, 0)),
2969 invert_truthvalue (TREE_OPERAND (arg, 1)));
2971 case TRUTH_NOT_EXPR:
2972 return TREE_OPERAND (arg, 0);
2975 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2976 invert_truthvalue (TREE_OPERAND (arg, 1)),
2977 invert_truthvalue (TREE_OPERAND (arg, 2)));
2980 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2981 invert_truthvalue (TREE_OPERAND (arg, 1)));
2983 case NON_LVALUE_EXPR:
2984 return invert_truthvalue (TREE_OPERAND (arg, 0));
2987 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2992 return build1 (TREE_CODE (arg), type,
2993 invert_truthvalue (TREE_OPERAND (arg, 0)));
2996 if (!integer_onep (TREE_OPERAND (arg, 1)))
2998 return build2 (EQ_EXPR, type, arg,
2999 fold_convert (type, integer_zero_node));
3002 return build1 (TRUTH_NOT_EXPR, type, arg);
3004 case CLEANUP_POINT_EXPR:
3005 return build1 (CLEANUP_POINT_EXPR, type,
3006 invert_truthvalue (TREE_OPERAND (arg, 0)));
3011 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3012 return build1 (TRUTH_NOT_EXPR, type, arg);
3015 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3016 operands are another bit-wise operation with a common input. If so,
3017 distribute the bit operations to save an operation and possibly two if
3018 constants are involved. For example, convert
3019 (A | B) & (A | C) into A | (B & C)
3020 Further simplification will occur if B and C are constants.
3022 If this optimization cannot be done, 0 will be returned. */
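/* A concrete instance: (X | 4) & (X | 6) is rewritten as X | (4 & 6),
   and the inner fold then reduces the constant operation, leaving
   X | 4. */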
3025 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3030 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3031 || TREE_CODE (arg0) == code
3032 || (TREE_CODE (arg0) != BIT_AND_EXPR
3033 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3036 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3038 common = TREE_OPERAND (arg0, 0);
3039 left = TREE_OPERAND (arg0, 1);
3040 right = TREE_OPERAND (arg1, 1);
3042 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3044 common = TREE_OPERAND (arg0, 0);
3045 left = TREE_OPERAND (arg0, 1);
3046 right = TREE_OPERAND (arg1, 0);
3048 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3050 common = TREE_OPERAND (arg0, 1);
3051 left = TREE_OPERAND (arg0, 0);
3052 right = TREE_OPERAND (arg1, 1);
3054 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3056 common = TREE_OPERAND (arg0, 1);
3057 left = TREE_OPERAND (arg0, 0);
3058 right = TREE_OPERAND (arg1, 0);
3063 return fold (build2 (TREE_CODE (arg0), type, common,
3064 fold (build2 (code, type, left, right))));
3067 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3068 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3071 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3074 tree result = build3 (BIT_FIELD_REF, type, inner,
3075 size_int (bitsize), bitsize_int (bitpos));
3077 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3082 /* Optimize a bit-field compare.
3084 There are two cases: First is a compare against a constant and the
3085 second is a comparison of two items where the fields are at the same
3086 bit position relative to the start of a chunk (byte, halfword, word)
3087 large enough to contain it. In these cases we can avoid the shift
3088 implicit in bitfield extractions.
3090 For constants, we emit a compare of the shifted constant with the
3091 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3092 compared. For two fields at the same position, we do the ANDs with the
3093 similar mask and compare the result of the ANDs.
3095 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3096 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3097 are the left and right operands of the comparison, respectively.
3099 If the optimization described above can be done, we return the resulting
3100 tree. Otherwise we return zero. */
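/* Sketch of the constant case: for "struct { unsigned f : 3; } s" and
   the test s.f == 5, we build a comparison of (WORD & MASK) against 5
   shifted into f's bit position, where WORD is a mode-sized load
   covering the field, avoiding the shift of an ordinary bitfield
   extraction. */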
3103 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3106 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3107 tree type = TREE_TYPE (lhs);
3108 tree signed_type, unsigned_type;
3109 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3110 enum machine_mode lmode, rmode, nmode;
3111 int lunsignedp, runsignedp;
3112 int lvolatilep = 0, rvolatilep = 0;
3113 tree linner, rinner = NULL_TREE;
3117 /* Get all the information about the extractions being done. If the bit size
3118 is the same as the size of the underlying object, we aren't doing an
3119 extraction at all and so can do nothing. We also don't want to
3120 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3121 then will no longer be able to replace it. */
3122 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3123 &lunsignedp, &lvolatilep, false);
3124 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3125 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3130 /* If this is not a constant, we can only do something if bit positions,
3131 sizes, and signedness are the same. */
3132 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3133 &runsignedp, &rvolatilep, false);
3135 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3136 || lunsignedp != runsignedp || offset != 0
3137 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3141 /* See if we can find a mode to refer to this field. We should be able to,
3142 but fail if we can't. */
3143 nmode = get_best_mode (lbitsize, lbitpos,
3144 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3145 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3146 TYPE_ALIGN (TREE_TYPE (rinner))),
3147 word_mode, lvolatilep || rvolatilep);
3148 if (nmode == VOIDmode)
3151 /* Set signed and unsigned types of the precision of this mode for the
3152 shifts below. */
3153 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3154 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3156 /* Compute the bit position and size for the new reference and our offset
3157 within it. If the new reference is the same size as the original, we
3158 won't optimize anything, so return zero. */
3159 nbitsize = GET_MODE_BITSIZE (nmode);
3160 nbitpos = lbitpos & ~ (nbitsize - 1);
3162 if (nbitsize == lbitsize)
3165 if (BYTES_BIG_ENDIAN)
3166 lbitpos = nbitsize - lbitsize - lbitpos;
3168 /* Make the mask to be used against the extracted field. */
3169 mask = build_int_cst (unsigned_type, -1);
3170 mask = force_fit_type (mask, 0, false, false);
3171 mask = fold_convert (unsigned_type, mask);
3172 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3173 mask = const_binop (RSHIFT_EXPR, mask,
3174 size_int (nbitsize - lbitsize - lbitpos), 0);
3177 /* If not comparing with constant, just rework the comparison
3178 and return. */
3179 return build2 (code, compare_type,
3180 build2 (BIT_AND_EXPR, unsigned_type,
3181 make_bit_field_ref (linner, unsigned_type,
3182 nbitsize, nbitpos, 1),
3184 build2 (BIT_AND_EXPR, unsigned_type,
3185 make_bit_field_ref (rinner, unsigned_type,
3186 nbitsize, nbitpos, 1),
3189 /* Otherwise, we are handling the constant case. See if the constant is too
3190 big for the field. Warn and return a tree for 0 (false) if so. We do
3191 this not only for its own sake, but to avoid having to test for this
3192 error case below. If we didn't, we might generate wrong code.
3194 For unsigned fields, the constant shifted right by the field length should
3195 be all zero. For signed fields, the high-order bits should agree with
3196 the sign bit. */
3200 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3201 fold_convert (unsigned_type, rhs),
3202 size_int (lbitsize), 0)))
3204 warning ("comparison is always %d due to width of bit-field",
3206 return constant_boolean_node (code == NE_EXPR, compare_type);
3211 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3212 size_int (lbitsize - 1), 0);
3213 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3215 warning ("comparison is always %d due to width of bit-field",
3217 return constant_boolean_node (code == NE_EXPR, compare_type);
3221 /* Single-bit compares should always be against zero. */
3222 if (lbitsize == 1 && ! integer_zerop (rhs))
3224 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3225 rhs = fold_convert (type, integer_zero_node);
3228 /* Make a new bitfield reference, shift the constant over the
3229 appropriate number of bits and mask it with the computed mask
3230 (in case this was a signed field). If we changed it, make a new one. */
3231 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3234 TREE_SIDE_EFFECTS (lhs) = 1;
3235 TREE_THIS_VOLATILE (lhs) = 1;
3238 rhs = fold (const_binop (BIT_AND_EXPR,
3239 const_binop (LSHIFT_EXPR,
3240 fold_convert (unsigned_type, rhs),
3241 size_int (lbitpos), 0),
3244 return build2 (code, compare_type,
3245 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3249 /* Subroutine for fold_truthop: decode a field reference.
3251 If EXP is a comparison reference, we return the innermost reference.
3253 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3254 set to the starting bit number.
3256 If the innermost field can be completely contained in a mode-sized
3257 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3259 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3260 otherwise it is not changed.
3262 *PUNSIGNEDP is set to the signedness of the field.
3264 *PMASK is set to the mask used. This is either contained in a
3265 BIT_AND_EXPR or derived from the width of the field.
3267 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3269 Return 0 if this is not a component reference or is one that we can't
3270 do anything with. */
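/* For instance, for EXP of the form s.f & 3 where f is a 4-bit
   integral field, the reference to the containing object is returned,
   *PBITSIZE is set to 4, *PAND_MASK to the constant 3, and *PMASK to
   the field's low-order all-ones mask merged with that constant. */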
3273 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3274 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3275 int *punsignedp, int *pvolatilep,
3276 tree *pmask, tree *pand_mask)
3278 tree outer_type = 0;
3280 tree mask, inner, offset;
3282 unsigned int precision;
3284 /* All the optimizations using this function assume integer fields.
3285 There are problems with FP fields since the type_for_size call
3286 below can fail for, e.g., XFmode. */
3287 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3290 /* We are interested in the bare arrangement of bits, so strip everything
3291 that doesn't affect the machine mode. However, record the type of the
3292 outermost expression if it may matter below. */
3293 if (TREE_CODE (exp) == NOP_EXPR
3294 || TREE_CODE (exp) == CONVERT_EXPR
3295 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3296 outer_type = TREE_TYPE (exp);
3299 if (TREE_CODE (exp) == BIT_AND_EXPR)
3301 and_mask = TREE_OPERAND (exp, 1);
3302 exp = TREE_OPERAND (exp, 0);
3303 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3304 if (TREE_CODE (and_mask) != INTEGER_CST)
3308 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3309 punsignedp, pvolatilep, false);
3310 if ((inner == exp && and_mask == 0)
3311 || *pbitsize < 0 || offset != 0
3312 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3315 /* If the number of bits in the reference is the same as the bitsize of
3316 the outer type, then the outer type gives the signedness. Otherwise
3317 (in case of a small bitfield) the signedness is unchanged. */
3318 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3319 *punsignedp = TYPE_UNSIGNED (outer_type);
3321 /* Compute the mask to access the bitfield. */
3322 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3323 precision = TYPE_PRECISION (unsigned_type);
3325 mask = build_int_cst (unsigned_type, -1);
3326 mask = force_fit_type (mask, 0, false, false);
3328 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3329 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3331 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3333 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3334 fold_convert (unsigned_type, and_mask), mask));
3337 *pand_mask = and_mask;
3341 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3342 bits. */
3345 all_ones_mask_p (tree mask, int size)
3347 tree type = TREE_TYPE (mask);
3348 unsigned int precision = TYPE_PRECISION (type);
3351 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3352 tmask = force_fit_type (tmask, 0, false, false);
3355 tree_int_cst_equal (mask,
3356 const_binop (RSHIFT_EXPR,
3357 const_binop (LSHIFT_EXPR, tmask,
3358 size_int (precision - size),
3360 size_int (precision - size), 0));
3363 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3364 represents the sign bit of EXP's type. If EXP represents a sign
3365 or zero extension, also test VAL against the unextended type.
3366 The return value is the (sub)expression whose sign bit is VAL,
3367 or NULL_TREE otherwise. */
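/* E.g., for EXP of type signed char, VAL must have exactly bit 0x80
   set within the type's 8-bit width (bits beyond the width are masked
   off before the test); if EXP is a NOP_EXPR widening a signed char
   to int, 0x80 is likewise accepted by the recursion on the narrower
   operand below. */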
3370 sign_bit_p (tree exp, tree val)
3372 unsigned HOST_WIDE_INT mask_lo, lo;
3373 HOST_WIDE_INT mask_hi, hi;
3377 /* Tree EXP must have an integral type. */
3378 t = TREE_TYPE (exp);
3379 if (! INTEGRAL_TYPE_P (t))
3382 /* Tree VAL must be an integer constant. */
3383 if (TREE_CODE (val) != INTEGER_CST
3384 || TREE_CONSTANT_OVERFLOW (val))
3387 width = TYPE_PRECISION (t);
3388 if (width > HOST_BITS_PER_WIDE_INT)
3390 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3393 mask_hi = ((unsigned HOST_WIDE_INT) -1
3394 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3400 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3403 mask_lo = ((unsigned HOST_WIDE_INT) -1
3404 >> (HOST_BITS_PER_WIDE_INT - width));
3407 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3408 treat VAL as if it were unsigned. */
3409 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3410 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3413 /* Handle extension from a narrower type. */
3414 if (TREE_CODE (exp) == NOP_EXPR
3415 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3416 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3421 /* Subroutine for fold_truthop: determine if an operand is simple enough
3422 to be evaluated unconditionally. */
3425 simple_operand_p (tree exp)
3427 /* Strip any conversions that don't change the machine mode. */
3430 return (CONSTANT_CLASS_P (exp)
3431 || TREE_CODE (exp) == SSA_NAME
3433 && ! TREE_ADDRESSABLE (exp)
3434 && ! TREE_THIS_VOLATILE (exp)
3435 && ! DECL_NONLOCAL (exp)
3436 /* Don't regard global variables as simple. They may be
3437 allocated in ways unknown to the compiler (shared memory,
3438 #pragma weak, etc). */
3439 && ! TREE_PUBLIC (exp)
3440 && ! DECL_EXTERNAL (exp)
3441 /* Loading a static variable is unduly expensive, but global
3442 registers aren't expensive. */
3443 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3446 /* The following functions are subroutines to fold_range_test and allow it to
3447 try to change a logical combination of comparisons into a range test.
3450 For example, X == 2 || X == 3 || X == 4 || X == 5
3454 can be converted to (unsigned) (X - 2) <= 3.
3456 We describe each set of comparisons as being either inside or outside
3457 a range, using a variable named like IN_P, and then describe the
3458 range with a lower and upper bound. If one of the bounds is omitted,
3459 it represents either the highest or lowest value of the type.
3461 In the comments below, we represent a range by two numbers in brackets
3462 preceded by a "+" to designate being inside that range, or a "-" to
3463 designate being outside that range, so the condition can be inverted by
3464 flipping the prefix. An omitted bound is represented by a "-". For
3465 example, "- [-, 10]" means being outside the range starting at the lowest
3466 possible value and ending at 10, in other words, being greater than 10.
3467 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3470 We set up things so that the missing bounds are handled in a consistent
3471 manner so neither a missing bound nor "true" and "false" need to be
3472 handled using a special case. */
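/* In this notation, the introductory example reads: X == 2 || X == 3
   || X == 4 || X == 5 is the range + [2, 5], its negation is
   - [2, 5], and (unsigned) (X - 2) <= 3 is exactly the + [2, 5]
   test. */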
3474 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3475 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3476 and UPPER1_P are nonzero if the respective argument is an upper bound
3477 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3478 must be specified for a comparison. ARG1 will be converted to ARG0's
3479 type if both are specified. */
3482 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3483 tree arg1, int upper1_p)
3489 /* If neither arg represents infinity, do the normal operation.
3490 Else, if not a comparison, return infinity. Else handle the special
3491 comparison rules. Note that most of the cases below won't occur, but
3492 are handled for consistency. */
3494 if (arg0 != 0 && arg1 != 0)
3496 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3497 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3499 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3502 if (TREE_CODE_CLASS (code) != tcc_comparison)
3505 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3506 for neither. In real maths, we cannot assume open ended ranges are
3507 the same. But, this is computer arithmetic, where numbers are finite.
3508 We can therefore make the transformation of any unbounded range with
3509 the value Z, Z being greater than any representable number. This permits
3510 us to treat unbounded ranges as equal. */
3511 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3512 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3516 result = sgn0 == sgn1;
3519 result = sgn0 != sgn1;
3522 result = sgn0 < sgn1;
3525 result = sgn0 <= sgn1;
3528 result = sgn0 > sgn1;
3531 result = sgn0 >= sgn1;
3537 return constant_boolean_node (result, type);
3540 /* Given EXP, a logical expression, set the range it is testing into
3541 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3542 actually being tested. *PLOW and *PHIGH will be made of the same type
3543 as the returned expression. If EXP is not a comparison, we will most
3544 likely not be returning a useful value and range. */
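/* As a sketch, make_range applied to X > 10 returns X and sets
   *PIN_P = 0, *PLOW = 0 (missing) and *PHIGH = 10: the "- [-, 10]"
   of the notation above, i.e. X lies outside [lowest, 10]. */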
3547 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3549 enum tree_code code;
3550 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3551 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3553 tree low, high, n_low, n_high;
3555 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3556 and see if we can refine the range. Some of the cases below may not
3557 happen, but it doesn't seem worth worrying about this. We "continue"
3558 the outer loop when we've changed something; otherwise we "break"
3559 the switch, which will "break" the while. */
3562 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3566 code = TREE_CODE (exp);
3567 exp_type = TREE_TYPE (exp);
3569 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3571 if (TREE_CODE_LENGTH (code) > 0)
3572 arg0 = TREE_OPERAND (exp, 0);
3573 if (TREE_CODE_CLASS (code) == tcc_comparison
3574 || TREE_CODE_CLASS (code) == tcc_unary
3575 || TREE_CODE_CLASS (code) == tcc_binary)
3576 arg0_type = TREE_TYPE (arg0);
3577 if (TREE_CODE_CLASS (code) == tcc_binary
3578 || TREE_CODE_CLASS (code) == tcc_comparison
3579 || (TREE_CODE_CLASS (code) == tcc_expression
3580 && TREE_CODE_LENGTH (code) > 1))
3581 arg1 = TREE_OPERAND (exp, 1);
3586 case TRUTH_NOT_EXPR:
3587 in_p = ! in_p, exp = arg0;
3590 case EQ_EXPR: case NE_EXPR:
3591 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3592 /* We can only do something if the range is testing for zero
3593 and if the second operand is an integer constant. Note that
3594 saying something is "in" the range we make is done by
3595 complementing IN_P since it will set in the initial case of
3596 being not equal to zero; "out" is leaving it alone. */
3597 if (low == 0 || high == 0
3598 || ! integer_zerop (low) || ! integer_zerop (high)
3599 || TREE_CODE (arg1) != INTEGER_CST)
3604 case NE_EXPR: /* - [c, c] */
3607 case EQ_EXPR: /* + [c, c] */
3608 in_p = ! in_p, low = high = arg1;
3610 case GT_EXPR: /* - [-, c] */
3611 low = 0, high = arg1;
3613 case GE_EXPR: /* + [c, -] */
3614 in_p = ! in_p, low = arg1, high = 0;
3616 case LT_EXPR: /* - [c, -] */
3617 low = arg1, high = 0;
3619 case LE_EXPR: /* + [-, c] */
3620 in_p = ! in_p, low = 0, high = arg1;
3626 /* If this is an unsigned comparison, we also know that EXP is
3627 greater than or equal to zero. We base the range tests we make
3628 on that fact, so we record it here so we can parse existing
3629 range tests. We test arg0_type since often the return type
3630 of, e.g. EQ_EXPR, is boolean. */
3631 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3633 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3635 fold_convert (arg0_type, integer_zero_node),
3639 in_p = n_in_p, low = n_low, high = n_high;
3641 /* If the high bound is missing, but we have a nonzero low
3642 bound, reverse the range so it goes from zero to the low bound
3643 minus one. */
3644 if (high == 0 && low && ! integer_zerop (low))
3647 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3648 integer_one_node, 0);
3649 low = fold_convert (arg0_type, integer_zero_node);
3657 /* (-x) IN [a,b] -> x in [-b, -a] */
3658 n_low = range_binop (MINUS_EXPR, exp_type,
3659 fold_convert (exp_type, integer_zero_node),
3661 n_high = range_binop (MINUS_EXPR, exp_type,
3662 fold_convert (exp_type, integer_zero_node),
3664 low = n_low, high = n_high;
3670 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3671 fold_convert (exp_type, integer_one_node));
3674 case PLUS_EXPR: case MINUS_EXPR:
3675 if (TREE_CODE (arg1) != INTEGER_CST)
3678 /* If EXP is signed, any overflow in the computation is undefined,
3679 so we don't worry about it so long as our computations on
3680 the bounds don't overflow. For unsigned, overflow is defined
3681 and this is exactly the right thing. */
3682 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3683 arg0_type, low, 0, arg1, 0);
3684 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3685 arg0_type, high, 1, arg1, 0);
3686 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3687 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3690 /* Check for an unsigned range which has wrapped around the maximum
3691 value thus making n_high < n_low, and normalize it. */
3692 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3694 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3695 integer_one_node, 0);
3696 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3697 integer_one_node, 0);
3699 /* If the range is of the form +/- [ x+1, x ], we won't
3700 be able to normalize it. But then, it represents the
3701 whole range or the empty set, so make it
3702 +/- [ -, - ]. */
3703 if (tree_int_cst_equal (n_low, low)
3704 && tree_int_cst_equal (n_high, high))
3710 low = n_low, high = n_high;
3715 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3716 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3719 if (! INTEGRAL_TYPE_P (arg0_type)
3720 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3721 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3724 n_low = low, n_high = high;
3727 n_low = fold_convert (arg0_type, n_low);
3730 n_high = fold_convert (arg0_type, n_high);
3733 /* If we're converting arg0 from an unsigned type, to exp,
3734 a signed type, we will be doing the comparison as unsigned.
3735 The tests above have already verified that LOW and HIGH
3736 are both positive.
3738 So we have to ensure that we will handle large unsigned
3739 values the same way that the current signed bounds treat
3740 negative values. */
3742 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3745 tree equiv_type = lang_hooks.types.type_for_mode
3746 (TYPE_MODE (arg0_type), 1);
3748 /* A range without an upper bound is, naturally, unbounded.
3749 Since convert would have cropped a very large value, use
3750 the max value for the destination type. */
3752 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3753 : TYPE_MAX_VALUE (arg0_type);
3755 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3756 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3757 fold_convert (arg0_type,
3759 fold_convert (arg0_type,
3760 integer_one_node)));
3762 /* If the low bound is specified, "and" the range with the
3763 range for which the original unsigned value will be
3764 positive. */
3767 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3768 1, n_low, n_high, 1,
3769 fold_convert (arg0_type,
3774 in_p = (n_in_p == in_p);
3778 /* Otherwise, "or" the range with the range of the input
3779 that will be interpreted as negative. */
3780 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3781 0, n_low, n_high, 1,
3782 fold_convert (arg0_type,
3787 in_p = (in_p != n_in_p);
3792 low = n_low, high = n_high;
3802 /* If EXP is a constant, we can evaluate whether this is true or false. */
3803 if (TREE_CODE (exp) == INTEGER_CST)
3805 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3807 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3813 *pin_p = in_p, *plow = low, *phigh = high;
3817 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3818 type, TYPE, return an expression to test if EXP is in (or out of, depending
3819 on IN_P) the range. Return 0 if the test couldn't be created. */
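/* Continuing the introductory example: build_range_check (TYPE, X, 1,
   2, 5) subtracts the low bound and switches to an unsigned
   comparison, producing the equivalent of (unsigned) (X - 2) <= 3. */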
3822 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3824 tree etype = TREE_TYPE (exp);
3829 value = build_range_check (type, exp, 1, low, high);
3831 return invert_truthvalue (value);
3836 if (low == 0 && high == 0)
3837 return fold_convert (type, integer_one_node);
3840 return fold (build2 (LE_EXPR, type, exp, high));
3843 return fold (build2 (GE_EXPR, type, exp, low));
3845 if (operand_equal_p (low, high, 0))
3846 return fold (build2 (EQ_EXPR, type, exp, low));
3848 if (integer_zerop (low))
3850 if (! TYPE_UNSIGNED (etype))
3852 etype = lang_hooks.types.unsigned_type (etype);
3853 high = fold_convert (etype, high);
3854 exp = fold_convert (etype, exp);
3856 return build_range_check (type, exp, 1, 0, high);
3859 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3860 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3862 unsigned HOST_WIDE_INT lo;
3866 prec = TYPE_PRECISION (etype);
3867 if (prec <= HOST_BITS_PER_WIDE_INT)
3870 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3874 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3875 lo = (unsigned HOST_WIDE_INT) -1;
3878 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3880 if (TYPE_UNSIGNED (etype))
3882 etype = lang_hooks.types.signed_type (etype);
3883 exp = fold_convert (etype, exp);
3885 return fold (build2 (GT_EXPR, type, exp,
3886 fold_convert (etype, integer_zero_node)));
3890 value = const_binop (MINUS_EXPR, high, low, 0);
3891 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3893 tree utype, minv, maxv;
3895 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3896 for the type in question, as we rely on this here. */
3897 switch (TREE_CODE (etype))
3902 utype = lang_hooks.types.unsigned_type (etype);
3903 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3904 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3905 integer_one_node, 1);
3906 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3907 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3911 high = fold_convert (etype, high);
3912 low = fold_convert (etype, low);
3913 exp = fold_convert (etype, exp);
3914 value = const_binop (MINUS_EXPR, high, low, 0);
3922 if (value != 0 && ! TREE_OVERFLOW (value))
3923 return build_range_check (type,
3924 fold (build2 (MINUS_EXPR, etype, exp, low)),
3925 1, fold_convert (etype, integer_zero_node),
3931 /* Given two ranges, see if we can merge them into one. Return 1 if we
3932 can, 0 if we can't. Set the output range into the specified parameters. */
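/* For example, merging + [2, 10] with + [5, 20] (both "in") gives the
   overlap + [5, 10]; merging + [2, 5] with + [7, 9] gives the
   always-false range, since the two do not intersect. */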
3935 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3936 tree high0, int in1_p, tree low1, tree high1)
3944 int lowequal = ((low0 == 0 && low1 == 0)
3945 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3946 low0, 0, low1, 0)));
3947 int highequal = ((high0 == 0 && high1 == 0)
3948 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3949 high0, 1, high1, 1)));
3951 /* Make range 0 be the range that starts first, or ends last if they
3952 start at the same value. Swap them if it isn't. */
3953 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3956 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3957 high1, 1, high0, 1))))
3959 temp = in0_p, in0_p = in1_p, in1_p = temp;
3960 tem = low0, low0 = low1, low1 = tem;
3961 tem = high0, high0 = high1, high1 = tem;
3964 /* Now flag two cases, whether the ranges are disjoint or whether the
3965 second range is totally subsumed in the first. Note that the tests
3966 below are simplified by the ones above. */
3967 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3968 high0, 1, low1, 0));
3969 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3970 high1, 1, high0, 1));
3972 /* We now have four cases, depending on whether we are including or
3973 excluding the two ranges. */
3976 /* If they don't overlap, the result is false. If the second range
3977 is a subset it is the result. Otherwise, the range is from the start
3978 of the second to the end of the first. */
3980 in_p = 0, low = high = 0;
3982 in_p = 1, low = low1, high = high1;
3984 in_p = 1, low = low1, high = high0;
3987 else if (in0_p && ! in1_p)
3989 /* If they don't overlap, the result is the first range. If they are
3990 equal, the result is false. If the second range is a subset of the
3991 first, and the ranges begin at the same place, we go from just after
3992 the end of the first range to the end of the second. If the second
3993 range is not a subset of the first, or if it is a subset and both
3994 ranges end at the same place, the range starts at the start of the
3995 first range and ends just before the second range.
3996 Otherwise, we can't describe this as a single range. */
3998 in_p = 1, low = low0, high = high0;
3999 else if (lowequal && highequal)
4000 in_p = 0, low = high = 0;
4001 else if (subset && lowequal)
4003 in_p = 1, high = high0;
4004 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4005 integer_one_node, 0);
4007 else if (! subset || highequal)
4009 in_p = 1, low = low0;
4010 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4011 integer_one_node, 0);
4017 else if (! in0_p && in1_p)
4019 /* If they don't overlap, the result is the second range. If the second
4020 is a subset of the first, the result is false. Otherwise,
4021 the range starts just after the first range and ends at the
4022 end of the second. */
4024 in_p = 1, low = low1, high = high1;
4025 else if (subset || highequal)
4026 in_p = 0, low = high = 0;
4029 in_p = 1, high = high1;
4030 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4031 integer_one_node, 0);
4037 /* The case where we are excluding both ranges. Here the complex case
4038 is if they don't overlap. In that case, the only time we have a
4039 range is if they are adjacent. If the second is a subset of the
4040 first, the result is the first. Otherwise, the range to exclude
4041 starts at the beginning of the first range and ends at the end of the
4045 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4046 range_binop (PLUS_EXPR, NULL_TREE,
4048 integer_one_node, 1),
4050 in_p = 0, low = low0, high = high1;
4053 /* Canonicalize - [min, x] into - [-, x]. */
4054 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4055 switch (TREE_CODE (TREE_TYPE (low0)))
4058 if (TYPE_PRECISION (TREE_TYPE (low0))
4059 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4064 if (tree_int_cst_equal (low0,
4065 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4069 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4070 && integer_zerop (low0))
4077 /* Canonicalize - [x, max] into - [x, -]. */
4078 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4079 switch (TREE_CODE (TREE_TYPE (high1)))
4082 if (TYPE_PRECISION (TREE_TYPE (high1))
4083 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4088 if (tree_int_cst_equal (high1,
4089 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4093 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4094 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4096 integer_one_node, 1)))
4103 /* The ranges might also be adjacent between the maximum and
4104 minimum values of the given type. For
4105 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4106 return + [x + 1, y - 1]. */
4107 if (low0 == 0 && high1 == 0)
4109 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4110 integer_one_node, 1);
4111 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4112 integer_one_node, 0);
4113 if (low == 0 || high == 0)
4123 in_p = 0, low = low0, high = high0;
4125 in_p = 0, low = low0, high = high1;
4128 *pin_p = in_p, *plow = low, *phigh = high;
4133 /* Subroutine of fold, looking inside expressions of the form
4134 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4135 of the COND_EXPR. This function is being used also to optimize
4136 A op B ? C : A, by reversing the comparison first.
4138 Return a folded expression whose code is not a COND_EXPR
4139 anymore, or NULL_TREE if no folding opportunity is found. */
4142 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4144 enum tree_code comp_code = TREE_CODE (arg0);
4145 tree arg00 = TREE_OPERAND (arg0, 0);
4146 tree arg01 = TREE_OPERAND (arg0, 1);
4147 tree arg1_type = TREE_TYPE (arg1);
4153 /* If we have A op 0 ? A : -A, consider applying the following
4156 A == 0? A : -A same as -A
4157 A != 0? A : -A same as A
4158 A >= 0? A : -A same as abs (A)
4159 A > 0? A : -A same as abs (A)
4160 A <= 0? A : -A same as -abs (A)
4161 A < 0? A : -A same as -abs (A)
4163 None of these transformations work for modes with signed
4164 zeros. If A is +/-0, the first two transformations will
4165 change the sign of the result (from +0 to -0, or vice
4166 versa). The last four will fix the sign of the result,
4167 even though the original expressions could be positive or
4168 negative, depending on the sign of A.
4170 Note that all these transformations are correct if A is
4171 NaN, since the two alternatives (A and -A) are also NaNs. */
4172 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4173 ? real_zerop (arg01)
4174 : integer_zerop (arg01))
4175 && TREE_CODE (arg2) == NEGATE_EXPR
4176 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4181 tem = fold_convert (arg1_type, arg1);
4182 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4185 return pedantic_non_lvalue (fold_convert (type, arg1));
4188 if (flag_trapping_math)
4193 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4194 arg1 = fold_convert (lang_hooks.types.signed_type
4195 (TREE_TYPE (arg1)), arg1);
4196 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4197 return pedantic_non_lvalue (fold_convert (type, tem));
4200 if (flag_trapping_math)
4204 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4205 arg1 = fold_convert (lang_hooks.types.signed_type
4206 (TREE_TYPE (arg1)), arg1);
4207 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4208 return negate_expr (fold_convert (type, tem));
4210 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4214 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4215 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4216 both transformations are correct when A is NaN: A != 0
4217 is then true, and A == 0 is false. */
4219 if (integer_zerop (arg01) && integer_zerop (arg2))
4221 if (comp_code == NE_EXPR)
4222 return pedantic_non_lvalue (fold_convert (type, arg1));
4223 else if (comp_code == EQ_EXPR)
4224 return fold_convert (type, integer_zero_node);
4227 /* Try some transformations of A op B ? A : B.
4229 A == B? A : B same as B
4230 A != B? A : B same as A
4231 A >= B? A : B same as max (A, B)
4232 A > B? A : B same as max (B, A)
4233 A <= B? A : B same as min (A, B)
4234 A < B? A : B same as min (B, A)
4236 As above, these transformations don't work in the presence
4237 of signed zeros. For example, if A and B are zeros of
4238 opposite sign, the first two transformations will change
4239 the sign of the result. In the last four, the original
4240 expressions give different results for (A=+0, B=-0) and
4241 (A=-0, B=+0), but the transformed expressions do not.
4243 The first two transformations are correct if either A or B
4244 is a NaN. In the first transformation, the condition will
4245 be false, and B will indeed be chosen. In the case of the
4246 second transformation, the condition A != B will be true,
4247 and A will be chosen.
4249 The conversions to max() and min() are not correct if B is
4250 a number and A is not. The conditions in the original
4251 expressions will be false, so all four give B. The min()
4252 and max() versions would give a NaN instead. */
4253 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4255 tree comp_op0 = arg00;
4256 tree comp_op1 = arg01;
4257 tree comp_type = TREE_TYPE (comp_op0);
4259 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4260 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4270 return pedantic_non_lvalue (fold_convert (type, arg2));
4272 return pedantic_non_lvalue (fold_convert (type, arg1));
4277 /* In C++ a ?: expression can be an lvalue, so put the
4278 operand which will be used if they are equal first
4279 so that we can convert this back to the
4280 corresponding COND_EXPR. */
4281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4283 comp_op0 = fold_convert (comp_type, comp_op0);
4284 comp_op1 = fold_convert (comp_type, comp_op1);
4285 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4286 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4287 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4288 return pedantic_non_lvalue (fold_convert (type, tem));
4295 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4297 comp_op0 = fold_convert (comp_type, comp_op0);
4298 comp_op1 = fold_convert (comp_type, comp_op1);
4299 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4300 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4301 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4302 return pedantic_non_lvalue (fold_convert (type, tem));
4306 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4307 return pedantic_non_lvalue (fold_convert (type, arg2));
4310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4311 return pedantic_non_lvalue (fold_convert (type, arg1));
4314 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4319 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4320 we might still be able to simplify this. For example,
4321 if C1 is one less or one more than C2, this might have started
4322 out as a MIN or MAX and been transformed by this function.
4323 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4325 if (INTEGRAL_TYPE_P (type)
4326 && TREE_CODE (arg01) == INTEGER_CST
4327 && TREE_CODE (arg2) == INTEGER_CST)
4331 /* We can replace A with C1 in this case. */
4332 arg1 = fold_convert (type, arg01);
4333 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4336 /* If C1 is C2 + 1, this is min(A, C2). */
4337 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4339 && operand_equal_p (arg01,
4340 const_binop (PLUS_EXPR, arg2,
4341 integer_one_node, 0),
4343 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4344 type, arg1, arg2)));
4348 /* If C1 is C2 - 1, this is min(A, C2). */
4349 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4351 && operand_equal_p (arg01,
4352 const_binop (MINUS_EXPR, arg2,
4353 integer_one_node, 0),
4355 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4356 type, arg1, arg2)));
4360 /* If C1 is C2 - 1, this is max(A, C2). */
4361 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4363 && operand_equal_p (arg01,
4364 const_binop (MINUS_EXPR, arg2,
4365 integer_one_node, 0),
4367 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4368 type, arg1, arg2)));
4372 /* If C1 is C2 + 1, this is max(A, C2). */
4373 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4375 && operand_equal_p (arg01,
4376 const_binop (PLUS_EXPR, arg2,
4377 integer_one_node, 0),
4379 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4380 type, arg1, arg2)));
4393 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4394 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4397 /* EXP is some logical combination of boolean tests. See if we can
4398 merge it into some range test. Return the new tree if so. */
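/* For instance, X >= 2 && X <= 5 merges into the single check
   + [2, 5], built as (unsigned) (X - 2) <= 3; for X < 2 || X > 5 the
   same range is built and the result is then inverted. */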
4401 fold_range_test (tree exp)
4403 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4404 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4405 int in0_p, in1_p, in_p;
4406 tree low0, low1, low, high0, high1, high;
4407 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4408 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4411 /* If this is an OR operation, invert both sides; we will invert
4412 again at the end. */
4414 in0_p = ! in0_p, in1_p = ! in1_p;
4416 /* If both expressions are the same, if we can merge the ranges, and we
4417 can build the range test, return it or its inversion.  If one of the
4418 ranges is always true or always false, consider it to be the same
4419 expression as the other. */
4420 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4421 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4423 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4425 : rhs != 0 ? rhs : integer_zero_node,
4427 return or_op ? invert_truthvalue (tem) : tem;
4429 /* On machines where branches are expensive, if this is a
4430 short-circuited branch and the underlying object on both sides
4431 is the same, make a non-short-circuit operation. */
4432 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4433 && lhs != 0 && rhs != 0
4434 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4435 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4436 && operand_equal_p (lhs, rhs, 0))
4438 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4439 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4440 which cases we can't do this. */
4441 if (simple_operand_p (lhs))
4442 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4443 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4444 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4445 TREE_OPERAND (exp, 1));
4447 else if (lang_hooks.decls.global_bindings_p () == 0
4448 && ! CONTAINS_PLACEHOLDER_P (lhs))
4450 tree common = save_expr (lhs);
4452 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4453 or_op ? ! in0_p : in0_p,
4455 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4456 or_op ? ! in1_p : in1_p,
4458 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4460 TREE_TYPE (exp), lhs, rhs);
4467 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4468 bit value. Arrange things so the extra bits will be set to zero if and
4469 only if C is sign-extended to its full width.  If MASK is nonzero,
4470 it is an INTEGER_CST that should be AND'ed with the extra bits. */
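/* Illustration, assuming P = 8 in a 32-bit mode and ignoring MASK:
   C = 0xffffff80 (-128 sign-extended) yields 0x00000080, whose extra
   bits are all zero, while C = 0x00000080 (not sign-extended) yields
   0xffffff80, whose extra bits are all set.  */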
4473 unextend (tree c, int p, int unsignedp, tree mask)
4475 tree type = TREE_TYPE (c);
4476 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4479 if (p == modesize || unsignedp)
4482 /* We work by getting just the sign bit into the low-order bit, then
4483 into the high-order bit, then sign-extend.  We then XOR that value
4484 with C.  */
4485 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4486 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4488 /* We must use a signed type in order to get an arithmetic right shift.
4489 However, we must also avoid introducing accidental overflows, so that
4490 a subsequent call to integer_zerop will work. Hence we must
4491 do the type conversion here. At this point, the constant is either
4492 zero or one, and the conversion to a signed type can never overflow.
4493 We could get an overflow if this conversion is done anywhere else. */
4494 if (TYPE_UNSIGNED (type))
4495 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4497 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4498 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4500 temp = const_binop (BIT_AND_EXPR, temp,
4501 fold_convert (TREE_TYPE (c), mask), 0);
4502 /* If necessary, convert the type back to match the type of C. */
4503 if (TYPE_UNSIGNED (type))
4504 temp = fold_convert (type, temp);
4506 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4509 /* Find ways of folding logical expressions of LHS and RHS:
4510 Try to merge two comparisons to the same innermost item.
4511 Look for range tests like "ch >= '0' && ch <= '9'".
4512 Look for combinations of simple terms on machines with expensive branches
4513 and evaluate the RHS unconditionally.
4515 For example, if we have p->a == 2 && p->b == 4 and we can make an
4516 object large enough to span both A and B, we can do this with a comparison
4517 against the object ANDed with a mask.
4519 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4520 operations to do this with one comparison.
4522 We check for both normal comparisons and the BIT_AND_EXPRs made by
4523 this function and the one above.
4525 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4526 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4528 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4529 two operands.
4531 We return the simplified tree or 0 if no optimization is possible. */
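/* Illustrative sketch: given `struct s { unsigned a : 8; unsigned b : 8; }'
   and a little-endian target, `p->a == 2 && p->b == 4' can load the 16
   bits spanning both fields once and test (bits & 0xffff) == 0x0402;
   the exact mask and constant depend on the layout chosen below.  */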
4534 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4536 /* If this is the "or" of two comparisons, we can do something if
4537 the comparisons are NE_EXPR. If this is the "and", we can do something
4538 if the comparisons are EQ_EXPR. I.e.,
4539 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4541 WANTED_CODE is this operation code. For single bit fields, we can
4542 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4543 comparison for one-bit fields. */
4545 enum tree_code wanted_code;
4546 enum tree_code lcode, rcode;
4547 tree ll_arg, lr_arg, rl_arg, rr_arg;
4548 tree ll_inner, lr_inner, rl_inner, rr_inner;
4549 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4550 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4551 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4552 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4553 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4554 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4555 enum machine_mode lnmode, rnmode;
4556 tree ll_mask, lr_mask, rl_mask, rr_mask;
4557 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4558 tree l_const, r_const;
4559 tree lntype, rntype, result;
4560 int first_bit, end_bit;
4563 /* Start by getting the comparison codes. Fail if anything is volatile.
4564 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4565 it were surrounded with a NE_EXPR. */
4567 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4570 lcode = TREE_CODE (lhs);
4571 rcode = TREE_CODE (rhs);
4573 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4575 lhs = build2 (NE_EXPR, truth_type, lhs,
4576 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4580 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4582 rhs = build2 (NE_EXPR, truth_type, rhs,
4583 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4587 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4588 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4591 ll_arg = TREE_OPERAND (lhs, 0);
4592 lr_arg = TREE_OPERAND (lhs, 1);
4593 rl_arg = TREE_OPERAND (rhs, 0);
4594 rr_arg = TREE_OPERAND (rhs, 1);
4596 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4597 if (simple_operand_p (ll_arg)
4598 && simple_operand_p (lr_arg))
4601 if (operand_equal_p (ll_arg, rl_arg, 0)
4602 && operand_equal_p (lr_arg, rr_arg, 0))
4604 result = combine_comparisons (code, lcode, rcode,
4605 truth_type, ll_arg, lr_arg);
4609 else if (operand_equal_p (ll_arg, rr_arg, 0)
4610 && operand_equal_p (lr_arg, rl_arg, 0))
4612 result = combine_comparisons (code, lcode,
4613 swap_tree_comparison (rcode),
4614 truth_type, ll_arg, lr_arg);
4620 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4621 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4623 /* If the RHS can be evaluated unconditionally and its operands are
4624 simple, it wins to evaluate the RHS unconditionally on machines
4625 with expensive branches. In this case, this isn't a comparison
4626 that can be merged. Avoid doing this if the RHS is a floating-point
4627 comparison since those can trap. */
4629 if (BRANCH_COST >= 2
4630 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4631 && simple_operand_p (rl_arg)
4632 && simple_operand_p (rr_arg))
4634 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4635 if (code == TRUTH_OR_EXPR
4636 && lcode == NE_EXPR && integer_zerop (lr_arg)
4637 && rcode == NE_EXPR && integer_zerop (rr_arg)
4638 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4639 return build2 (NE_EXPR, truth_type,
4640 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4642 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4644 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4645 if (code == TRUTH_AND_EXPR
4646 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4647 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4648 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4649 return build2 (EQ_EXPR, truth_type,
4650 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4652 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4654 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4655 return build2 (code, truth_type, lhs, rhs);
4658 /* See if the comparisons can be merged.  Then get all the parameters for
4659 each side.  */
4661 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4662 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4666 ll_inner = decode_field_reference (ll_arg,
4667 &ll_bitsize, &ll_bitpos, &ll_mode,
4668 &ll_unsignedp, &volatilep, &ll_mask,
4670 lr_inner = decode_field_reference (lr_arg,
4671 &lr_bitsize, &lr_bitpos, &lr_mode,
4672 &lr_unsignedp, &volatilep, &lr_mask,
4674 rl_inner = decode_field_reference (rl_arg,
4675 &rl_bitsize, &rl_bitpos, &rl_mode,
4676 &rl_unsignedp, &volatilep, &rl_mask,
4678 rr_inner = decode_field_reference (rr_arg,
4679 &rr_bitsize, &rr_bitpos, &rr_mode,
4680 &rr_unsignedp, &volatilep, &rr_mask,
4683 /* The inner operation on the lhs of each comparison must be the same
4684 if we are to be able to do anything.  Then see if we have
4685 constants.  If not, the same must be true for the rhs.  */
4687 if (volatilep || ll_inner == 0 || rl_inner == 0
4688 || ! operand_equal_p (ll_inner, rl_inner, 0))
4691 if (TREE_CODE (lr_arg) == INTEGER_CST
4692 && TREE_CODE (rr_arg) == INTEGER_CST)
4693 l_const = lr_arg, r_const = rr_arg;
4694 else if (lr_inner == 0 || rr_inner == 0
4695 || ! operand_equal_p (lr_inner, rr_inner, 0))
4698 l_const = r_const = 0;
4700 /* If either comparison code is not correct for our logical operation,
4701 fail. However, we can convert a one-bit comparison against zero into
4702 the opposite comparison against that bit being set in the field. */
4704 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4705 if (lcode != wanted_code)
4707 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4709 /* Make the left operand unsigned, since we are only interested
4710 in the value of one bit.  Otherwise we are doing the wrong
4711 thing below.  */
4719 /* This is analogous to the code for l_const above. */
4720 if (rcode != wanted_code)
4722 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4731 /* After this point all optimizations will generate bit-field
4732 references, which we might not want. */
4733 if (! lang_hooks.can_use_bit_fields_p ())
4736 /* See if we can find a mode that contains both fields being compared on
4737 the left. If we can't, fail. Otherwise, update all constants and masks
4738 to be relative to a field of that size. */
4739 first_bit = MIN (ll_bitpos, rl_bitpos);
4740 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4741 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4742 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4744 if (lnmode == VOIDmode)
4747 lnbitsize = GET_MODE_BITSIZE (lnmode);
4748 lnbitpos = first_bit & ~ (lnbitsize - 1);
4749 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4750 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4752 if (BYTES_BIG_ENDIAN)
4754 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4755 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4758 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4759 size_int (xll_bitpos), 0);
4760 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4761 size_int (xrl_bitpos), 0);
4765 l_const = fold_convert (lntype, l_const);
4766 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4767 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4768 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4769 fold (build1 (BIT_NOT_EXPR,
4773 warning ("comparison is always %d", wanted_code == NE_EXPR);
4775 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4780 r_const = fold_convert (lntype, r_const);
4781 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4782 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4783 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4784 fold (build1 (BIT_NOT_EXPR,
4788 warning ("comparison is always %d", wanted_code == NE_EXPR);
4790 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4794 /* If the right sides are not constant, do the same for them.  Also,
4795 disallow this optimization if a size or signedness mismatch occurs
4796 between the left and right sides. */
4799 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4800 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4801 /* Make sure the two fields on the right
4802 correspond to the left without being swapped. */
4803 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4806 first_bit = MIN (lr_bitpos, rr_bitpos);
4807 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4808 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4809 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4811 if (rnmode == VOIDmode)
4814 rnbitsize = GET_MODE_BITSIZE (rnmode);
4815 rnbitpos = first_bit & ~ (rnbitsize - 1);
4816 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4817 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4819 if (BYTES_BIG_ENDIAN)
4821 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4822 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4825 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4826 size_int (xlr_bitpos), 0);
4827 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4828 size_int (xrr_bitpos), 0);
4830 /* Make a mask that corresponds to both fields being compared.
4831 Do this for both items being compared. If the operands are the
4832 same size and the bits being compared are in the same position
4833 then we can do this by masking both and comparing the masked
4834 results.  */
4835 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4836 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4837 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4839 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4840 ll_unsignedp || rl_unsignedp);
4841 if (! all_ones_mask_p (ll_mask, lnbitsize))
4842 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4844 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4845 lr_unsignedp || rr_unsignedp);
4846 if (! all_ones_mask_p (lr_mask, rnbitsize))
4847 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4849 return build2 (wanted_code, truth_type, lhs, rhs);
4852 /* There is still another way we can do something: If both pairs of
4853 fields being compared are adjacent, we may be able to make a wider
4854 field containing them both.
4856 Note that we still must mask the lhs/rhs expressions. Furthermore,
4857 the mask must be shifted to account for the shift done by
4858 make_bit_field_ref. */
4859 if ((ll_bitsize + ll_bitpos == rl_bitpos
4860 && lr_bitsize + lr_bitpos == rr_bitpos)
4861 || (ll_bitpos == rl_bitpos + rl_bitsize
4862 && lr_bitpos == rr_bitpos + rr_bitsize))
4866 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4867 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4868 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4869 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4871 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4872 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4873 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4874 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4876 /* Convert to the smaller type before masking out unwanted bits. */
4878 if (lntype != rntype)
4880 if (lnbitsize > rnbitsize)
4882 lhs = fold_convert (rntype, lhs);
4883 ll_mask = fold_convert (rntype, ll_mask);
4886 else if (lnbitsize < rnbitsize)
4888 rhs = fold_convert (lntype, rhs);
4889 lr_mask = fold_convert (lntype, lr_mask);
4894 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4895 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4897 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4898 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4900 return build2 (wanted_code, truth_type, lhs, rhs);
4906 /* Handle the case of comparisons with constants. If there is something in
4907 common between the masks, those bits of the constants must be the same.
4908 If not, the condition is always false. Test for this to avoid generating
4909 incorrect code below. */
4910 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4911 if (! integer_zerop (result)
4912 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4913 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4915 if (wanted_code == NE_EXPR)
4917 warning ("%<or%> of unmatched not-equal tests is always 1");
4918 return constant_boolean_node (true, truth_type);
4922 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4923 return constant_boolean_node (false, truth_type);
4927 /* Construct the expression we will return. First get the component
4928 reference we will make. Unless the mask is all ones the width of
4929 that field, perform the mask operation.  Then compare with the
4930 merged constant.  */
4931 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4932 ll_unsignedp || rl_unsignedp);
4934 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4935 if (! all_ones_mask_p (ll_mask, lnbitsize))
4936 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4938 return build2 (wanted_code, truth_type, result,
4939 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4942 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4943 constant.  */
4946 optimize_minmax_comparison (tree t)
4948 tree type = TREE_TYPE (t);
4949 tree arg0 = TREE_OPERAND (t, 0);
4950 enum tree_code op_code;
4951 tree comp_const = TREE_OPERAND (t, 1);
4953 int consts_equal, consts_lt;
4956 STRIP_SIGN_NOPS (arg0);
4958 op_code = TREE_CODE (arg0);
4959 minmax_const = TREE_OPERAND (arg0, 1);
4960 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4961 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4962 inner = TREE_OPERAND (arg0, 0);
4964 /* If something does not permit us to optimize, return the original tree. */
4965 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4966 || TREE_CODE (comp_const) != INTEGER_CST
4967 || TREE_CONSTANT_OVERFLOW (comp_const)
4968 || TREE_CODE (minmax_const) != INTEGER_CST
4969 || TREE_CONSTANT_OVERFLOW (minmax_const))
4972 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4973 and GT_EXPR, doing the rest with recursive calls using logical
4974 simplifications.  */
4975 switch (TREE_CODE (t))
4977 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4979 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4983 fold (build2 (TRUTH_ORIF_EXPR, type,
4984 optimize_minmax_comparison
4985 (build2 (EQ_EXPR, type, arg0, comp_const)),
4986 optimize_minmax_comparison
4987 (build2 (GT_EXPR, type, arg0, comp_const))));
4990 if (op_code == MAX_EXPR && consts_equal)
4991 /* MAX (X, 0) == 0 -> X <= 0 */
4992 return fold (build2 (LE_EXPR, type, inner, comp_const));
4994 else if (op_code == MAX_EXPR && consts_lt)
4995 /* MAX (X, 0) == 5 -> X == 5 */
4996 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4998 else if (op_code == MAX_EXPR)
4999 /* MAX (X, 0) == -1 -> false */
5000 return omit_one_operand (type, integer_zero_node, inner);
5002 else if (consts_equal)
5003 /* MIN (X, 0) == 0 -> X >= 0 */
5004 return fold (build2 (GE_EXPR, type, inner, comp_const));
5007 /* MIN (X, 0) == 5 -> false */
5008 return omit_one_operand (type, integer_zero_node, inner);
5011 /* MIN (X, 0) == -1 -> X == -1 */
5012 return fold (build2 (EQ_EXPR, type, inner, comp_const));
5015 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5016 /* MAX (X, 0) > 0 -> X > 0
5017 MAX (X, 0) > 5 -> X > 5 */
5018 return fold (build2 (GT_EXPR, type, inner, comp_const));
5020 else if (op_code == MAX_EXPR)
5021 /* MAX (X, 0) > -1 -> true */
5022 return omit_one_operand (type, integer_one_node, inner);
5024 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5025 /* MIN (X, 0) > 0 -> false
5026 MIN (X, 0) > 5 -> false */
5027 return omit_one_operand (type, integer_zero_node, inner);
5030 /* MIN (X, 0) > -1 -> X > -1 */
5031 return fold (build2 (GT_EXPR, type, inner, comp_const));
5038 /* T is an integer expression that is being multiplied, divided, or reduced
5039 modulo (CODE says which, and what kind of divide or modulus) a
5040 constant C. See if we can eliminate that operation by folding it with
5041 other operations already in T. WIDE_TYPE, if non-null, is a type that
5042 should be used for the computation if wider than our type.
5044 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5045 (X * 2) + (Y * 4). We must, however, be assured that either the original
5046 expression would not overflow or that overflow is undefined for the type
5047 in the language in question.
5049 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5050 the machine has a multiply-accumulate insn or that this is part of an
5051 addressing calculation.
5053 If we return a non-null expression, it is an equivalent form of the
5054 original computation, but need not be in the original type. */
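/* Worked instance of the example above, assuming signed arithmetic
   whose overflow is undefined: for ((x * 8) + (y * 16)) / 4, the
   recursion folds x * 8 to x * 2 and y * 16 to y * 4; both addends
   are multiples of 4, so the result is (x * 2) + (y * 4).  */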
5057 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5059 /* To avoid exponential search depth, refuse to allow recursion past
5060 three levels. Beyond that (1) it's highly unlikely that we'll find
5061 something interesting and (2) we've probably processed it before
5062 when we built the inner expression. */
5071 ret = extract_muldiv_1 (t, c, code, wide_type);
5078 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5080 tree type = TREE_TYPE (t);
5081 enum tree_code tcode = TREE_CODE (t);
5082 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5083 > GET_MODE_SIZE (TYPE_MODE (type)))
5084 ? wide_type : type);
5086 int same_p = tcode == code;
5087 tree op0 = NULL_TREE, op1 = NULL_TREE;
5089 /* Don't deal with constants of zero here; they confuse the code below. */
5090 if (integer_zerop (c))
5093 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5094 op0 = TREE_OPERAND (t, 0);
5096 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5097 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5099 /* Note that we need not handle conditional operations here since fold
5100 already handles those cases. So just do arithmetic here. */
5104 /* For a constant, we can always simplify if we are a multiply
5105 or (for divide and modulus) if it is a multiple of our constant. */
5106 if (code == MULT_EXPR
5107 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5108 return const_binop (code, fold_convert (ctype, t),
5109 fold_convert (ctype, c), 0);
5112 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5113 /* If op0 is an expression ... */
5114 if ((COMPARISON_CLASS_P (op0)
5115 || UNARY_CLASS_P (op0)
5116 || BINARY_CLASS_P (op0)
5117 || EXPRESSION_CLASS_P (op0))
5118 /* ... and is unsigned, and its type is smaller than ctype,
5119 then we cannot pass through as widening. */
5120 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5121 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5122 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5123 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5124 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5125 /* ... or this is a truncation (t is narrower than op0),
5126 then we cannot pass through this narrowing. */
5127 || (GET_MODE_SIZE (TYPE_MODE (type))
5128 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5129 /* ... or signedness changes for division or modulus,
5130 then we cannot pass through this conversion. */
5131 || (code != MULT_EXPR
5132 && (TYPE_UNSIGNED (ctype)
5133 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5136 /* Pass the constant down and see if we can make a simplification. If
5137 we can, replace this expression with the inner simplification for
5138 possible later conversion to our or some other type. */
5139 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5140 && TREE_CODE (t2) == INTEGER_CST
5141 && ! TREE_CONSTANT_OVERFLOW (t2)
5142 && (0 != (t1 = extract_muldiv (op0, t2, code,
5144 ? ctype : NULL_TREE))))
5149 /* If widening the type changes it from signed to unsigned, then we
5150 must avoid building ABS_EXPR itself as unsigned. */
5151 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5153 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5154 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5156 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5157 return fold_convert (ctype, t1);
5163 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5164 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5167 case MIN_EXPR: case MAX_EXPR:
5168 /* If widening the type changes the signedness, then we can't perform
5169 this optimization as that changes the result. */
5170 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5173 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5174 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5175 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5177 if (tree_int_cst_sgn (c) < 0)
5178 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5180 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5181 fold_convert (ctype, t2)));
5185 case LSHIFT_EXPR: case RSHIFT_EXPR:
5186 /* If the second operand is constant, this is a multiplication
5187 or floor division by a power of two, so we can treat it that
5188 way unless the multiplier or divisor overflows. Signed
5189 left-shift overflow is implementation-defined rather than
5190 undefined in C90, so do not convert signed left shift into
5191 multiplication.  */
5192 if (TREE_CODE (op1) == INTEGER_CST
5193 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5194 /* const_binop may not detect overflow correctly,
5195 so check for it explicitly here. */
5196 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5197 && TREE_INT_CST_HIGH (op1) == 0
5198 && 0 != (t1 = fold_convert (ctype,
5199 const_binop (LSHIFT_EXPR,
5202 && ! TREE_OVERFLOW (t1))
5203 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5204 ? MULT_EXPR : FLOOR_DIV_EXPR,
5205 ctype, fold_convert (ctype, op0), t1),
5206 c, code, wide_type);
5209 case PLUS_EXPR: case MINUS_EXPR:
5210 /* See if we can eliminate the operation on both sides. If we can, we
5211 can return a new PLUS or MINUS. If we can't, the only remaining
5212 cases where we can do anything are if the second operand is a
5213 constant.  */
5214 t1 = extract_muldiv (op0, c, code, wide_type);
5215 t2 = extract_muldiv (op1, c, code, wide_type);
5216 if (t1 != 0 && t2 != 0
5217 && (code == MULT_EXPR
5218 /* If not multiplication, we can only do this if both operands
5219 are divisible by c. */
5220 || (multiple_of_p (ctype, op0, c)
5221 && multiple_of_p (ctype, op1, c))))
5222 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5223 fold_convert (ctype, t2)));
5225 /* If this was a subtraction, negate OP1 and set it to be an addition.
5226 This simplifies the logic below. */
5227 if (tcode == MINUS_EXPR)
5228 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5230 if (TREE_CODE (op1) != INTEGER_CST)
5233 /* If either OP1 or C is negative, this optimization is not safe for
5234 some of the division and remainder types while for others we need
5235 to change the code. */
5236 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5238 if (code == CEIL_DIV_EXPR)
5239 code = FLOOR_DIV_EXPR;
5240 else if (code == FLOOR_DIV_EXPR)
5241 code = CEIL_DIV_EXPR;
5242 else if (code != MULT_EXPR
5243 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5247 /* If it's a multiply or a division/modulus operation of a multiple
5248 of our constant, do the operation and verify it doesn't overflow. */
5249 if (code == MULT_EXPR
5250 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5252 op1 = const_binop (code, fold_convert (ctype, op1),
5253 fold_convert (ctype, c), 0);
5254 /* We allow the constant to overflow with wrapping semantics. */
5256 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5262 /* If we have an unsigned type that is not a sizetype, we cannot widen
5263 the operation since it will change the result if the original
5264 computation overflowed. */
5265 if (TYPE_UNSIGNED (ctype)
5266 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5270 /* If we were able to eliminate our operation from the first side,
5271 apply our operation to the second side and reform the PLUS. */
5272 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5273 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5275 /* The last case is if we are a multiply. In that case, we can
5276 apply the distributive law to commute the multiply and addition
5277 if the multiplication of the constants doesn't overflow. */
5278 if (code == MULT_EXPR)
5279 return fold (build2 (tcode, ctype,
5280 fold (build2 (code, ctype,
5281 fold_convert (ctype, op0),
5282 fold_convert (ctype, c))),
5288 /* We have a special case here if we are doing something like
5289 (C * 8) % 4 since we know that's zero. */
5290 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5291 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5292 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5293 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5294 return omit_one_operand (type, integer_zero_node, op0);
5296 /* ... fall through ... */
5298 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5299 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5300 /* If we can extract our operation from the LHS, do so and return a
5301 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5302 do something only if the second operand is a constant. */
5304 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5305 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5306 fold_convert (ctype, op1)));
5307 else if (tcode == MULT_EXPR && code == MULT_EXPR
5308 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5309 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5310 fold_convert (ctype, t1)));
5311 else if (TREE_CODE (op1) != INTEGER_CST)
5314 /* If these are the same operation types, we can associate them
5315 assuming no overflow. */
5317 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5318 fold_convert (ctype, c), 0))
5319 && ! TREE_OVERFLOW (t1))
5320 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5322 /* If these operations "cancel" each other, we have the main
5323 optimizations of this pass, which occur when either constant is a
5324 multiple of the other, in which case we replace this with either an
5325 operation of CODE or TCODE.
5327 If we have an unsigned type that is not a sizetype, we cannot do
5328 this since it will change the result if the original computation
5329 overflowed.  */
5330 if ((! TYPE_UNSIGNED (ctype)
5331 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5333 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5334 || (tcode == MULT_EXPR
5335 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5336 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5338 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5339 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5340 fold_convert (ctype,
5341 const_binop (TRUNC_DIV_EXPR,
5343 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5344 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5345 fold_convert (ctype,
5346 const_binop (TRUNC_DIV_EXPR,
5358 /* Return a node which has the indicated constant VALUE (either 0 or
5359 1), and is of the indicated TYPE. */
5362 constant_boolean_node (int value, tree type)
5364 if (type == integer_type_node)
5365 return value ? integer_one_node : integer_zero_node;
5366 else if (type == boolean_type_node)
5367 return value ? boolean_true_node : boolean_false_node;
5368 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5369 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5370 : integer_zero_node);
5372 return build_int_cst (type, value);
5375 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5376 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5377 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5378 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5379 COND is the first argument to CODE; otherwise (as in the example
5380 given here), it is the second argument. TYPE is the type of the
5381 original expression.  Return NULL_TREE if no simplification is
5382 possible.  */
5385 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5386 tree cond, tree arg, int cond_first_p)
5388 tree test, true_value, false_value;
5389 tree lhs = NULL_TREE;
5390 tree rhs = NULL_TREE;
5392 /* This transformation is only worthwhile if we don't have to wrap
5393 arg in a SAVE_EXPR, and the operation can be simplified on at least
5394 one of the branches once it is pushed inside the COND_EXPR.  */
5395 if (!TREE_CONSTANT (arg))
5398 if (TREE_CODE (cond) == COND_EXPR)
5400 test = TREE_OPERAND (cond, 0);
5401 true_value = TREE_OPERAND (cond, 1);
5402 false_value = TREE_OPERAND (cond, 2);
5403 /* If this operand throws an exception, then it does not make
5404 sense to try to perform a logical or arithmetic operation
5405 involving it.  */
5406 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5408 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5413 tree testtype = TREE_TYPE (cond);
5415 true_value = constant_boolean_node (true, testtype);
5416 false_value = constant_boolean_node (false, testtype);
5420 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5421 : build2 (code, type, arg, true_value));
5423 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5424 : build2 (code, type, arg, false_value));
5426 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5427 return fold_convert (type, test);
5431 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5433 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5434 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5435 ADDEND is the same as X.
5437 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5438 and finite. The problematic cases are when X is zero, and its mode
5439 has signed zeros. In the case of rounding towards -infinity,
5440 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5441 modes, X + 0 is not the same as X because -0 + 0 is 0. */
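/* Concretely: rounding towards -infinity gives +0.0 - +0.0 = -0.0,
   and the other rounding modes give -0.0 + +0.0 = +0.0; those are the
   two zero cases the checks below must respect.  */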
5444 fold_real_zero_addition_p (tree type, tree addend, int negate)
5446 if (!real_zerop (addend))
5449 /* Don't allow the fold with -fsignaling-nans. */
5450 if (HONOR_SNANS (TYPE_MODE (type)))
5453 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5454 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5457 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5458 if (TREE_CODE (addend) == REAL_CST
5459 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5462 /* The mode has signed zeros, and we have to honor their sign.
5463 In this situation, there is only one case we can return true for.
5464 X - 0 is the same as X unless rounding towards -infinity is
5465 in effect.  */
5466 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5469 /* Subroutine of fold() that checks comparisons of built-in math
5470 functions against real constants.
5472 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5473 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5474 is the type of the result and ARG0 and ARG1 are the operands of the
5475 comparison. ARG1 must be a TREE_REAL_CST.
5477 The function returns the constant folded tree if a simplification
5478 can be made, and NULL_TREE otherwise. */
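/* For instance (illustrative, in double): sqrt(x) > 2.0 folds to
   x > 4.0 below.  This holds even for negative x, where sqrt(x) is
   NaN: both comparisons are then false, as 4.0 = 2.0*2.0 is
   nonnegative.  */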
5481 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5482 tree type, tree arg0, tree arg1)
5486 if (BUILTIN_SQRT_P (fcode))
5488 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5489 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5491 c = TREE_REAL_CST (arg1);
5492 if (REAL_VALUE_NEGATIVE (c))
5494 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are false if y is negative.  */
5495 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5496 return omit_one_operand (type, integer_zero_node, arg);
5498 /* sqrt(x) > y is always true, if y is negative and we
5499 don't care about NaNs, i.e. negative values of x. */
5500 if (code == NE_EXPR || !HONOR_NANS (mode))
5501 return omit_one_operand (type, integer_one_node, arg);
5503 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5504 return fold (build2 (GE_EXPR, type, arg,
5505 build_real (TREE_TYPE (arg), dconst0)));
5507 else if (code == GT_EXPR || code == GE_EXPR)
5511 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5512 real_convert (&c2, mode, &c2);
5514 if (REAL_VALUE_ISINF (c2))
5516 /* sqrt(x) > y is x == +Inf, when y is very large. */
5517 if (HONOR_INFINITIES (mode))
5518 return fold (build2 (EQ_EXPR, type, arg,
5519 build_real (TREE_TYPE (arg), c2)));
5521 /* sqrt(x) > y is always false, when y is very large
5522 and we don't care about infinities. */
5523 return omit_one_operand (type, integer_zero_node, arg);
5526 /* sqrt(x) > c is the same as x > c*c. */
5527 return fold (build2 (code, type, arg,
5528 build_real (TREE_TYPE (arg), c2)));
5530 else if (code == LT_EXPR || code == LE_EXPR)
5534 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5535 real_convert (&c2, mode, &c2);
5537 if (REAL_VALUE_ISINF (c2))
5539 /* sqrt(x) < y is always true, when y is a very large
5540 value and we don't care about NaNs or Infinities. */
5541 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5542 return omit_one_operand (type, integer_one_node, arg);
5544 /* sqrt(x) < y is x != +Inf when y is very large and we
5545 don't care about NaNs. */
5546 if (! HONOR_NANS (mode))
5547 return fold (build2 (NE_EXPR, type, arg,
5548 build_real (TREE_TYPE (arg), c2)));
5550 /* sqrt(x) < y is x >= 0 when y is very large and we
5551 don't care about Infinities. */
5552 if (! HONOR_INFINITIES (mode))
5553 return fold (build2 (GE_EXPR, type, arg,
5554 build_real (TREE_TYPE (arg), dconst0)));
5556 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5557 if (lang_hooks.decls.global_bindings_p () != 0
5558 || CONTAINS_PLACEHOLDER_P (arg))
5561 arg = save_expr (arg);
5562 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5563 fold (build2 (GE_EXPR, type, arg,
5564 build_real (TREE_TYPE (arg),
5566 fold (build2 (NE_EXPR, type, arg,
5567 build_real (TREE_TYPE (arg),
5571 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5572 if (! HONOR_NANS (mode))
5573 return fold (build2 (code, type, arg,
5574 build_real (TREE_TYPE (arg), c2)));
5576 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5577 if (lang_hooks.decls.global_bindings_p () == 0
5578 && ! CONTAINS_PLACEHOLDER_P (arg))
5580 arg = save_expr (arg);
5581 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5582 fold (build2 (GE_EXPR, type, arg,
5583 build_real (TREE_TYPE (arg),
5585 fold (build2 (code, type, arg,
5586 build_real (TREE_TYPE (arg),
5595 /* Subroutine of fold() that optimizes comparisons against Infinities,
5596 either +Inf or -Inf.
5598 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5599 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5600 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5602 The function returns the constant folded tree if a simplification
5603 can be made, and NULL_TREE otherwise. */
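/* For instance (illustrative, in double): x >= +Inf folds to
   x > DBL_MAX, since the only value above DBL_MAX is +Inf itself,
   and x < +Inf folds to x <= DBL_MAX.  */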
5606 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5608 enum machine_mode mode;
5609 REAL_VALUE_TYPE max;
5613 mode = TYPE_MODE (TREE_TYPE (arg0));
5615 /* For negative infinity swap the sense of the comparison. */
5616 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5618 code = swap_tree_comparison (code);
5623 /* x > +Inf is always false, if we ignore sNaNs.  */
5624 if (HONOR_SNANS (mode))
5626 return omit_one_operand (type, integer_zero_node, arg0);
5629 /* x <= +Inf is always true, if we don't care about NaNs.  */
5630 if (! HONOR_NANS (mode))
5631 return omit_one_operand (type, integer_one_node, arg0);
5633 /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
5634 if (lang_hooks.decls.global_bindings_p () == 0
5635 && ! CONTAINS_PLACEHOLDER_P (arg0))
5637 arg0 = save_expr (arg0);
5638 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5644 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5645 real_maxval (&max, neg, mode);
5646 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5647 arg0, build_real (TREE_TYPE (arg0), max)));
5650 /* x < +Inf is always equal to x <= DBL_MAX. */
5651 real_maxval (&max, neg, mode);
5652 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5653 arg0, build_real (TREE_TYPE (arg0), max)));
5656 /* x != +Inf is always equal to !(x > DBL_MAX). */
5657 real_maxval (&max, neg, mode);
5658 if (! HONOR_NANS (mode))
5659 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5660 arg0, build_real (TREE_TYPE (arg0), max)));
5662 /* The transformation below creates non-gimple code and thus is
5663 not appropriate if we are in gimple form. */
5667 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5668 arg0, build_real (TREE_TYPE (arg0), max)));
5669 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5678 /* Subroutine of fold() that optimizes comparisons of a division by
5679 a nonzero integer constant against an integer constant, i.e.
5680 X/C1 op C2.
5682 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5683 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5684 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5686 The function returns the constant folded tree if a simplification
5687 can be made, and NULL_TREE otherwise. */
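/* Worked example (illustrative, unsigned X): X / 4 == 5 holds exactly
   for X in [20, 23], so below lo = 4*5 = 20, hi = lo + (4-1) = 23 and
   the comparison folds to the range check 20 <= X && X <= 23;
   X / 4 > 5 likewise becomes X > 23.  */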
5690 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5692 tree prod, tmp, hi, lo;
5693 tree arg00 = TREE_OPERAND (arg0, 0);
5694 tree arg01 = TREE_OPERAND (arg0, 1);
5695 unsigned HOST_WIDE_INT lpart;
5696 HOST_WIDE_INT hpart;
5699 /* We have to do this the hard way to detect unsigned overflow.
5700 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5701 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5702 TREE_INT_CST_HIGH (arg01),
5703 TREE_INT_CST_LOW (arg1),
5704 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5705 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5706 prod = force_fit_type (prod, -1, overflow, false);
5708 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5710 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5713 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5714 overflow = add_double (TREE_INT_CST_LOW (prod),
5715 TREE_INT_CST_HIGH (prod),
5716 TREE_INT_CST_LOW (tmp),
5717 TREE_INT_CST_HIGH (tmp),
5719 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5720 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5721 TREE_CONSTANT_OVERFLOW (prod));
5723 else if (tree_int_cst_sgn (arg01) >= 0)
5725 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5726 switch (tree_int_cst_sgn (arg1))
5729 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5734 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5739 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5749 /* A negative divisor reverses the relational operators. */
5750 code = swap_tree_comparison (code);
5752 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5753 switch (tree_int_cst_sgn (arg1))
5756 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5761 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5766 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5778 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5779 return omit_one_operand (type, integer_zero_node, arg00);
5780 if (TREE_OVERFLOW (hi))
5781 return fold (build2 (GE_EXPR, type, arg00, lo));
5782 if (TREE_OVERFLOW (lo))
5783 return fold (build2 (LE_EXPR, type, arg00, hi));
5784 return build_range_check (type, arg00, 1, lo, hi);
5787 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5788 return omit_one_operand (type, integer_one_node, arg00);
5789 if (TREE_OVERFLOW (hi))
5790 return fold (build2 (LT_EXPR, type, arg00, lo));
5791 if (TREE_OVERFLOW (lo))
5792 return fold (build2 (GT_EXPR, type, arg00, hi));
5793 return build_range_check (type, arg00, 0, lo, hi);
5796 if (TREE_OVERFLOW (lo))
5797 return omit_one_operand (type, integer_zero_node, arg00);
5798 return fold (build2 (LT_EXPR, type, arg00, lo));
5801 if (TREE_OVERFLOW (hi))
5802 return omit_one_operand (type, integer_one_node, arg00);
5803 return fold (build2 (LE_EXPR, type, arg00, hi));
5806 if (TREE_OVERFLOW (hi))
5807 return omit_one_operand (type, integer_zero_node, arg00);
5808 return fold (build2 (GT_EXPR, type, arg00, hi));
5811 if (TREE_OVERFLOW (lo))
5812 return omit_one_operand (type, integer_one_node, arg00);
5813 return fold (build2 (GE_EXPR, type, arg00, lo));
5823 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5824 equality/inequality test, then return a simplified form of
5825 the test using shifts and logical operations. Otherwise return
5826 NULL. TYPE is the desired result type. */
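/* For example (illustrative): (x & 8) != 0 becomes ((x >> 3) & 1),
   and (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1); when the mask is the
   sign bit, say x & 0x80000000 for a 32-bit int, the test instead
   becomes x < 0 or x >= 0.  */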
5829 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5832 /* If this is testing a single bit, we can optimize the test. */
5833 if ((code == NE_EXPR || code == EQ_EXPR)
5834 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5835 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5837 tree inner = TREE_OPERAND (arg0, 0);
5838 tree type = TREE_TYPE (arg0);
5839 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5840 enum machine_mode operand_mode = TYPE_MODE (type);
5842 tree signed_type, unsigned_type, intermediate_type;
5845 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5846 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5847 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5848 if (arg00 != NULL_TREE
5849 /* This is only a win if casting to a signed type is cheap,
5850 i.e. when arg00's type is not a partial mode. */
5851 && TYPE_PRECISION (TREE_TYPE (arg00))
5852 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5854 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5855 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5856 result_type, fold_convert (stype, arg00),
5857 fold_convert (stype, integer_zero_node)));
5860 /* Otherwise we have (A & C) != 0 where C is a single bit,
5861 convert that into ((A >> C2) & 1), where C2 = log2(C).
5862 Similarly for (A & C) == 0. */
5864 /* If INNER is a right shift of a constant and it plus BITNUM does
5865 not overflow, adjust BITNUM and INNER. */
5866 if (TREE_CODE (inner) == RSHIFT_EXPR
5867 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5868 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5869 && bitnum < TYPE_PRECISION (type)
5870 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5871 bitnum - TYPE_PRECISION (type)))
5873 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5874 inner = TREE_OPERAND (inner, 0);
5877 /* If we are going to be able to omit the AND below, we must do our
5878 operations as unsigned. If we must use the AND, we have a choice.
5879 Normally unsigned is faster, but for some machines signed is. */
5880 #ifdef LOAD_EXTEND_OP
5881 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5882 && !flag_syntax_only) ? 0 : 1;
5887 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5888 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5889 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5890 inner = fold_convert (intermediate_type, inner);
5893 inner = build2 (RSHIFT_EXPR, intermediate_type,
5894 inner, size_int (bitnum));
5896 if (code == EQ_EXPR)
5897 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5898 inner, integer_one_node));
5900 /* Put the AND last so it can combine with more things. */
5901 inner = build2 (BIT_AND_EXPR, intermediate_type,
5902 inner, integer_one_node);
5904 /* Make sure to return the proper type. */
5905 inner = fold_convert (result_type, inner);
5912 /* Check whether we are allowed to reorder operands arg0 and arg1,
5913 such that the evaluation of arg1 occurs before arg0. */
5916 reorder_operands_p (tree arg0, tree arg1)
5918 if (! flag_evaluation_order)
5920 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5922 return ! TREE_SIDE_EFFECTS (arg0)
5923 && ! TREE_SIDE_EFFECTS (arg1);
5926 /* Test whether it is preferable to swap two operands, ARG0 and
5927 ARG1, for example because ARG0 is an integer constant and ARG1
5928 isn't. If REORDER is true, only recommend swapping if we can
5929 evaluate the operands in reverse order. */
5932 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5934 STRIP_SIGN_NOPS (arg0);
5935 STRIP_SIGN_NOPS (arg1);
5937 if (TREE_CODE (arg1) == INTEGER_CST)
5939 if (TREE_CODE (arg0) == INTEGER_CST)
5942 if (TREE_CODE (arg1) == REAL_CST)
5944 if (TREE_CODE (arg0) == REAL_CST)
5947 if (TREE_CODE (arg1) == COMPLEX_CST)
5949 if (TREE_CODE (arg0) == COMPLEX_CST)
5952 if (TREE_CONSTANT (arg1))
5954 if (TREE_CONSTANT (arg0))
5960 if (reorder && flag_evaluation_order
5961 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5969 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5970 for commutative and comparison operators. Ensuring a canonical
5971 form allows the optimizers to find additional redundancies without
5972 having to explicitly check for both orderings. */
5973 if (TREE_CODE (arg0) == SSA_NAME
5974 && TREE_CODE (arg1) == SSA_NAME
5975 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5981 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
5982 ARG0 is extended to a wider type. */
5985 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
5987 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
5989 tree shorter_type, outer_type;
5993 if (arg0_unw == arg0)
5995 shorter_type = TREE_TYPE (arg0_unw);
5997 arg1_unw = get_unwidened (arg1, shorter_type);
6001 /* If possible, express the comparison in the shorter mode. */
6002 if ((code == EQ_EXPR || code == NE_EXPR
6003 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6004 && (TREE_TYPE (arg1_unw) == shorter_type
6005 || (TREE_CODE (arg1_unw) == INTEGER_CST
6006 && TREE_CODE (shorter_type) == INTEGER_TYPE
6007 && int_fits_type_p (arg1_unw, shorter_type))))
6008 return fold (build (code, type, arg0_unw,
6009 fold_convert (shorter_type, arg1_unw)));
6011 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6014 /* If we are comparing with an integer that does not fit into the range
6015 of the shorter type, the result is known. */
6016 outer_type = TREE_TYPE (arg1_unw);
6017 min = lower_bound_in_type (outer_type, shorter_type);
6018 max = upper_bound_in_type (outer_type, shorter_type);
6020 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6022 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6029 return omit_one_operand (type, integer_zero_node, arg0);
6034 return omit_one_operand (type, integer_one_node, arg0);
6040 return omit_one_operand (type, integer_one_node, arg0);
6042 return omit_one_operand (type, integer_zero_node, arg0);
6047 return omit_one_operand (type, integer_zero_node, arg0);
6049 return omit_one_operand (type, integer_one_node, arg0);
6058 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6059 ARG0 just the signedness is changed. */
6062 fold_sign_changed_comparison (enum tree_code code, tree type,
6063 tree arg0, tree arg1)
6065 tree arg0_inner, tmp;
6066 tree inner_type, outer_type;
6068 if (TREE_CODE (arg0) != NOP_EXPR)
6071 outer_type = TREE_TYPE (arg0);
6072 arg0_inner = TREE_OPERAND (arg0, 0);
6073 inner_type = TREE_TYPE (arg0_inner);
6075 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6078 if (TREE_CODE (arg1) != INTEGER_CST
6079 && !(TREE_CODE (arg1) == NOP_EXPR
6080 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6083 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6088 if (TREE_CODE (arg1) == INTEGER_CST)
6090 tmp = build_int_cst_wide (inner_type,
6091 TREE_INT_CST_LOW (arg1),
6092 TREE_INT_CST_HIGH (arg1));
6093 arg1 = force_fit_type (tmp, 0,
6094 TREE_OVERFLOW (arg1),
6095 TREE_CONSTANT_OVERFLOW (arg1));
6098 arg1 = fold_convert (inner_type, arg1);
6100 return fold (build (code, type, arg0_inner, arg1));
6103 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6104 the step of the array.  TYPE is the type of the expression.  ADDR is the address.
6105 MULT is the multiplicative expression. If the function succeeds, the new
6106 address expression is returned. Otherwise NULL_TREE is returned. */
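/* For example (illustrative): with `int a[10];' and a 4-byte int,
   &a[i] + j * 4 can become &a[i + j], because the multiplier matches
   the array step; with any other multiplier NULL_TREE is returned.  */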
6109 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6111 tree s, delta, step;
6112 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6113 tree ref = TREE_OPERAND (addr, 0), pref;
6120 if (TREE_CODE (arg0) == INTEGER_CST)
6125 else if (TREE_CODE (arg1) == INTEGER_CST)
6133 for (;; ref = TREE_OPERAND (ref, 0))
6135 if (TREE_CODE (ref) == ARRAY_REF)
6137 step = array_ref_element_size (ref);
6139 if (TREE_CODE (step) != INTEGER_CST)
6142 itype = TREE_TYPE (step);
6144 /* If the type sizes do not match, we might run into problems
6145 when one of them would overflow. */
6146 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6149 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6152 delta = fold_convert (itype, delta);
6156 if (!handled_component_p (ref))
6160 /* We found a suitable array reference.  So copy everything up to it,
6161 and replace the index. */
6163 pref = TREE_OPERAND (addr, 0);
6164 ret = copy_node (pref);
6169 pref = TREE_OPERAND (pref, 0);
6170 TREE_OPERAND (pos, 0) = copy_node (pref);
6171 pos = TREE_OPERAND (pos, 0);
6174 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6175 TREE_OPERAND (pos, 1),
6178 return build1 (ADDR_EXPR, type, ret);
6182 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6183 means A >= Y && A != MAX, but in this case we know that
6184 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
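/* Illustrative instance: for unsigned i, `i < n && i + 1 > start'
   folds to `i < n && i >= start'; the bound i < n rules out the
   maximum value of i, so i + 1 cannot wrap.  */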
6187 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6189 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6191 if (TREE_CODE (bound) == LT_EXPR)
6192 a = TREE_OPERAND (bound, 0);
6193 else if (TREE_CODE (bound) == GT_EXPR)
6194 a = TREE_OPERAND (bound, 1);
6198 typea = TREE_TYPE (a);
6199 if (!INTEGRAL_TYPE_P (typea)
6200 && !POINTER_TYPE_P (typea))
6203 if (TREE_CODE (ineq) == LT_EXPR)
6205 a1 = TREE_OPERAND (ineq, 1);
6206 y = TREE_OPERAND (ineq, 0);
6208 else if (TREE_CODE (ineq) == GT_EXPR)
6210 a1 = TREE_OPERAND (ineq, 0);
6211 y = TREE_OPERAND (ineq, 1);
6216 if (TREE_TYPE (a1) != typea)
6219 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6220 if (!integer_onep (diff))
6223 return fold (build2 (GE_EXPR, type, a, y));
6226 /* Perform constant folding and related simplification of EXPR.
6227 The related simplifications include x*1 => x, x*0 => 0, etc.,
6228 and application of the associative law.
6229 NOP_EXPR conversions may be removed freely (as long as we
6230 are careful not to change the type of the overall expression).
6231 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6232 but we can constant-fold them if they have constant operands. */
6234 #ifdef ENABLE_FOLD_CHECKING
6235 # define fold(x) fold_1 (x)
6236 static tree fold_1 (tree);
6242 const tree t = expr;
6243 const tree type = TREE_TYPE (expr);
6244 tree t1 = NULL_TREE;
6246 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6247 enum tree_code code = TREE_CODE (t);
6248 enum tree_code_class kind = TREE_CODE_CLASS (code);
6250 /* WINS will be nonzero when the switch is done
6251 if all operands are constant. */
6254 /* Return right away if a constant. */
6255 if (kind == tcc_constant)
6258 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6262 /* Special case for conversion ops that can have fixed point args. */
6263 arg0 = TREE_OPERAND (t, 0);
6265 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6267 STRIP_SIGN_NOPS (arg0);
6269 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6270 subop = TREE_REALPART (arg0);
6274 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6275 && TREE_CODE (subop) != REAL_CST)
6276 /* Note that TREE_CONSTANT isn't enough:
6277 static var addresses are constant but we can't
6278 do arithmetic on them. */
6281 else if (IS_EXPR_CODE_CLASS (kind))
6283 int len = TREE_CODE_LENGTH (code);
6285 for (i = 0; i < len; i++)
6287 tree op = TREE_OPERAND (t, i);
6291 continue; /* Valid for CALL_EXPR, at least. */
6293 /* Strip any conversions that don't change the mode. This is
6294 safe for every expression, except for a comparison expression
6295 because its signedness is derived from its operands. So, in
6296 the latter case, only strip conversions that don't change the
6297 signedness.
6299 Note that this is done as an internal manipulation within the
6300 constant folder, in order to find the simplest representation
6301 of the arguments so that their form can be studied. In any
6302 case, the appropriate type conversions should be put back in
6303 the tree that will get out of the constant folder. */
6304 if (kind == tcc_comparison)
6305 STRIP_SIGN_NOPS (op);
6309 if (TREE_CODE (op) == COMPLEX_CST)
6310 subop = TREE_REALPART (op);
6314 if (TREE_CODE (subop) != INTEGER_CST
6315 && TREE_CODE (subop) != REAL_CST)
6316 /* Note that TREE_CONSTANT isn't enough:
6317 static var addresses are constant but we can't
6318 do arithmetic on them. */
6328 /* If this is a commutative operation, and ARG0 is a constant, move it
6329 to ARG1 to reduce the number of tests below. */
6330 if (commutative_tree_code (code)
6331 && tree_swap_operands_p (arg0, arg1, true))
6332 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6333 TREE_OPERAND (t, 0)));
6335 /* Now WINS is set as described above,
6336 ARG0 is the first operand of EXPR,
6337 and ARG1 is the second operand (if it has more than one operand).
6339 First check for cases where an arithmetic operation is applied to a
6340 compound, conditional, or comparison operation. Push the arithmetic
6341 operation inside the compound or conditional to see if any folding
6342 can then be done. Convert comparison to conditional for this purpose.
6343 This also optimizes non-constant cases that used to be done in
6344 expand_expr.
6346 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6347 one of the operands is a comparison and the other is a comparison, a
6348 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6349 code below would make the expression more complex. Change it to a
6350 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6351 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6353 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6354 || code == EQ_EXPR || code == NE_EXPR)
6355 && ((truth_value_p (TREE_CODE (arg0))
6356 && (truth_value_p (TREE_CODE (arg1))
6357 || (TREE_CODE (arg1) == BIT_AND_EXPR
6358 && integer_onep (TREE_OPERAND (arg1, 1)))))
6359 || (truth_value_p (TREE_CODE (arg1))
6360 && (truth_value_p (TREE_CODE (arg0))
6361 || (TREE_CODE (arg0) == BIT_AND_EXPR
6362 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6364 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6365 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6367 type, fold_convert (boolean_type_node, arg0),
6368 fold_convert (boolean_type_node, arg1)));
6370 if (code == EQ_EXPR)
6371 tem = invert_truthvalue (tem);
6376 if (TREE_CODE_CLASS (code) == tcc_unary)
6378 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6379 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6380 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6381 else if (TREE_CODE (arg0) == COND_EXPR)
6383 tree arg01 = TREE_OPERAND (arg0, 1);
6384 tree arg02 = TREE_OPERAND (arg0, 2);
6385 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6386 arg01 = fold (build1 (code, type, arg01));
6387 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6388 arg02 = fold (build1 (code, type, arg02));
6389 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6392 /* If this was a conversion, and all we did was to move it
6393 inside the COND_EXPR, bring it back out. But leave it if
6394 it is a conversion from integer to integer and the
6395 result precision is no wider than a word since such a
6396 conversion is cheap and may be optimized away by combine,
6397 while it couldn't if it were outside the COND_EXPR. Then return
6398 so we don't get into an infinite recursion loop taking the
6399 conversion out and then back in. */
6401 if ((code == NOP_EXPR || code == CONVERT_EXPR
6402 || code == NON_LVALUE_EXPR)
6403 && TREE_CODE (tem) == COND_EXPR
6404 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6405 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6406 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6407 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6408 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6409 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6410 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6412 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6413 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6414 || flag_syntax_only))
6415 tem = build1 (code, type,
6417 TREE_TYPE (TREE_OPERAND
6418 (TREE_OPERAND (tem, 1), 0)),
6419 TREE_OPERAND (tem, 0),
6420 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6421 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6424 else if (COMPARISON_CLASS_P (arg0))
6426 if (TREE_CODE (type) == BOOLEAN_TYPE)
6427 {
6428 arg0 = copy_node (arg0);
6429 TREE_TYPE (arg0) = type;
6430 return arg0;
6431 }
6432 else if (TREE_CODE (type) != INTEGER_TYPE)
6433 return fold (build3 (COND_EXPR, type, arg0,
6434 fold (build1 (code, type,
6435 integer_one_node)),
6436 fold (build1 (code, type,
6437 integer_zero_node))));
6440 else if (TREE_CODE_CLASS (code) == tcc_comparison
6441 && TREE_CODE (arg0) == COMPOUND_EXPR)
6442 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6443 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6444 else if (TREE_CODE_CLASS (code) == tcc_comparison
6445 && TREE_CODE (arg1) == COMPOUND_EXPR)
6446 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6447 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6448 else if (TREE_CODE_CLASS (code) == tcc_binary
6449 || TREE_CODE_CLASS (code) == tcc_comparison)
6451 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6452 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6453 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6455 if (TREE_CODE (arg1) == COMPOUND_EXPR
6456 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6457 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6458 fold (build2 (code, type,
6459 arg0, TREE_OPERAND (arg1, 1))));
6461 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6463 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6464 /*cond_first_p=*/1);
6465 if (tem != NULL_TREE)
6469 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6471 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6472 /*cond_first_p=*/0);
6473 if (tem != NULL_TREE)
6481 return fold (DECL_INITIAL (t));
6483 case NOP_EXPR:
6484 case FLOAT_EXPR:
6485 case CONVERT_EXPR:
6486 case FIX_TRUNC_EXPR:
6487 case FIX_CEIL_EXPR:
6488 case FIX_FLOOR_EXPR:
6489 case FIX_ROUND_EXPR:
6490 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6491 return TREE_OPERAND (t, 0);
6493 /* Handle cases of two conversions in a row. */
6494 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6495 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6497 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6498 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6499 int inside_int = INTEGRAL_TYPE_P (inside_type);
6500 int inside_ptr = POINTER_TYPE_P (inside_type);
6501 int inside_float = FLOAT_TYPE_P (inside_type);
6502 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6503 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6504 int inter_int = INTEGRAL_TYPE_P (inter_type);
6505 int inter_ptr = POINTER_TYPE_P (inter_type);
6506 int inter_float = FLOAT_TYPE_P (inter_type);
6507 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6508 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6509 int final_int = INTEGRAL_TYPE_P (type);
6510 int final_ptr = POINTER_TYPE_P (type);
6511 int final_float = FLOAT_TYPE_P (type);
6512 unsigned int final_prec = TYPE_PRECISION (type);
6513 int final_unsignedp = TYPE_UNSIGNED (type);
6515 /* In addition to the cases of two conversions in a row
6516 handled below, if we are converting something to its own
6517 type via an object of identical or wider precision, neither
6518 conversion is needed. */
6519 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6520 && ((inter_int && final_int) || (inter_float && final_float))
6521 && inter_prec >= final_prec)
6522 return fold (build1 (code, type,
6523 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6525 /* Likewise, if the intermediate and final types are either both
6526 float or both integer, we don't need the middle conversion if
6527 it is wider than the final type and doesn't change the signedness
6528 (for integers). Avoid this if the final type is a pointer
6529 since then we sometimes need the inner conversion. Likewise if
6530 the outer has a precision not equal to the size of its mode. */
6531 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6532 || (inter_float && inside_float))
6533 && inter_prec >= inside_prec
6534 && (inter_float || inter_unsignedp == inside_unsignedp)
6535 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6536 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6537 && ! final_ptr)
6538 return fold (build1 (code, type,
6539 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6541 /* If we have a sign-extension of a zero-extended value, we can
6542 replace that by a single zero-extension. */
6543 if (inside_int && inter_int && final_int
6544 && inside_prec < inter_prec && inter_prec < final_prec
6545 && inside_unsignedp && !inter_unsignedp)
6546 return fold (build1 (code, type,
6547 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6549 /* Two conversions in a row are not needed unless:
6550 - some conversion is floating-point (overstrict for now), or
6551 - the intermediate type is narrower than both initial and
6552 final, or
6553 - the intermediate type and innermost type differ in signedness,
6554 and the outermost type is wider than the intermediate, or
6555 - the initial type is a pointer type and the precisions of the
6556 intermediate and final types differ, or
6557 - the final type is a pointer type and the precisions of the
6558 initial and intermediate types differ. */
6559 if (! inside_float && ! inter_float && ! final_float
6560 && (inter_prec > inside_prec || inter_prec > final_prec)
6561 && ! (inside_int && inter_int
6562 && inter_unsignedp != inside_unsignedp
6563 && inter_prec < final_prec)
6564 && ((inter_unsignedp && inter_prec > inside_prec)
6565 == (final_unsignedp && final_prec > inter_prec))
6566 && ! (inside_ptr && inter_prec != final_prec)
6567 && ! (final_ptr && inside_prec != inter_prec)
6568 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6569 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6570 && ! final_ptr)
6571 return fold (build1 (code, type,
6572 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
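/* Illustration (added for exposition): on a target where long is at
   least as wide as int, (int) (long) i for an int "i" folds to plain
   "i" -- the intermediate widening is dropped -- while (int) (char) i
   must keep the narrowing to char.  */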
6575 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6576 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6577 /* Detect assigning a bitfield. */
6578 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6579 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6581 /* Don't leave an assignment inside a conversion
6582 unless assigning a bitfield. */
6583 tree prev = TREE_OPERAND (t, 0);
6584 tem = copy_node (t);
6585 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6586 /* First do the assignment, then return converted constant. */
6587 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6588 TREE_NO_WARNING (tem) = 1;
6589 TREE_USED (tem) = 1;
6590 return tem;
6591 }
6593 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6594 constant (if x has signed type, the sign bit cannot be set
6595 in c). This folds extension into the BIT_AND_EXPR. */
6596 if (INTEGRAL_TYPE_P (type)
6597 && TREE_CODE (type) != BOOLEAN_TYPE
6598 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6599 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6601 tree and = TREE_OPERAND (t, 0);
6602 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6603 int change = 0;
6605 if (TYPE_UNSIGNED (TREE_TYPE (and))
6606 || (TYPE_PRECISION (type)
6607 <= TYPE_PRECISION (TREE_TYPE (and))))
6608 change = 1;
6609 else if (TYPE_PRECISION (TREE_TYPE (and1))
6610 <= HOST_BITS_PER_WIDE_INT
6611 && host_integerp (and1, 1))
6612 {
6613 unsigned HOST_WIDE_INT cst;
6615 cst = tree_low_cst (and1, 1);
6616 cst &= (HOST_WIDE_INT) -1
6617 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6618 change = (cst == 0);
6619 #ifdef LOAD_EXTEND_OP
6620 if (change
6621 && !flag_syntax_only
6622 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6623 == ZERO_EXTEND))
6624 {
6625 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6626 and0 = fold_convert (uns, and0);
6627 and1 = fold_convert (uns, and1);
6628 }
6629 #endif
6630 }
6631 if (change)
6632 return fold (build2 (BIT_AND_EXPR, type,
6633 fold_convert (type, and0),
6634 fold_convert (type, and1)));
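/* Illustration (added for exposition): for "unsigned short x",
   (unsigned int) (x & 0xff) can become (unsigned int) x & 0xffU,
   folding the extension into the BIT_AND_EXPR as described above.  */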
6637 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6638 T2 being pointers to types of the same size. */
6639 if (POINTER_TYPE_P (TREE_TYPE (t))
6640 && BINARY_CLASS_P (arg0)
6641 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6642 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6644 tree arg00 = TREE_OPERAND (arg0, 0);
6645 tree t0 = TREE_TYPE (t);
6646 tree t1 = TREE_TYPE (arg00);
6647 tree tt0 = TREE_TYPE (t0);
6648 tree tt1 = TREE_TYPE (t1);
6649 tree s0 = TYPE_SIZE (tt0);
6650 tree s1 = TYPE_SIZE (tt1);
6652 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6653 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6654 TREE_OPERAND (arg0, 1));
6657 tem = fold_convert_const (code, type, arg0);
6658 return tem ? tem : t;
6660 case VIEW_CONVERT_EXPR:
6661 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6662 return build1 (VIEW_CONVERT_EXPR, type,
6663 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6667 if (TREE_CODE (arg0) == CONSTRUCTOR
6668 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6670 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6671 if (m)
6672 return TREE_VALUE (m);
6677 if (TREE_CONSTANT (t) != wins)
6679 tem = copy_node (t);
6680 TREE_CONSTANT (tem) = wins;
6681 TREE_INVARIANT (tem) = wins;
6687 if (negate_expr_p (arg0))
6688 return fold_convert (type, negate_expr (arg0));
6692 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6693 return fold_abs_const (arg0, type);
6694 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6695 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6696 /* Convert fabs((double)float) into (double)fabsf(float). */
6697 else if (TREE_CODE (arg0) == NOP_EXPR
6698 && TREE_CODE (type) == REAL_TYPE)
6699 {
6700 tree targ0 = strip_float_extensions (arg0);
6701 if (targ0 != arg0)
6702 return fold_convert (type, fold (build1 (ABS_EXPR,
6703 TREE_TYPE (targ0),
6704 targ0)));
6705 }
6706 else if (tree_expr_nonnegative_p (arg0))
6707 return arg0;
6708 break;
6710 case CONJ_EXPR:
6711 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6712 return fold_convert (type, arg0);
6713 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6714 return build2 (COMPLEX_EXPR, type,
6715 TREE_OPERAND (arg0, 0),
6716 negate_expr (TREE_OPERAND (arg0, 1)));
6717 else if (TREE_CODE (arg0) == COMPLEX_CST)
6718 return build_complex (type, TREE_REALPART (arg0),
6719 negate_expr (TREE_IMAGPART (arg0)));
6720 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6721 return fold (build2 (TREE_CODE (arg0), type,
6722 fold (build1 (CONJ_EXPR, type,
6723 TREE_OPERAND (arg0, 0))),
6724 fold (build1 (CONJ_EXPR, type,
6725 TREE_OPERAND (arg0, 1)))));
6726 else if (TREE_CODE (arg0) == CONJ_EXPR)
6727 return TREE_OPERAND (arg0, 0);
6728 break;
6730 case BIT_NOT_EXPR:
6731 if (TREE_CODE (arg0) == INTEGER_CST)
6732 return fold_not_const (arg0, type);
6733 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6734 return TREE_OPERAND (arg0, 0);
6735 break;
6737 case PLUS_EXPR:
6738 /* A + (-B) -> A - B */
6739 if (TREE_CODE (arg1) == NEGATE_EXPR)
6740 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6741 /* (-A) + B -> B - A */
6742 if (TREE_CODE (arg0) == NEGATE_EXPR
6743 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6744 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6745 if (! FLOAT_TYPE_P (type))
6747 if (integer_zerop (arg1))
6748 return non_lvalue (fold_convert (type, arg0));
6750 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6751 with a constant, and the two constants have no bits in common,
6752 we should treat this as a BIT_IOR_EXPR since this may produce more
6753 simplifications.  */
6754 if (TREE_CODE (arg0) == BIT_AND_EXPR
6755 && TREE_CODE (arg1) == BIT_AND_EXPR
6756 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6757 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6758 && integer_zerop (const_binop (BIT_AND_EXPR,
6759 TREE_OPERAND (arg0, 1),
6760 TREE_OPERAND (arg1, 1), 0)))
6762 code = BIT_IOR_EXPR;
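/* Illustration (added for exposition): (x & 0xf0) + (y & 0x0f) can
   never carry, because the masked values share no bits, so the sum
   equals (x & 0xf0) | (y & 0x0f).  */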
6766 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6767 (plus (plus (mult) (mult)) (foo)) so that we can
6768 take advantage of the factoring cases below. */
6769 if (((TREE_CODE (arg0) == PLUS_EXPR
6770 || TREE_CODE (arg0) == MINUS_EXPR)
6771 && TREE_CODE (arg1) == MULT_EXPR)
6772 || ((TREE_CODE (arg1) == PLUS_EXPR
6773 || TREE_CODE (arg1) == MINUS_EXPR)
6774 && TREE_CODE (arg0) == MULT_EXPR))
6776 tree parg0, parg1, parg, marg;
6777 enum tree_code pcode;
6779 if (TREE_CODE (arg1) == MULT_EXPR)
6780 parg = arg0, marg = arg1;
6782 parg = arg1, marg = arg0;
6783 pcode = TREE_CODE (parg);
6784 parg0 = TREE_OPERAND (parg, 0);
6785 parg1 = TREE_OPERAND (parg, 1);
6789 if (TREE_CODE (parg0) == MULT_EXPR
6790 && TREE_CODE (parg1) != MULT_EXPR)
6791 return fold (build2 (pcode, type,
6792 fold (build2 (PLUS_EXPR, type,
6793 fold_convert (type, parg0),
6794 fold_convert (type, marg))),
6795 fold_convert (type, parg1)));
6796 if (TREE_CODE (parg0) != MULT_EXPR
6797 && TREE_CODE (parg1) == MULT_EXPR)
6798 return fold (build2 (PLUS_EXPR, type,
6799 fold_convert (type, parg0),
6800 fold (build2 (pcode, type,
6801 fold_convert (type, marg),
6806 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6808 tree arg00, arg01, arg10, arg11;
6809 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6811 /* (A * C) + (B * C) -> (A+B) * C.
6812 We are most concerned about the case where C is a constant,
6813 but other combinations show up during loop reduction. Since
6814 it is not difficult, try all four possibilities. */
6816 arg00 = TREE_OPERAND (arg0, 0);
6817 arg01 = TREE_OPERAND (arg0, 1);
6818 arg10 = TREE_OPERAND (arg1, 0);
6819 arg11 = TREE_OPERAND (arg1, 1);
6822 if (operand_equal_p (arg01, arg11, 0))
6823 same = arg01, alt0 = arg00, alt1 = arg10;
6824 else if (operand_equal_p (arg00, arg10, 0))
6825 same = arg00, alt0 = arg01, alt1 = arg11;
6826 else if (operand_equal_p (arg00, arg11, 0))
6827 same = arg00, alt0 = arg01, alt1 = arg10;
6828 else if (operand_equal_p (arg01, arg10, 0))
6829 same = arg01, alt0 = arg00, alt1 = arg11;
6831 /* No identical multiplicands; see if we can find a common
6832 power-of-two factor in non-power-of-two multiplies. This
6833 can help in multi-dimensional array access. */
6834 else if (TREE_CODE (arg01) == INTEGER_CST
6835 && TREE_CODE (arg11) == INTEGER_CST
6836 && TREE_INT_CST_HIGH (arg01) == 0
6837 && TREE_INT_CST_HIGH (arg11) == 0)
6839 HOST_WIDE_INT int01, int11, tmp;
6840 int01 = TREE_INT_CST_LOW (arg01);
6841 int11 = TREE_INT_CST_LOW (arg11);
6843 /* Move min of absolute values to int11. */
6844 if ((int01 >= 0 ? int01 : -int01)
6845 < (int11 >= 0 ? int11 : -int11))
6847 tmp = int01, int01 = int11, int11 = tmp;
6848 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6849 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6852 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6854 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6855 build_int_cst (NULL_TREE,
6863 return fold (build2 (MULT_EXPR, type,
6864 fold (build2 (PLUS_EXPR, type,
6865 fold_convert (type, alt0),
6866 fold_convert (type, alt1))),
6867 fold_convert (type, same)));
6868 }
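/* Illustration (added for exposition): i*16 + j*16 factors to
   (i + j) * 16 via an identical multiplicand, and i*24 + j*8 factors
   to (i*3 + j) * 8 via the common power-of-two path above.  */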
6870 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6871 of the array.  The loop optimizer sometimes produces this type of
6872 expression.  */
6873 if (TREE_CODE (arg0) == ADDR_EXPR
6874 && TREE_CODE (arg1) == MULT_EXPR)
6876 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6880 else if (TREE_CODE (arg1) == ADDR_EXPR
6881 && TREE_CODE (arg0) == MULT_EXPR)
6883 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6890 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6891 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6892 return non_lvalue (fold_convert (type, arg0));
6894 /* Likewise if the operands are reversed. */
6895 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6896 return non_lvalue (fold_convert (type, arg1));
6898 /* Convert X + -C into X - C. */
6899 if (TREE_CODE (arg1) == REAL_CST
6900 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6902 tem = fold_negate_const (arg1, type);
6903 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6904 return fold (build2 (MINUS_EXPR, type,
6905 fold_convert (type, arg0),
6906 fold_convert (type, tem)));
6909 /* Convert x+x into x*2.0. */
6910 if (operand_equal_p (arg0, arg1, 0)
6911 && SCALAR_FLOAT_TYPE_P (type))
6912 return fold (build2 (MULT_EXPR, type, arg0,
6913 build_real (type, dconst2)));
6915 /* Convert x*c+x into x*(c+1). */
6916 if (flag_unsafe_math_optimizations
6917 && TREE_CODE (arg0) == MULT_EXPR
6918 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6919 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6924 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6925 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6926 return fold (build2 (MULT_EXPR, type, arg1,
6927 build_real (type, c)));
6930 /* Convert x+x*c into x*(c+1). */
6931 if (flag_unsafe_math_optimizations
6932 && TREE_CODE (arg1) == MULT_EXPR
6933 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6934 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6935 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6939 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6940 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6941 return fold (build2 (MULT_EXPR, type, arg0,
6942 build_real (type, c)));
6945 /* Convert x*c1+x*c2 into x*(c1+c2). */
6946 if (flag_unsafe_math_optimizations
6947 && TREE_CODE (arg0) == MULT_EXPR
6948 && TREE_CODE (arg1) == MULT_EXPR
6949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6950 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6951 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6952 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6953 && operand_equal_p (TREE_OPERAND (arg0, 0),
6954 TREE_OPERAND (arg1, 0), 0))
6956 REAL_VALUE_TYPE c1, c2;
6958 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6959 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6960 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6961 return fold (build2 (MULT_EXPR, type,
6962 TREE_OPERAND (arg0, 0),
6963 build_real (type, c1)));
6965 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6966 if (flag_unsafe_math_optimizations
6967 && TREE_CODE (arg1) == PLUS_EXPR
6968 && TREE_CODE (arg0) != MULT_EXPR)
6970 tree tree10 = TREE_OPERAND (arg1, 0);
6971 tree tree11 = TREE_OPERAND (arg1, 1);
6972 if (TREE_CODE (tree11) == MULT_EXPR
6973 && TREE_CODE (tree10) == MULT_EXPR)
6976 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6977 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6980 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
6981 if (flag_unsafe_math_optimizations
6982 && TREE_CODE (arg0) == PLUS_EXPR
6983 && TREE_CODE (arg1) != MULT_EXPR)
6985 tree tree00 = TREE_OPERAND (arg0, 0);
6986 tree tree01 = TREE_OPERAND (arg0, 1);
6987 if (TREE_CODE (tree01) == MULT_EXPR
6988 && TREE_CODE (tree00) == MULT_EXPR)
6991 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6992 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6998 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6999 is a rotate of A by C1 bits. */
7000 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7001 is a rotate of A by B bits. */
7003 enum tree_code code0, code1;
7004 code0 = TREE_CODE (arg0);
7005 code1 = TREE_CODE (arg1);
7006 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7007 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7008 && operand_equal_p (TREE_OPERAND (arg0, 0),
7009 TREE_OPERAND (arg1, 0), 0)
7010 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7012 tree tree01, tree11;
7013 enum tree_code code01, code11;
7015 tree01 = TREE_OPERAND (arg0, 1);
7016 tree11 = TREE_OPERAND (arg1, 1);
7017 STRIP_NOPS (tree01);
7018 STRIP_NOPS (tree11);
7019 code01 = TREE_CODE (tree01);
7020 code11 = TREE_CODE (tree11);
7021 if (code01 == INTEGER_CST
7022 && code11 == INTEGER_CST
7023 && TREE_INT_CST_HIGH (tree01) == 0
7024 && TREE_INT_CST_HIGH (tree11) == 0
7025 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7026 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7027 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7028 code0 == LSHIFT_EXPR ? tree01 : tree11);
7029 else if (code11 == MINUS_EXPR)
7031 tree tree110, tree111;
7032 tree110 = TREE_OPERAND (tree11, 0);
7033 tree111 = TREE_OPERAND (tree11, 1);
7034 STRIP_NOPS (tree110);
7035 STRIP_NOPS (tree111);
7036 if (TREE_CODE (tree110) == INTEGER_CST
7037 && 0 == compare_tree_int (tree110,
7039 (TREE_TYPE (TREE_OPERAND
7041 && operand_equal_p (tree01, tree111, 0))
7042 return build2 ((code0 == LSHIFT_EXPR
7045 type, TREE_OPERAND (arg0, 0), tree01);
7047 else if (code01 == MINUS_EXPR)
7049 tree tree010, tree011;
7050 tree010 = TREE_OPERAND (tree01, 0);
7051 tree011 = TREE_OPERAND (tree01, 1);
7052 STRIP_NOPS (tree010);
7053 STRIP_NOPS (tree011);
7054 if (TREE_CODE (tree010) == INTEGER_CST
7055 && 0 == compare_tree_int (tree010,
7057 (TREE_TYPE (TREE_OPERAND
7059 && operand_equal_p (tree11, tree011, 0))
7060 return build2 ((code0 != LSHIFT_EXPR
7063 type, TREE_OPERAND (arg0, 0), tree11);
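/* Illustration (added for exposition): for 32-bit unsigned x,
   (x << 3) + (x >> 29) is recognized as a left rotate by 3, and
   (x << b) + (x >> (32 - b)) as a left rotate by b; the two shifted
   halves share no bits, so + behaves as |.  */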
7069 /* In most languages, we can't associate operations on floats through
7070 parentheses. Rather than remember where the parentheses were, we
7071 don't associate floats at all, unless the user has specified
7072 -funsafe-math-optimizations. */
7075 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7077 tree var0, con0, lit0, minus_lit0;
7078 tree var1, con1, lit1, minus_lit1;
7080 /* Split both trees into variables, constants, and literals. Then
7081 associate each group together, the constants with literals,
7082 then the result with variables. This increases the chances of
7083 literals being recombined later and of generating relocatable
7084 expressions for the sum of a constant and literal. */
7085 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7086 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7087 code == MINUS_EXPR);
7089 /* Only do something if we found more than two objects. Otherwise,
7090 nothing has changed and we risk infinite recursion. */
7091 if (2 < ((var0 != 0) + (var1 != 0)
7092 + (con0 != 0) + (con1 != 0)
7093 + (lit0 != 0) + (lit1 != 0)
7094 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7096 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7097 if (code == MINUS_EXPR)
7100 var0 = associate_trees (var0, var1, code, type);
7101 con0 = associate_trees (con0, con1, code, type);
7102 lit0 = associate_trees (lit0, lit1, code, type);
7103 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7105 /* Preserve the MINUS_EXPR if the negative part of the literal is
7106 greater than the positive part. Otherwise, the multiplicative
7107 folding code (i.e. extract_muldiv) may be fooled in case
7108 unsigned constants are subtracted, like in the following
7109 example: ((X*2 + 4) - 8U)/2. */
7110 if (minus_lit0 && lit0)
7112 if (TREE_CODE (lit0) == INTEGER_CST
7113 && TREE_CODE (minus_lit0) == INTEGER_CST
7114 && tree_int_cst_lt (lit0, minus_lit0))
7116 minus_lit0 = associate_trees (minus_lit0, lit0,
7122 lit0 = associate_trees (lit0, minus_lit0,
7130 return fold_convert (type,
7131 associate_trees (var0, minus_lit0,
7135 con0 = associate_trees (con0, minus_lit0,
7137 return fold_convert (type,
7138 associate_trees (var0, con0,
7143 con0 = associate_trees (con0, lit0, code, type);
7144 return fold_convert (type, associate_trees (var0, con0,
7151 t1 = const_binop (code, arg0, arg1, 0);
7152 if (t1 != NULL_TREE)
7154 /* The return value should always have
7155 the same type as the original expression. */
7156 if (TREE_TYPE (t1) != type)
7157 t1 = fold_convert (type, t1);
7159 return t1;
7160 }
7161 return t;
7163 case MINUS_EXPR:
7164 /* A - (-B) -> A + B */
7165 if (TREE_CODE (arg1) == NEGATE_EXPR)
7166 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7167 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7168 if (TREE_CODE (arg0) == NEGATE_EXPR
7169 && (FLOAT_TYPE_P (type)
7170 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7171 && negate_expr_p (arg1)
7172 && reorder_operands_p (arg0, arg1))
7173 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7174 TREE_OPERAND (arg0, 0)));
7176 if (! FLOAT_TYPE_P (type))
7178 if (! wins && integer_zerop (arg0))
7179 return negate_expr (fold_convert (type, arg1));
7180 if (integer_zerop (arg1))
7181 return non_lvalue (fold_convert (type, arg0));
7183 /* Fold A - (A & B) into ~B & A. */
7184 if (!TREE_SIDE_EFFECTS (arg0)
7185 && TREE_CODE (arg1) == BIT_AND_EXPR)
7186 {
7187 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7188 return fold (build2 (BIT_AND_EXPR, type,
7189 fold (build1 (BIT_NOT_EXPR, type,
7190 TREE_OPERAND (arg1, 0))),
7191 arg0));
7192 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7193 return fold (build2 (BIT_AND_EXPR, type,
7194 fold (build1 (BIT_NOT_EXPR, type,
7195 TREE_OPERAND (arg1, 1))),
7196 arg0));
7197 }
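/* Illustration (added for exposition): with A = 13 and B = 7,
   A - (A & B) = 13 - 5 = 8, and (~B & A) = ~7 & 13 = 8.  */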
7199 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7200 any power of 2 minus 1. */
7201 if (TREE_CODE (arg0) == BIT_AND_EXPR
7202 && TREE_CODE (arg1) == BIT_AND_EXPR
7203 && operand_equal_p (TREE_OPERAND (arg0, 0),
7204 TREE_OPERAND (arg1, 0), 0))
7206 tree mask0 = TREE_OPERAND (arg0, 1);
7207 tree mask1 = TREE_OPERAND (arg1, 1);
7208 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7210 if (operand_equal_p (tem, mask1, 0))
7212 tem = fold (build2 (BIT_XOR_EXPR, type,
7213 TREE_OPERAND (arg0, 0), mask1));
7214 return fold (build2 (MINUS_EXPR, type, tem, mask1));
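/* Illustration (added for exposition): with A = 13 and B = 7 (a power
   of 2 minus 1), (A & ~B) - (A & B) = 8 - 5 = 3, and likewise
   (A ^ B) - B = 10 - 7 = 3.  */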
7219 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7220 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7221 return non_lvalue (fold_convert (type, arg0));
7223 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7224 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7225 (-ARG1 + ARG0) reduces to -ARG1. */
7226 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7227 return negate_expr (fold_convert (type, arg1));
7229 /* Fold &x - &x. This can happen from &x.foo - &x.
7230 This is unsafe for certain floats even in non-IEEE formats.
7231 In IEEE, it is unsafe because it does wrong for NaNs.
7232 Also note that operand_equal_p is always false if an operand
7233 is volatile.  */
7235 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7236 && operand_equal_p (arg0, arg1, 0))
7237 return fold_convert (type, integer_zero_node);
7239 /* A - B -> A + (-B) if B is easily negatable. */
7240 if (!wins && negate_expr_p (arg1)
7241 && ((FLOAT_TYPE_P (type)
7242 /* Avoid this transformation if B is a positive REAL_CST. */
7243 && (TREE_CODE (arg1) != REAL_CST
7244 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7245 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7246 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7248 /* Try folding difference of addresses. */
7249 {
7250 HOST_WIDE_INT diff;
7252 if ((TREE_CODE (arg0) == ADDR_EXPR
7253 || TREE_CODE (arg1) == ADDR_EXPR)
7254 && ptr_difference_const (arg0, arg1, &diff))
7255 return build_int_cst_type (type, diff);
7256 }
7258 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7259 of the array.  The loop optimizer sometimes produces this type of
7260 expression.  */
7261 if (TREE_CODE (arg0) == ADDR_EXPR
7262 && TREE_CODE (arg1) == MULT_EXPR)
7264 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7269 if (TREE_CODE (arg0) == MULT_EXPR
7270 && TREE_CODE (arg1) == MULT_EXPR
7271 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7273 /* (A * C) - (B * C) -> (A-B) * C. */
7274 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7275 TREE_OPERAND (arg1, 1), 0))
7276 return fold (build2 (MULT_EXPR, type,
7277 fold (build2 (MINUS_EXPR, type,
7278 TREE_OPERAND (arg0, 0),
7279 TREE_OPERAND (arg1, 0))),
7280 TREE_OPERAND (arg0, 1)));
7281 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7282 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7283 TREE_OPERAND (arg1, 0), 0))
7284 return fold (build2 (MULT_EXPR, type,
7285 TREE_OPERAND (arg0, 0),
7286 fold (build2 (MINUS_EXPR, type,
7287 TREE_OPERAND (arg0, 1),
7288 TREE_OPERAND (arg1, 1)))));
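/* Illustration (added for exposition): x*10 - x*4 becomes
   x * (10 - 4), i.e. x*6; for floating-point types this is done only
   under -funsafe-math-optimizations.  */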
7294 /* (-A) * (-B) -> A * B */
7295 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7296 return fold (build2 (MULT_EXPR, type,
7297 TREE_OPERAND (arg0, 0),
7298 negate_expr (arg1)));
7299 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7300 return fold (build2 (MULT_EXPR, type,
7302 TREE_OPERAND (arg1, 0)));
7304 if (! FLOAT_TYPE_P (type))
7306 if (integer_zerop (arg1))
7307 return omit_one_operand (type, arg1, arg0);
7308 if (integer_onep (arg1))
7309 return non_lvalue (fold_convert (type, arg0));
7311 /* (a * (1 << b)) is (a << b) */
7312 if (TREE_CODE (arg1) == LSHIFT_EXPR
7313 && integer_onep (TREE_OPERAND (arg1, 0)))
7314 return fold (build2 (LSHIFT_EXPR, type, arg0,
7315 TREE_OPERAND (arg1, 1)));
7316 if (TREE_CODE (arg0) == LSHIFT_EXPR
7317 && integer_onep (TREE_OPERAND (arg0, 0)))
7318 return fold (build2 (LSHIFT_EXPR, type, arg1,
7319 TREE_OPERAND (arg0, 1)));
7321 if (TREE_CODE (arg1) == INTEGER_CST
7322 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7323 fold_convert (type, arg1),
7325 return fold_convert (type, tem);
7330 /* Maybe fold x * 0 to 0. The expressions aren't the same
7331 when x is NaN, since x * 0 is also NaN. Nor are they the
7332 same in modes with signed zeros, since multiplying a
7333 negative value by 0 gives -0, not +0. */
7334 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7335 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7336 && real_zerop (arg1))
7337 return omit_one_operand (type, arg1, arg0);
7338 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7339 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7340 && real_onep (arg1))
7341 return non_lvalue (fold_convert (type, arg0));
7343 /* Transform x * -1.0 into -x. */
7344 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7345 && real_minus_onep (arg1))
7346 return fold_convert (type, negate_expr (arg0));
7348 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7349 if (flag_unsafe_math_optimizations
7350 && TREE_CODE (arg0) == RDIV_EXPR
7351 && TREE_CODE (arg1) == REAL_CST
7352 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7354 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7357 return fold (build2 (RDIV_EXPR, type, tem,
7358 TREE_OPERAND (arg0, 1)));
7361 if (flag_unsafe_math_optimizations)
7363 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7364 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7366 /* Optimizations of root(...)*root(...). */
7367 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7369 tree rootfn, arg, arglist;
7370 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7371 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7373 /* Optimize sqrt(x)*sqrt(x) as x. */
7374 if (BUILTIN_SQRT_P (fcode0)
7375 && operand_equal_p (arg00, arg10, 0)
7376 && ! HONOR_SNANS (TYPE_MODE (type)))
7379 /* Optimize root(x)*root(y) as root(x*y). */
7380 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7381 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7382 arglist = build_tree_list (NULL_TREE, arg);
7383 return build_function_call_expr (rootfn, arglist);
7386 /* Optimize expN(x)*expN(y) as expN(x+y). */
7387 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7389 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7390 tree arg = build2 (PLUS_EXPR, type,
7391 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7392 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7393 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7394 return build_function_call_expr (expfn, arglist);
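/* Illustration (added for exposition): exp2(3.0) * exp2(4.0), i.e.
   8 * 16, folds to exp2(7.0) == 128, per expN(x)*expN(y) = expN(x+y).  */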
7397 /* Optimizations of pow(...)*pow(...). */
7398 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7399 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7400 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7402 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7403 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7405 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7406 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7409 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7410 if (operand_equal_p (arg01, arg11, 0))
7412 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7413 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7414 tree arglist = tree_cons (NULL_TREE, fold (arg),
7415 build_tree_list (NULL_TREE,
7417 return build_function_call_expr (powfn, arglist);
7420 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7421 if (operand_equal_p (arg00, arg10, 0))
7423 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7424 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7425 tree arglist = tree_cons (NULL_TREE, arg00,
7426 build_tree_list (NULL_TREE,
7428 return build_function_call_expr (powfn, arglist);
7432 /* Optimize tan(x)*cos(x) as sin(x). */
7433 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7434 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7435 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7436 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7437 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7438 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7439 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7440 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7442 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7444 if (sinfn != NULL_TREE)
7445 return build_function_call_expr (sinfn,
7446 TREE_OPERAND (arg0, 1));
7449 /* Optimize x*pow(x,c) as pow(x,c+1). */
7450 if (fcode1 == BUILT_IN_POW
7451 || fcode1 == BUILT_IN_POWF
7452 || fcode1 == BUILT_IN_POWL)
7454 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7455 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7457 if (TREE_CODE (arg11) == REAL_CST
7458 && ! TREE_CONSTANT_OVERFLOW (arg11)
7459 && operand_equal_p (arg0, arg10, 0))
7461 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7465 c = TREE_REAL_CST (arg11);
7466 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7467 arg = build_real (type, c);
7468 arglist = build_tree_list (NULL_TREE, arg);
7469 arglist = tree_cons (NULL_TREE, arg0, arglist);
7470 return build_function_call_expr (powfn, arglist);
7474 /* Optimize pow(x,c)*x as pow(x,c+1). */
7475 if (fcode0 == BUILT_IN_POW
7476 || fcode0 == BUILT_IN_POWF
7477 || fcode0 == BUILT_IN_POWL)
7479 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7480 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7482 if (TREE_CODE (arg01) == REAL_CST
7483 && ! TREE_CONSTANT_OVERFLOW (arg01)
7484 && operand_equal_p (arg1, arg00, 0))
7486 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7490 c = TREE_REAL_CST (arg01);
7491 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7492 arg = build_real (type, c);
7493 arglist = build_tree_list (NULL_TREE, arg);
7494 arglist = tree_cons (NULL_TREE, arg1, arglist);
7495 return build_function_call_expr (powfn, arglist);
7499 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7501 && operand_equal_p (arg0, arg1, 0))
7503 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7507 tree arg = build_real (type, dconst2);
7508 tree arglist = build_tree_list (NULL_TREE, arg);
7509 arglist = tree_cons (NULL_TREE, arg0, arglist);
7510 return build_function_call_expr (powfn, arglist);
7519 if (integer_all_onesp (arg1))
7520 return omit_one_operand (type, arg1, arg0);
7521 if (integer_zerop (arg1))
7522 return non_lvalue (fold_convert (type, arg0));
7523 if (operand_equal_p (arg0, arg1, 0))
7524 return non_lvalue (fold_convert (type, arg0));
7527 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7528 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7530 t1 = build_int_cst (type, -1);
7531 t1 = force_fit_type (t1, 0, false, false);
7532 return omit_one_operand (type, t1, arg1);
7536 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7537 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7539 t1 = build_int_cst (type, -1);
7540 t1 = force_fit_type (t1, 0, false, false);
7541 return omit_one_operand (type, t1, arg0);
7544 t1 = distribute_bit_expr (code, type, arg0, arg1);
7545 if (t1 != NULL_TREE)
7548 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7550 This results in more efficient code for machines without a NAND
7551 instruction. Combine will canonicalize to the first form
7552 which will allow use of NAND instructions provided by the
7553 backend if they exist. */
7554 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7555 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7557 return fold (build1 (BIT_NOT_EXPR, type,
7558 build2 (BIT_AND_EXPR, type,
7559 TREE_OPERAND (arg0, 0),
7560 TREE_OPERAND (arg1, 0))));
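/* Illustration (added for exposition, De Morgan): with 8-bit a = 0xf0
   and b = 0x3c, ~a | ~b and ~(a & b) are both 0xcf.  */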
7563 /* See if this can be simplified into a rotate first. If that
7564 is unsuccessful continue in the association code. */
7568 if (integer_zerop (arg1))
7569 return non_lvalue (fold_convert (type, arg0));
7570 if (integer_all_onesp (arg1))
7571 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7572 if (operand_equal_p (arg0, arg1, 0))
7573 return omit_one_operand (type, integer_zero_node, arg0);
7576 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7577 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7579 t1 = build_int_cst (type, -1);
7580 t1 = force_fit_type (t1, 0, false, false);
7581 return omit_one_operand (type, t1, arg1);
7585 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7586 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7588 t1 = build_int_cst (type, -1);
7589 t1 = force_fit_type (t1, 0, false, false);
7590 return omit_one_operand (type, t1, arg0);
7593 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7594 with a constant, and the two constants have no bits in common,
7595 we should treat this as a BIT_IOR_EXPR since this may produce more
7596 simplifications.  */
7597 if (TREE_CODE (arg0) == BIT_AND_EXPR
7598 && TREE_CODE (arg1) == BIT_AND_EXPR
7599 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7600 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7601 && integer_zerop (const_binop (BIT_AND_EXPR,
7602 TREE_OPERAND (arg0, 1),
7603 TREE_OPERAND (arg1, 1), 0)))
7605 code = BIT_IOR_EXPR;
7609 /* See if this can be simplified into a rotate first. If that
7610 is unsuccessful continue in the association code. */
7614 if (integer_all_onesp (arg1))
7615 return non_lvalue (fold_convert (type, arg0));
7616 if (integer_zerop (arg1))
7617 return omit_one_operand (type, arg1, arg0);
7618 if (operand_equal_p (arg0, arg1, 0))
7619 return non_lvalue (fold_convert (type, arg0));
7621 /* ~X & X is always zero. */
7622 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7623 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7624 return omit_one_operand (type, integer_zero_node, arg1);
7626 /* X & ~X is always zero. */
7627 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7628 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7629 return omit_one_operand (type, integer_zero_node, arg0);
7631 t1 = distribute_bit_expr (code, type, arg0, arg1);
7632 if (t1 != NULL_TREE)
7634 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7635 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7636 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7639 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7641 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7642 && (~TREE_INT_CST_LOW (arg1)
7643 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7644 return fold_convert (type, TREE_OPERAND (arg0, 0));
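/* Illustration (added for exposition): for "unsigned char c", every
   bit of c survives the 0377 (0xff) mask, so ((int) c & 0377) is just
   (int) c.  */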
7647 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7649 This results in more efficient code for machines without a NOR
7650 instruction. Combine will canonicalize to the first form
7651 which will allow use of NOR instructions provided by the
7652 backend if they exist. */
7653 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7654 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7656 return fold (build1 (BIT_NOT_EXPR, type,
7657 build2 (BIT_IOR_EXPR, type,
7658 TREE_OPERAND (arg0, 0),
7659 TREE_OPERAND (arg1, 0))));
7665 /* Don't touch a floating-point divide by zero unless the mode
7666 of the constant can represent infinity. */
7667 if (TREE_CODE (arg1) == REAL_CST
7668 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7669 && real_zerop (arg1))
7672 /* (-A) / (-B) -> A / B */
7673 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7674 return fold (build2 (RDIV_EXPR, type,
7675 TREE_OPERAND (arg0, 0),
7676 negate_expr (arg1)));
7677 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7678 return fold (build2 (RDIV_EXPR, type,
7680 TREE_OPERAND (arg1, 0)));
7682 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7683 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7684 && real_onep (arg1))
7685 return non_lvalue (fold_convert (type, arg0));
7687 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7688 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7689 && real_minus_onep (arg1))
7690 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7692 /* If ARG1 is a constant, we can convert this to a multiply by the
7693 reciprocal. This does not have the same rounding properties,
7694 so only do this if -funsafe-math-optimizations. We can actually
7695 always safely do it if ARG1 is a power of two, but it's hard to
7696 tell if it is or not in a portable manner. */
7697 if (TREE_CODE (arg1) == REAL_CST)
7699 if (flag_unsafe_math_optimizations
7700 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7702 return fold (build2 (MULT_EXPR, type, arg0, tem));
7703 /* Find the reciprocal if optimizing and the result is exact. */
7707 r = TREE_REAL_CST (arg1);
7708 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7710 tem = build_real (type, r);
7711 return fold (build2 (MULT_EXPR, type, arg0, tem));
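/* Illustration (added for exposition): x / 4.0 becomes x * 0.25
   whenever optimizing, since the reciprocal of a power of two is
   exact; x / 3.0 becomes x * (1.0/3.0) only under
   -funsafe-math-optimizations, because that reciprocal rounds.  */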
7715 /* Convert A/B/C to A/(B*C). */
7716 if (flag_unsafe_math_optimizations
7717 && TREE_CODE (arg0) == RDIV_EXPR)
7718 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7719 fold (build2 (MULT_EXPR, type,
7720 TREE_OPERAND (arg0, 1), arg1))));
7722 /* Convert A/(B/C) to (A/B)*C. */
7723 if (flag_unsafe_math_optimizations
7724 && TREE_CODE (arg1) == RDIV_EXPR)
7725 return fold (build2 (MULT_EXPR, type,
7726 fold (build2 (RDIV_EXPR, type, arg0,
7727 TREE_OPERAND (arg1, 0))),
7728 TREE_OPERAND (arg1, 1)));
7730 /* Convert C1/(X*C2) into (C1/C2)/X. */
7731 if (flag_unsafe_math_optimizations
7732 && TREE_CODE (arg1) == MULT_EXPR
7733 && TREE_CODE (arg0) == REAL_CST
7734 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7736 tree tem = const_binop (RDIV_EXPR, arg0,
7737 TREE_OPERAND (arg1, 1), 0);
7739 return fold (build2 (RDIV_EXPR, type, tem,
7740 TREE_OPERAND (arg1, 0)));
7743 if (flag_unsafe_math_optimizations)
7745 enum built_in_function fcode = builtin_mathfn_code (arg1);
7746 /* Optimize x/expN(y) into x*expN(-y). */
7747 if (BUILTIN_EXPONENT_P (fcode))
7749 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7750 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7751 tree arglist = build_tree_list (NULL_TREE,
7752 fold_convert (type, arg));
7753 arg1 = build_function_call_expr (expfn, arglist);
7754 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7757 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7758 if (fcode == BUILT_IN_POW
7759 || fcode == BUILT_IN_POWF
7760 || fcode == BUILT_IN_POWL)
7762 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7763 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7764 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7765 tree neg11 = fold_convert (type, negate_expr (arg11));
7766 tree arglist = tree_cons(NULL_TREE, arg10,
7767 build_tree_list (NULL_TREE, neg11));
7768 arg1 = build_function_call_expr (powfn, arglist);
7769 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7773 if (flag_unsafe_math_optimizations)
7775 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7776 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7778 /* Optimize sin(x)/cos(x) as tan(x). */
7779 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7780 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7781 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7782 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7783 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7785 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7787 if (tanfn != NULL_TREE)
7788 return build_function_call_expr (tanfn,
7789 TREE_OPERAND (arg0, 1));
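/* Illustration (added for exposition): sin(x)/cos(x) equals tan(x)
   wherever cos(x) is nonzero; the rewrite is gated on
   -funsafe-math-optimizations because it can change rounding and
   exceptional behavior.  */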
7792 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7793 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7794 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7795 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7796 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7797 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7799 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7801 if (tanfn != NULL_TREE)
7803 tree tmp = TREE_OPERAND (arg0, 1);
7804 tmp = build_function_call_expr (tanfn, tmp);
7805 return fold (build2 (RDIV_EXPR, type,
7806 build_real (type, dconst1), tmp));
7810 /* Optimize pow(x,c)/x as pow(x,c-1). */
7811 if (fcode0 == BUILT_IN_POW
7812 || fcode0 == BUILT_IN_POWF
7813 || fcode0 == BUILT_IN_POWL)
7815 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7816 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7817 if (TREE_CODE (arg01) == REAL_CST
7818 && ! TREE_CONSTANT_OVERFLOW (arg01)
7819 && operand_equal_p (arg1, arg00, 0))
7821 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7825 c = TREE_REAL_CST (arg01);
7826 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7827 arg = build_real (type, c);
7828 arglist = build_tree_list (NULL_TREE, arg);
7829 arglist = tree_cons (NULL_TREE, arg1, arglist);
7830 return build_function_call_expr (powfn, arglist);
7836 case TRUNC_DIV_EXPR:
7837 case ROUND_DIV_EXPR:
7838 case FLOOR_DIV_EXPR:
7839 case CEIL_DIV_EXPR:
7840 case EXACT_DIV_EXPR:
7841 if (integer_onep (arg1))
7842 return non_lvalue (fold_convert (type, arg0));
7843 if (integer_zerop (arg1))
7844 return t;
7845 /* X / -1 is -X.  */
7846 if (!TYPE_UNSIGNED (type)
7847 && TREE_CODE (arg1) == INTEGER_CST
7848 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7849 && TREE_INT_CST_HIGH (arg1) == -1)
7850 return fold_convert (type, negate_expr (arg0));
7852 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7853 operation, EXACT_DIV_EXPR.
7855 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7856 At one time others generated faster code, it's not clear if they do
7857 after the last round of changes to the DIV code in expmed.c.  */
7858 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7859 && multiple_of_p (type, arg0, arg1))
7860 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7862 if (TREE_CODE (arg1) == INTEGER_CST
7863 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7865 return fold_convert (type, tem);
7869 case CEIL_MOD_EXPR:
7870 case FLOOR_MOD_EXPR:
7871 case ROUND_MOD_EXPR:
7872 case TRUNC_MOD_EXPR:
7873 if (integer_onep (arg1))
7874 return omit_one_operand (type, integer_zero_node, arg0);
7875 if (integer_zerop (arg1))
7876 return t;
7878 /* X % -1 is zero. */
7879 if (!TYPE_UNSIGNED (type)
7880 && TREE_CODE (arg1) == INTEGER_CST
7881 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7882 && TREE_INT_CST_HIGH (arg1) == -1)
7883 return omit_one_operand (type, integer_zero_node, arg0);
7885 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7886 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7887 if (code == TRUNC_MOD_EXPR
7888 && TYPE_UNSIGNED (type)
7889 && integer_pow2p (arg1))
7891 unsigned HOST_WIDE_INT high, low;
7892 tree mask;
7893 int l;
7895 l = tree_log2 (arg1);
7896 if (l >= HOST_BITS_PER_WIDE_INT)
7898 high = ((unsigned HOST_WIDE_INT) 1
7899 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7905 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7908 mask = build_int_cst_wide (type, low, high);
7909 return fold (build2 (BIT_AND_EXPR, type,
7910 fold_convert (type, arg0), mask));
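/* Illustration (added for exposition): for unsigned x, x % 8 folds to
   x & 7; for C == 2**l the mask is the low-order l bits.  */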
7913 /* X % -C is the same as X % C. */
7914 if (code == TRUNC_MOD_EXPR
7915 && !TYPE_UNSIGNED (type)
7916 && TREE_CODE (arg1) == INTEGER_CST
7917 && TREE_INT_CST_HIGH (arg1) < 0
7919 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7920 && !sign_bit_p (arg1, arg1))
7921 return fold (build2 (code, type, fold_convert (type, arg0),
7922 fold_convert (type, negate_expr (arg1))));
7924 /* X % -Y is the same as X % Y. */
7925 if (code == TRUNC_MOD_EXPR
7926 && !TYPE_UNSIGNED (type)
7927 && TREE_CODE (arg1) == NEGATE_EXPR
7928 && !flag_trapv)
7929 return fold (build2 (code, type, fold_convert (type, arg0),
7930 fold_convert (type, TREE_OPERAND (arg1, 0))));
7932 if (TREE_CODE (arg1) == INTEGER_CST
7933 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7935 return fold_convert (type, tem);
7941 if (integer_all_onesp (arg0))
7942 return omit_one_operand (type, arg0, arg1);
7946 /* Optimize -1 >> x for arithmetic right shifts. */
7947 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7948 return omit_one_operand (type, arg0, arg1);
7949 /* ... fall through ... */
7953 if (integer_zerop (arg1))
7954 return non_lvalue (fold_convert (type, arg0));
7955 if (integer_zerop (arg0))
7956 return omit_one_operand (type, arg0, arg1);
7958 /* Since a negative shift count is not well-defined,
7959 don't try to compute it in the compiler.  */
7960 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7961 return t;
7962 /* Rewrite an LROTATE_EXPR by a constant into an
7963 RROTATE_EXPR by a new constant. */
7964 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7966 tree tem = build_int_cst (NULL_TREE,
7967 GET_MODE_BITSIZE (TYPE_MODE (type)));
7968 tem = fold_convert (TREE_TYPE (arg1), tem);
7969 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7970 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
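/* Illustration (added for exposition): on a 32-bit type, a left rotate
   by 5 becomes a right rotate by 32 - 5 = 27.  */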
7973 /* If we have a rotate of a bit operation with the rotate count and
7974 the second operand of the bit operation both constant,
7975 permute the two operations. */
7976 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7977 && (TREE_CODE (arg0) == BIT_AND_EXPR
7978 || TREE_CODE (arg0) == BIT_IOR_EXPR
7979 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7980 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7981 return fold (build2 (TREE_CODE (arg0), type,
7982 fold (build2 (code, type,
7983 TREE_OPERAND (arg0, 0), arg1)),
7984 fold (build2 (code, type,
7985 TREE_OPERAND (arg0, 1), arg1))));
7987 /* Two consecutive rotates adding up to the width of the mode can
7988 be ignored.  */
7989 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7990 && TREE_CODE (arg0) == RROTATE_EXPR
7991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7992 && TREE_INT_CST_HIGH (arg1) == 0
7993 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7994 && ((TREE_INT_CST_LOW (arg1)
7995 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7996 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7997 return TREE_OPERAND (arg0, 0);
8002 if (operand_equal_p (arg0, arg1, 0))
8003 return omit_one_operand (type, arg0, arg1);
8004 if (INTEGRAL_TYPE_P (type)
8005 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8006 return omit_one_operand (type, arg1, arg0);
8010 if (operand_equal_p (arg0, arg1, 0))
8011 return omit_one_operand (type, arg0, arg1);
8012 if (INTEGRAL_TYPE_P (type)
8013 && TYPE_MAX_VALUE (type)
8014 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8015 return omit_one_operand (type, arg1, arg0);
8018 case TRUTH_NOT_EXPR:
8019 /* The argument to invert_truthvalue must have Boolean type. */
8020 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8021 arg0 = fold_convert (boolean_type_node, arg0);
8023 /* Note that the operand of this must be an int
8024 and its values must be 0 or 1.
8025 ("true" is a fixed value perhaps depending on the language,
8026 but we don't handle values other than 1 correctly yet.) */
8027 tem = invert_truthvalue (arg0);
8028 /* Avoid infinite recursion. */
8029 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8031 return fold_convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem)
            return fold (build2 (code, type, tem, arg1));

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem)
            return fold (build2 (code, type, arg0, tem));
        }
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return t;
      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build2 (TREE_CODE (arg0), type, a00,
                                 fold (build2 (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build2 (TREE_CODE (arg0), type, a00,
                                 fold (build2 (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build2 (TREE_CODE (arg0), type, a01,
                                 fold (build2 (code, type, a00, a11))));
          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */
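          /* For instance, (a && b) || (c && b) becomes (a || c) && b;
             this is only safe because either the operators are fully
             commutative here or A10 (c in the example) has no side
             effects.  */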
          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build2 (TREE_CODE (arg0), type,
                                 fold (build2 (code, type, a00, a10)),
                                 a01));
        }
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      break;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      break;
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
        return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
      /* If this is an equality comparison of the address of a non-weak
         object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && integer_zerop (arg1))
        return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
                                      ? code == EQ_EXPR : code != EQ_EXPR,
                                      type);
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree targ0 = strip_float_extensions (arg0);
          tree targ1 = strip_float_extensions (arg1);
          tree newtype = TREE_TYPE (targ0);

          if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
            newtype = TREE_TYPE (targ1);

          /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
          if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
            return fold (build2 (code, type, fold_convert (newtype, targ0),
                                 fold_convert (newtype, targ1)));

          /* (-a) CMP (-b) -> b CMP a  */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
                                 TREE_OPERAND (arg0, 0)));

          if (TREE_CODE (arg1) == REAL_CST)
            {
              REAL_VALUE_TYPE cst;
              cst = TREE_REAL_CST (arg1);

              /* (-a) CMP CST -> a swap(CMP) (-CST)  */
              if (TREE_CODE (arg0) == NEGATE_EXPR)
                return
                  fold (build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 0),
                                build_real (TREE_TYPE (arg1),
                                            REAL_VALUE_NEGATE (cst))));

              /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
              /* a CMP (-0) -> a CMP 0  */
              if (REAL_VALUE_MINUS_ZERO (cst))
                return fold (build2 (code, type, arg0,
                                     build_real (TREE_TYPE (arg1), dconst0)));

              /* x != NaN is always true, other ops are always false.  */
              if (REAL_VALUE_ISNAN (cst)
                  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
                {
                  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
                  return omit_one_operand (type, tem, arg0);
                }

              /* Fold comparisons against infinity.  */
              if (REAL_VALUE_ISINF (cst))
                {
                  tem = fold_inf_compare (code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
          /* If this is a comparison of a real constant with a PLUS_EXPR
             or a MINUS_EXPR of a real constant, we can convert it into a
             comparison with a revised real constant as long as no overflow
             occurs when unsafe_math_optimizations are enabled.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                          ? MINUS_EXPR : PLUS_EXPR,
                                          arg1, TREE_OPERAND (arg0, 1), 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
          /* Likewise, we can simplify a comparison of a real constant with
             a MINUS_EXPR whose first operand is also a real constant, i.e.
             (c1 - x) < c2 becomes x > c1-c2.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (arg0) == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
              && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                          arg1, 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build2 (swap_tree_comparison (code), type,
                                 TREE_OPERAND (arg0, 1), tem));
          /* Fold comparisons against built-in math functions.  */
          if (TREE_CODE (arg1) == REAL_CST
              && flag_unsafe_math_optimizations
              && ! flag_errno_math)
            {
              enum built_in_function fcode = builtin_mathfn_code (arg0);

              if (fcode != END_BUILTINS)
                {
                  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
        }
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
          && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
              || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
          /* This optimization is invalid for ordered comparisons
             if CONST+INCR overflows or if foo+incr might overflow.
             This optimization is invalid for floating point due to rounding.
             For pointer types we assume overflow doesn't happen.  */
          && (POINTER_TYPE_P (TREE_TYPE (arg0))
              || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && (code == EQ_EXPR || code == NE_EXPR))))
        {
          tree varop, newconst;

          if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
            {
              newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
                                       arg1, TREE_OPERAND (arg0, 1)));
              varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
                              TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
            }
          else
            {
              newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
                                       arg1, TREE_OPERAND (arg0, 1)));
              varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
                              TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
            }
          /* If VAROP is a reference to a bitfield, we must mask
             the constant by the width of the field.  */
          if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
              && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
              && host_integerp (DECL_SIZE (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)), 1))
            {
              tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
              HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
              tree folded_compare, shift;

              /* First check whether the comparison would come out
                 always the same.  If we don't do that we would
                 change the meaning with the masking.  */
              folded_compare = fold (build2 (code, type,
                                             TREE_OPERAND (varop, 0), arg1));
              if (integer_zerop (folded_compare)
                  || integer_onep (folded_compare))
                return omit_one_operand (type, folded_compare, varop);

              shift = build_int_cst (NULL_TREE,
                                     TYPE_PRECISION (TREE_TYPE (varop)) - size);
              shift = fold_convert (TREE_TYPE (varop), shift);
              newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
                                       newconst, shift));
              newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
                                       newconst, shift));
            }

          return fold (build2 (code, type, varop, newconst));
        }
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
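      /* E.g. x >= 5 becomes x > 4 and x < 5 becomes x <= 4; since C > 0,
         computing C - 1 cannot underflow.  */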
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build2 (GT_EXPR, type, arg0, arg1));

            case LT_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build2 (LE_EXPR, type, arg0, arg1));

            default:
              break;
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.

         This is quite similar to fold_relational_hi_lo; however,
         attempts to share the code have been nothing but trouble.  */
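      /* E.g. for a 32-bit unsigned type the maximum is 0xffffffff:
         x > 0xffffffffu is always false, x <= 0xffffffffu is always
         true, and x >= 0xffffffffu folds to x == 0xffffffffu.  */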
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            unsigned HOST_WIDE_INT signed_max;
            unsigned HOST_WIDE_INT max, min;

            signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

            if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
              {
                max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                min = 0;
              }
            else
              {
                max = signed_max;
                min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
              }

            if (TREE_INT_CST_HIGH (arg1) == 0
                && TREE_INT_CST_LOW (arg1) == max)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));

                case LE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold (build2 (NE_EXPR, type, arg0, arg1));

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == max - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (NE_EXPR, type, arg0, arg1));
                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));

                case GE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold (build2 (NE_EXPR, type, arg0, arg1));

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (NE_EXPR, type, arg0, arg1));
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));
                default:
                  break;
                }
            else if (!in_gimple_form
                     && TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == signed_max
                     && TYPE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous transformations.  */
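                /* E.g. with 32-bit unsigned x, x <= 0x7fffffff becomes
                   (int) x >= 0, which is a cheaper sign-bit test.  */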
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
                    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
                    return fold
                      (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                               type, fold_convert (st0, arg0),
                               fold_convert (st1, integer_zero_node)));
                  }
              }
          }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (code, type,
                             TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && TREE_CODE (arg0) == NOP_EXPR)
        {
          /* If we are widening one operand of an integer comparison,
             see if the other operand is similarly being widened.  Perhaps we
             can do the comparison in the narrower type.  */
          tem = fold_widened_comparison (code, type, arg0, arg1);
          if (tem)
            return tem;

          /* Or if we are changing signedness.  */
          tem = fold_sign_changed_comparison (code, type, arg0, arg1);
          if (tem)
            return tem;
        }
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build2 (TRUTH_ANDIF_EXPR, type,
                             build2 (GE_EXPR, type,
                                     TREE_OPERAND (arg0, 0), tem),
                             build2 (LE_EXPR, type,
                                     TREE_OPERAND (arg0, 0), arg1)));
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
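      /* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0,
         which tests the same bit with one instruction fewer on such
         machines.  */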
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            return
              fold (build2 (code, type,
                            build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                    build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
                                            arg01, TREE_OPERAND (arg00, 1)),
                                    fold_convert (TREE_TYPE (arg0),
                                                  integer_one_node)),
                            arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build2 (code, type,
                            build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                    build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
                                            arg00, TREE_OPERAND (arg01, 1)),
                                    fold_convert (TREE_TYPE (arg0),
                                                  integer_one_node)),
                            arg1));
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
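      /* E.g. for signed x, (x % 4) == 0 becomes ((unsigned) x % 4u) == 0:
         because the power-of-two divisor also divides 1 << precision,
         the signed and unsigned remainders are zero for exactly the same
         inputs.  */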
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
          tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
                                      fold_convert (newtype,
                                                    TREE_OPERAND (arg0, 0)),
                                      fold_convert (newtype,
                                                    TREE_OPERAND (arg0, 1))));

          return fold (build2 (code, type, newmod,
                               fold_convert (newtype, arg1)));
        }

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
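      /* E.g. (x & 8) == 8 becomes (x & 8) != 0, since the single-bit
         mask can only produce 0 or 8.  */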
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                             arg0, fold_convert (TREE_TYPE (arg0),
                                                 integer_zero_node)));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
         2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold (build1 (BIT_NOT_EXPR,
                                    TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                    TREE_OPERAND (arg0, 1)));
          tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                        arg1, notc));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
          tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                        TREE_OPERAND (arg0, 1), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
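      /* E.g. for unsigned x, x < (1u << y) holds exactly when no bit at
         position y or above is set, i.e. (x >> y) == 0; likewise
         x >= (1u << y) becomes (x >> y) != 0.  */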
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                               TREE_OPERAND (arg1, 1)),
                       fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TYPE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                  fold_convert (TREE_TYPE (arg0),
                                build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                        TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                      1))),
                  fold_convert (TREE_TYPE (arg0), integer_zero_node));
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              break;

            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              return fold (build2 (EQ_EXPR, type, arg0, arg1));

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer
                 or we don't honor IEEE floating point NaNs.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                break;
              /* ... fall through ... */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              break;
            }
        }
      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */
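      /* For ((x > y) - (y > x)) > 0 the three trial values give
         1 > 0, 0 > 0 and -1 > 0, i.e. true only in the "greater" case;
         that selects mask 4 below, so the whole expression folds to
         x > y.  */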
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build2 (code, type,
                                eval_subst (arg0, cval1, maxval,
                                            cval2, minval),
                                arg1));
              tree equal_result
                = fold (build2 (code, type,
                                eval_subst (arg0, cval1, maxval,
                                            cval2, maxval),
                                arg1));
              tree low_result
                = fold (build2 (code, type,
                                eval_subst (arg0, cval1, minval,
                                            cval2, maxval),
                                arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  tem = build2 (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (tem);
                  else
                    return fold (tem);
                }
            }
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && lang_hooks.can_use_bit_fields_p ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                                : TRUTH_ORIF_EXPR),
                               type,
                               fold (build2 (code, type, real0, real1)),
                               fold (build2 (code, type, imag0, imag1))));
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold (build2 (code, type,
                                 build1 (INDIRECT_REF, char_type_node,
                                         TREE_VALUE (arglist)),
                                 fold_convert (char_type_node,
                                               integer_zero_node)));
        }
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
         into a single range test.  */
      if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !integer_zerop (TREE_OPERAND (arg0, 1))
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && !TREE_OVERFLOW (arg1))
        {
          t1 = fold_div_compare (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      if ((code == EQ_EXPR || code == NE_EXPR)
          && !TREE_SIDE_EFFECTS (arg0)
          && integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        return constant_boolean_node (code == NE_EXPR, type);

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? t : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold (build2 (code, type, fold_convert (newtype, targ0),
                               fold_convert (newtype, targ1)));
      }

      return t;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.  */
          if (! VOID_TYPE_P (TREE_TYPE (tem))
              || VOID_TYPE_P (type))
            return pedantic_non_lvalue (tem);
        }

      if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (type, arg0,
                                                TREE_OPERAND (t, 1),
                                                TREE_OPERAND (t, 2));
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (t, 2),
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
        {
          tem = invert_truthvalue (arg0);
          if (COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (type, tem,
                                                    TREE_OPERAND (t, 2),
                                                    TREE_OPERAND (t, 1));
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
                                TREE_OPERAND (t, 2), false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build3 (code, type, tem,
                                 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
          && integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        return fold_convert (type, fold (build2 (BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem, arg1)));
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold (build2 (BIT_AND_EXPR, type,
                                 TREE_OPERAND (tem, 0), arg1));
        }
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
                                 TREE_OPERAND (t, 2)));
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
        return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
                             TREE_OPERAND (t, 2)));

      return t;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      return build_complex (type, arg0, arg1);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (TREE_CODE (arg0), type,
                             fold (build1 (REALPART_EXPR, type,
                                           TREE_OPERAND (arg0, 0))),
                             fold (build1 (REALPART_EXPR, type,
                                           TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (TREE_CODE (arg0), type,
                             fold (build1 (IMAGPART_EXPR, type,
                                           TREE_OPERAND (arg0, 0))),
                             fold (build1 (IMAGPART_EXPR, type,
                                           TREE_OPERAND (arg0, 1)))));
      return t;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
        {
          tree tmp = fold_builtin (t, false);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);

  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
               <= sizeof (struct tree_decl))
              && sizeof (struct tree_type) <= sizeof (struct tree_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      TYPE_CACHED_VALUES_P (expr) = 0;
      TYPE_CACHED_VALUES (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
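      /* E.g. (x & ~7) is always a multiple of 8: the mask clears the
         three low bits, and ~7 itself is a multiple of 8.  */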
      if (!integer_pow2p (bottom))
        return 0;
      /* ... fall through ... */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* ... fall through ... */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
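      /* E.g. two unsigned chars zero-extended and added in a 32-bit int:
         prec = MAX (8, 8) + 1 = 9 < 32, and indeed the sum is at most
         0x1fe, which cannot reach the sign bit.  */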
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;
    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their combined width is smaller than that of
         the result.  */
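      /* E.g. unsigned shorts multiplied in a 64-bit type: 16 + 16 = 32
         < 64, and the product stays below 1 << 32.  In a 32-bit int the
         test correctly fails: 0xffff * 0xffff = 0xfffe0001 has the sign
         bit set.  */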
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }
    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_SQRT)
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));
            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9837 CASE_BUILTIN_F (BUILT_IN_FMAX)
9838 /* True if the 1st OR 2nd arguments are nonnegative. */
9839 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9840 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9842 CASE_BUILTIN_F (BUILT_IN_FMIN)
9843 /* True if the 1st AND 2nd arguments are nonnegative. */
9844 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9845 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9847 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9848 /* True if the 2nd argument is nonnegative. */
9849 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9853 #undef CASE_BUILTIN_F
9854 #undef CASE_BUILTIN_I
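/* For illustration: CASE_BUILTIN_F (BUILT_IN_SQRT) above expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:

   covering the double, float and long double variants of the builtin,
   while CASE_BUILTIN_I covers the int, long and long long variants.  */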
9858 /* ... fall through ... */
9861 if (truth_value_p (TREE_CODE (t)))
9862 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9866 /* We don't know sign of `t', so be conservative and return false. */
9870 /* Return true when T is an address and is known to be nonzero.
9871 For floating point we further ensure that T is not denormal.
9872 Similar logic is present in nonzero_address in rtlanal.h. */
9875 tree_expr_nonzero_p (tree t)
9877 tree type = TREE_TYPE (t);
9879 /* Doing something useful for floating point would need more work. */
9880 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9883 switch (TREE_CODE (t))
9886 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9887 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9890 /* We used to test for !integer_zerop here. This does not work correctly
9891 if TREE_CONSTANT_OVERFLOW (t). */
9892 return (TREE_INT_CST_LOW (t) != 0
9893 || TREE_INT_CST_HIGH (t) != 0);
9896 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9898 /* In the presence of negative values it is hard
9899 to say anything definite. */
9900 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9901 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9903 /* One of the operands must be positive and the other nonnegative. */
9904 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9905 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9910 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9912 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9913 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9919 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9920 tree outer_type = TREE_TYPE (t);
9922 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9923 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9929 tree base = get_base_address (TREE_OPERAND (t, 0));
9934 /* Weak declarations may link to NULL. */
9936 return !DECL_WEAK (base);
9938 /* Constants are never weak. */
9939 if (CONSTANT_CLASS_P (base))
9946 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9947 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9950 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9951 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9954 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9956 /* When both operands are nonzero, then MAX must be too. */
9957 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9960 /* MAX where operand 0 is positive is positive. */
9961 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9963 /* MAX where operand 1 is positive is positive. */
9964 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9965 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9972 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9975 case NON_LVALUE_EXPR:
9976 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9979 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9980 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
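/* A worked example of the recursion above, assuming signed arithmetic
   without -fwrapv: (x | 1) * (y | 1) is known nonzero, because each
   BIT_IOR_EXPR has a nonzero constant operand, and a MULT_EXPR whose
   operands are both nonzero (and cannot wrap) is itself nonzero.  */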
9988 /* See if we are applying CODE, a relational operator, to the highest
9989 or lowest possible integer of TYPE. If so, then the result is a
9990 compile-time constant. */
9993 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9998 enum tree_code code = *code_p;
9999 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10001 if (TREE_CODE (op1) == INTEGER_CST
10002 && ! TREE_CONSTANT_OVERFLOW (op1)
10003 && width <= HOST_BITS_PER_WIDE_INT
10004 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10005 || POINTER_TYPE_P (TREE_TYPE (op1))))
10007 unsigned HOST_WIDE_INT signed_max;
10008 unsigned HOST_WIDE_INT max, min;
10010 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10012 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10014 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10020 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10023 if (TREE_INT_CST_HIGH (op1) == 0
10024 && TREE_INT_CST_LOW (op1) == max)
10028 return omit_one_operand (type, integer_zero_node, op0);
10034 return omit_one_operand (type, integer_one_node, op0);
10040 /* The GE_EXPR and LT_EXPR cases above are not normally
10041 reached because of previous transformations. */
10046 else if (TREE_INT_CST_HIGH (op1) == 0
10047 && TREE_INT_CST_LOW (op1) == max - 1)
10052 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10056 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10061 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10062 && TREE_INT_CST_LOW (op1) == min)
10066 return omit_one_operand (type, integer_zero_node, op0);
10073 return omit_one_operand (type, integer_one_node, op0);
10082 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10083 && TREE_INT_CST_LOW (op1) == min + 1)
10088 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10092 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10098 else if (TREE_INT_CST_HIGH (op1) == 0
10099 && TREE_INT_CST_LOW (op1) == signed_max
10100 && TYPE_UNSIGNED (TREE_TYPE (op1))
10101 /* signed_type does not work on pointer types. */
10102 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10104 /* The following case also applies to X < signed_max+1
10105 and X >= signed_max+1 because of previous transformations. */
10106 if (code == LE_EXPR || code == GT_EXPR)
10108 tree st0, st1, exp, retval;
10109 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10110 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10112 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10114 fold_convert (st0, op0),
10115 fold_convert (st1, integer_zero_node));
10117 retval = fold_binary_to_constant (TREE_CODE (exp),
10119 TREE_OPERAND (exp, 0),
10120 TREE_OPERAND (exp, 1));
10122 /* If we are in gimple form, then returning EXP would create
10123 non-gimple expressions. Clearing it is safe and ensures
10124 we do not allow a non-gimple expression to escape. */
10125 if (in_gimple_form)
10128 return (retval ? retval : exp);
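/* An illustrative sketch (not part of GCC) of the signed_max trick used
   above, for a 32-bit unsigned operand: X <= 0x7fffffff holds exactly
   when the sign bit of X is clear, so the unsigned comparison can be
   rewritten as a signed comparison against zero (assuming the usual
   two's-complement conversion on the host).  */

static int
le_signed_max (unsigned int x)
{
  return (int) x >= 0;          /* equivalent to x <= INT_MAX */
}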
10137 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10138 attempt to fold the expression to a constant without modifying TYPE,
10141 If the expression could be simplified to a constant, then return
10142 the constant. If the expression cannot be simplified to a
10143 constant, then return NULL_TREE.
10145 Note this is primarily designed to be called after gimplification
10146 of the tree structures and when at least one operand is a constant.
10147 As a result of those simplifying assumptions this routine is far
10148 simpler than the generic fold routine. */
10151 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10158 /* If this is a commutative operation, and OP0 is a constant, move it
10159 to OP1 to reduce the number of tests below. */
10160 if (commutative_tree_code (code)
10161 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10168 /* If either operand is a complex type, extract its real component. */
10169 if (TREE_CODE (op0) == COMPLEX_CST)
10170 subop0 = TREE_REALPART (op0);
10174 if (TREE_CODE (op1) == COMPLEX_CST)
10175 subop1 = TREE_REALPART (op1);
10179 /* Note if either argument is not a real or integer constant.
10180 With a few exceptions, simplification is limited to cases
10181 where both arguments are constants. */
10182 if ((TREE_CODE (subop0) != INTEGER_CST
10183 && TREE_CODE (subop0) != REAL_CST)
10184 || (TREE_CODE (subop1) != INTEGER_CST
10185 && TREE_CODE (subop1) != REAL_CST))
10191 /* (plus (address) (const_int)) is a constant. */
10192 if (TREE_CODE (op0) == PLUS_EXPR
10193 && TREE_CODE (op1) == INTEGER_CST
10194 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10195 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10196 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10198 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10200 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10201 const_binop (PLUS_EXPR, op1,
10202 TREE_OPERAND (op0, 1), 0));
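/* For example, (&x + 4) + 8 is re-associated above into &x + 12,
   which remains an address constant.  */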
10210 /* Both arguments are constants. Simplify. */
10211 tem = const_binop (code, op0, op1, 0);
10212 if (tem != NULL_TREE)
10214 /* The return value should always have the same type as
10215 the original expression. */
10216 if (TREE_TYPE (tem) != type)
10217 tem = fold_convert (type, tem);
10224 /* Fold &x - &x. This can happen from &x.foo - &x.
10225 This is unsafe for certain floats even in non-IEEE formats.
10226 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10227 Also note that operand_equal_p is always false if an
10228 operand is volatile. */
10229 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10230 return fold_convert (type, integer_zero_node);
10236 /* Special case multiplication or bitwise AND where one argument
10237 is zero. */
10238 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10239 return omit_one_operand (type, op1, op0);
10241 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
10242 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
10243 && real_zerop (op1))
10244 return omit_one_operand (type, op1, op0);
10249 /* Special case when we know the result will be all ones. */
10250 if (integer_all_onesp (op1))
10251 return omit_one_operand (type, op1, op0);
10255 case TRUNC_DIV_EXPR:
10256 case ROUND_DIV_EXPR:
10257 case FLOOR_DIV_EXPR:
10258 case CEIL_DIV_EXPR:
10259 case EXACT_DIV_EXPR:
10260 case TRUNC_MOD_EXPR:
10261 case ROUND_MOD_EXPR:
10262 case FLOOR_MOD_EXPR:
10263 case CEIL_MOD_EXPR:
10265 /* Division by zero is undefined. */
10266 if (integer_zerop (op1))
10269 if (TREE_CODE (op1) == REAL_CST
10270 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10271 && real_zerop (op1))
10277 if (INTEGRAL_TYPE_P (type)
10278 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10279 return omit_one_operand (type, op1, op0);
10284 if (INTEGRAL_TYPE_P (type)
10285 && TYPE_MAX_VALUE (type)
10286 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10287 return omit_one_operand (type, op1, op0);
10292 /* Optimize -1 >> x for arithmetic right shifts. */
10293 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10294 return omit_one_operand (type, op0, op1);
10295 /* ... fall through ... */
10298 if (integer_zerop (op0))
10299 return omit_one_operand (type, op0, op1);
10301 /* Since a negative shift count is not well-defined, don't
10302 try to compute it in the compiler. */
10303 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10310 /* -1 rotated either direction by any amount is still -1. */
10311 if (integer_all_onesp (op0))
10312 return omit_one_operand (type, op0, op1);
10314 /* 0 rotated either direction by any amount is still zero. */
10315 if (integer_zerop (op0))
10316 return omit_one_operand (type, op0, op1);
10322 return build_complex (type, op0, op1);
10331 /* If one arg is a real or integer constant, put it last. */
10332 if ((TREE_CODE (op0) == INTEGER_CST
10333 && TREE_CODE (op1) != INTEGER_CST)
10334 || (TREE_CODE (op0) == REAL_CST
10335 && TREE_CODE (op1) != REAL_CST))
10342 code = swap_tree_comparison (code);
10345 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10346 This transformation affects the cases which are handled in later
10347 optimizations involving comparisons with non-negative constants. */
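/* For example, x >= 3 becomes x > 2, and x < 3 becomes x <= 2.  */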
10348 if (TREE_CODE (op1) == INTEGER_CST
10349 && TREE_CODE (op0) != INTEGER_CST
10350 && tree_int_cst_sgn (op1) > 0)
10356 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10361 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10369 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10373 /* Fall through. */
10376 case UNORDERED_EXPR:
10386 return fold_relational_const (code, type, op0, op1);
10389 /* This could probably be handled. */
10392 case TRUTH_AND_EXPR:
10393 /* If second arg is constant zero, result is zero, but first arg
10394 must be evaluated. */
10395 if (integer_zerop (op1))
10396 return omit_one_operand (type, op1, op0);
10397 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10398 case will be handled here. */
10399 if (integer_zerop (op0))
10400 return omit_one_operand (type, op0, op1);
10401 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10402 return constant_boolean_node (true, type);
10405 case TRUTH_OR_EXPR:
10406 /* If second arg is constant true, result is true, but we must
10407 evaluate first arg. */
10408 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10409 return omit_one_operand (type, op1, op0);
10410 /* Likewise for first arg, but note this only occurs here for
10411 TRUTH_OR_EXPR. */
10412 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10413 return omit_one_operand (type, op0, op1);
10414 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10415 return constant_boolean_node (false, type);
10418 case TRUTH_XOR_EXPR:
10419 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10421 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10422 return constant_boolean_node (x, type);
10431 /* Given the components of a unary expression CODE, TYPE and OP0,
10432 attempt to fold the expression to a constant without modifying
10435 If the expression could be simplified to a constant, then return
10436 the constant. If the expression cannot be simplified to a
10437 constant, then return NULL_TREE.
10439 Note this is primarily designed to be called after gimplification
10440 of the tree structures and when op0 is a constant. As a result
10441 of those simplifying assumptions this routine is far simpler than
10442 the generic fold routine. */
10445 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10447 /* Make sure we have a suitable constant argument. */
10448 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10452 if (TREE_CODE (op0) == COMPLEX_CST)
10453 subop = TREE_REALPART (op0);
10457 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10466 case FIX_TRUNC_EXPR:
10467 case FIX_FLOOR_EXPR:
10468 case FIX_CEIL_EXPR:
10469 return fold_convert_const (code, type, op0);
10472 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10473 return fold_negate_const (op0, type);
10478 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10479 return fold_abs_const (op0, type);
10484 if (TREE_CODE (op0) == INTEGER_CST)
10485 return fold_not_const (op0, type);
10489 case REALPART_EXPR:
10490 if (TREE_CODE (op0) == COMPLEX_CST)
10491 return TREE_REALPART (op0);
10495 case IMAGPART_EXPR:
10496 if (TREE_CODE (op0) == COMPLEX_CST)
10497 return TREE_IMAGPART (op0);
10502 if (TREE_CODE (op0) == COMPLEX_CST
10503 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10504 return build_complex (type, TREE_REALPART (op0),
10505 negate_expr (TREE_IMAGPART (op0)));
10513 /* If EXP represents referencing an element in a constant string
10514 (either via pointer arithmetic or array indexing), return the
10515 tree representing the value accessed, otherwise return NULL. */
10518 fold_read_from_constant_string (tree exp)
10520 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10522 tree exp1 = TREE_OPERAND (exp, 0);
10526 if (TREE_CODE (exp) == INDIRECT_REF)
10527 string = string_constant (exp1, &index);
10530 tree low_bound = array_ref_low_bound (exp);
10531 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10533 /* Optimize the special-case of a zero lower bound.
10535 We convert the low_bound to sizetype to avoid some problems
10536 with constant folding. (E.g. suppose the lower bound is 1,
10537 and its mode is QI. Without the conversion, (ARRAY
10538 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10539 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10540 if (! integer_zerop (low_bound))
10541 index = size_diffop (index, fold_convert (sizetype, low_bound));
10547 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10548 && TREE_CODE (string) == STRING_CST
10549 && TREE_CODE (index) == INTEGER_CST
10550 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10551 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10553 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10554 return fold_convert (TREE_TYPE (exp),
10555 build_int_cst (NULL_TREE,
10556 (TREE_STRING_POINTER (string)
10557 [TREE_INT_CST_LOW (index)])));
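/* For example, given  static const char s[] = "hi";  both s[1] and
   *(s + 1) are folded by this routine to the character constant 'i'.  */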
10562 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10563 an integer constant or real constant.
10565 TYPE is the type of the result. */
10568 fold_negate_const (tree arg0, tree type)
10570 tree t = NULL_TREE;
10572 switch (TREE_CODE (arg0))
10576 unsigned HOST_WIDE_INT low;
10577 HOST_WIDE_INT high;
10578 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10579 TREE_INT_CST_HIGH (arg0),
10581 t = build_int_cst_wide (type, low, high);
10582 t = force_fit_type (t, 1,
10583 (overflow | TREE_OVERFLOW (arg0))
10584 && !TYPE_UNSIGNED (type),
10585 TREE_CONSTANT_OVERFLOW (arg0));
10590 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10594 gcc_unreachable ();
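/* An illustrative sketch (not GCC code) of the double-word negation that
   neg_double performs for the INTEGER_CST case above: negate the low
   word, and either negate or complement the high word depending on
   whether a borrow propagates.  The unsigned long/long parameters below
   are hypothetical stand-ins for HOST_WIDE_INT.  */

static int
neg_double_sketch (unsigned long l1, long h1, unsigned long *lv, long *hv)
{
  if (l1 == 0)
    {
      /* No borrow out of the low word; negate the high word.  Overflow
         occurs only for the most negative value, whose negation keeps
         the same sign bit.  */
      *lv = 0;
      *hv = -h1;
      return (*hv & h1) < 0;
    }
  else
    {
      /* A borrow propagates: -x == ~x + 1, and the +1 is absorbed by
         the nonzero low word.  */
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}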
10600 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10601 an integer constant or real constant.
10603 TYPE is the type of the result. */
10606 fold_abs_const (tree arg0, tree type)
10608 tree t = NULL_TREE;
10610 switch (TREE_CODE (arg0))
10613 /* If the value is unsigned, then the absolute value is
10614 the same as the ordinary value. */
10615 if (TYPE_UNSIGNED (type))
10617 /* Similarly, if the value is non-negative. */
10618 else if (INT_CST_LT (integer_minus_one_node, arg0))
10620 /* If the value is negative, then the absolute value is
10621 its negation. */
10624 unsigned HOST_WIDE_INT low;
10625 HOST_WIDE_INT high;
10626 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10627 TREE_INT_CST_HIGH (arg0),
10629 t = build_int_cst_wide (type, low, high);
10630 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10631 TREE_CONSTANT_OVERFLOW (arg0));
10636 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10637 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10643 gcc_unreachable ();
10649 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10650 constant. TYPE is the type of the result. */
10653 fold_not_const (tree arg0, tree type)
10655 tree t = NULL_TREE;
10657 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
10659 t = build_int_cst_wide (type,
10660 ~ TREE_INT_CST_LOW (arg0),
10661 ~ TREE_INT_CST_HIGH (arg0));
10662 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10663 TREE_CONSTANT_OVERFLOW (arg0));
10668 /* Given CODE, a relational operator, the target type, TYPE and two
10669 constant operands OP0 and OP1, return the result of the
10670 relational operation. If the result is not a compile time
10671 constant, then return NULL_TREE. */
10674 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10676 int result, invert;
10678 /* From here on, the only cases we handle are when the result is
10679 known to be a constant. */
10681 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10683 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10684 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10686 /* Handle the cases where either operand is a NaN. */
10687 if (real_isnan (c0) || real_isnan (c1))
10697 case UNORDERED_EXPR:
10711 if (flag_trapping_math)
10717 gcc_unreachable ();
10720 return constant_boolean_node (result, type);
10723 return constant_boolean_node (real_compare (code, c0, c1), type);
10726 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10728 To compute GT, swap the arguments and do LT.
10729 To compute GE, do LT and invert the result.
10730 To compute LE, swap the arguments, do LT and invert the result.
10731 To compute NE, do EQ and invert the result.
10733 Therefore, the code below must handle only EQ and LT. */
10735 if (code == LE_EXPR || code == GT_EXPR)
10740 code = swap_tree_comparison (code);
10743 /* Note that it is safe to invert for real values here because we
10744 have already handled the one case where it matters. */
10747 if (code == NE_EXPR || code == GE_EXPR)
10750 code = invert_tree_comparison (code, false);
10753 /* Compute a result for LT or EQ if args permit;
10754 otherwise return NULL_TREE. */
10755 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10757 if (code == EQ_EXPR)
10758 result = tree_int_cst_equal (op0, op1);
10759 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10760 result = INT_CST_LT_UNSIGNED (op0, op1);
10762 result = INT_CST_LT (op0, op1);
10769 return constant_boolean_node (result, type);
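/* An illustrative sketch (not GCC code) of the reduction described
   above: with LT and EQ as primitives, the other four integer relations
   follow by swapping arguments and/or inverting the result.  */

static int lt (long a, long b) { return a < b; }
static int eq (long a, long b) { return a == b; }
static int gt (long a, long b) { return lt (b, a); }   /* swap */
static int le (long a, long b) { return !lt (b, a); }  /* swap + invert */
static int ge (long a, long b) { return !lt (a, b); }  /* invert */
static int ne (long a, long b) { return !eq (a, b); }  /* invert */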
10772 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
10773 Don't build a cleanup point expression for EXPR if it has no side
10774 effects. */
10777 fold_build_cleanup_point_expr (tree type, tree expr)
10779 /* If the expression does not have side effects then we don't have to wrap
10780 it with a cleanup point expression. */
10781 if (!TREE_SIDE_EFFECTS (expr))
10784 /* If the expression is a return, check whether the expression inside
10785 the return, or the right-hand side of the modify expression inside
10786 the return, has side effects. If neither does, we don't need to wrap
10787 the expression in a cleanup point expression. Note we don't check the
10788 left-hand side of the modify because it should always be the return decl. */
10789 if (TREE_CODE (expr) == RETURN_EXPR)
10791 tree op = TREE_OPERAND (expr, 0);
10792 if (!op || !TREE_SIDE_EFFECTS (op))
10794 op = TREE_OPERAND (op, 1);
10795 if (!TREE_SIDE_EFFECTS (op))
10799 return build1 (CLEANUP_POINT_EXPR, type, expr);
10802 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10803 avoid confusing the gimplify process. */
10806 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10808 /* The size of the object is not relevant when talking about its address. */
10809 if (TREE_CODE (t) == WITH_SIZE_EXPR)
10810 t = TREE_OPERAND (t, 0);
10812 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
10813 if (TREE_CODE (t) == INDIRECT_REF
10814 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
10816 t = TREE_OPERAND (t, 0);
10817 if (TREE_TYPE (t) != ptrtype)
10818 t = build1 (NOP_EXPR, ptrtype, t);
10824 while (handled_component_p (base))
10825 base = TREE_OPERAND (base, 0);
10827 TREE_ADDRESSABLE (base) = 1;
10829 t = build1 (ADDR_EXPR, ptrtype, t);
10836 build_fold_addr_expr (tree t)
10838 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10841 /* Builds an expression for an indirection through T, simplifying some
10842 cases. */
10845 build_fold_indirect_ref (tree t)
10847 tree type = TREE_TYPE (TREE_TYPE (t));
10852 if (TREE_CODE (sub) == ADDR_EXPR)
10854 tree op = TREE_OPERAND (sub, 0);
10855 tree optype = TREE_TYPE (op);
10857 if (lang_hooks.types_compatible_p (type, optype))
10859 /* *(foo *)&fooarray => fooarray[0] */
10860 else if (TREE_CODE (optype) == ARRAY_TYPE
10861 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10862 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10865 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10866 subtype = TREE_TYPE (sub);
10867 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10868 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10870 sub = build_fold_indirect_ref (sub);
10871 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10874 return build1 (INDIRECT_REF, type, t);
10877 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10878 whose result is ignored. The type of the returned tree need not be
10879 the same as the original expression. */
10882 fold_ignored_result (tree t)
10884 if (!TREE_SIDE_EFFECTS (t))
10885 return integer_zero_node;
10888 switch (TREE_CODE_CLASS (TREE_CODE (t)))
10891 t = TREE_OPERAND (t, 0);
10895 case tcc_comparison:
10896 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10897 t = TREE_OPERAND (t, 0);
10898 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10899 t = TREE_OPERAND (t, 1);
10904 case tcc_expression:
10905 switch (TREE_CODE (t))
10907 case COMPOUND_EXPR:
10908 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10910 t = TREE_OPERAND (t, 0);
10914 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10915 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10917 t = TREE_OPERAND (t, 0);
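/* For example, when the value of  x + foo ()  is ignored, the loop
   above strips the PLUS_EXPR and returns the call to foo alone, since
   only the call has side effects.  */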
10930 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10931 This can only be applied to objects of a sizetype. */
10934 round_up (tree value, int divisor)
10936 tree div = NULL_TREE;
10938 gcc_assert (divisor > 0);
10942 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10943 have to do anything. Only do this when we are not given a const,
10944 because in that case, this check is more expensive than just
10945 doing it. */
10946 if (TREE_CODE (value) != INTEGER_CST)
10948 div = build_int_cst (TREE_TYPE (value), divisor);
10950 if (multiple_of_p (TREE_TYPE (value), value, div))
10954 /* If divisor is a power of two, simplify this to bit manipulation. */
10955 if (divisor == (divisor & -divisor))
10959 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10960 value = size_binop (PLUS_EXPR, value, t);
10961 t = build_int_cst (TREE_TYPE (value), -divisor);
10962 value = size_binop (BIT_AND_EXPR, value, t);
10967 div = build_int_cst (TREE_TYPE (value), divisor);
10968 value = size_binop (CEIL_DIV_EXPR, value, div);
10969 value = size_binop (MULT_EXPR, value, div);
10975 /* Likewise, but round down. */
10978 round_down (tree value, int divisor)
10980 tree div = NULL_TREE;
10982 gcc_assert (divisor > 0);
10986 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10987 have to do anything. Only do this when we are not given a const,
10988 because in that case, this check is more expensive than just
10989 doing it. */
10990 if (TREE_CODE (value) != INTEGER_CST)
10992 div = build_int_cst (TREE_TYPE (value), divisor);
10994 if (multiple_of_p (TREE_TYPE (value), value, div))
10998 /* If divisor is a power of two, simplify this to bit manipulation. */
10999 if (divisor == (divisor & -divisor))
11003 t = build_int_cst (TREE_TYPE (value), -divisor);
11004 value = size_binop (BIT_AND_EXPR, value, t);
11009 div = build_int_cst (TREE_TYPE (value), divisor);
11010 value = size_binop (FLOOR_DIV_EXPR, value, div);
11011 value = size_binop (MULT_EXPR, value, div);
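/* An illustrative sketch (not GCC code) of the bit-manipulation forms
   that round_up and round_down generate above when DIVISOR is a power
   of two: adding DIVISOR - 1 before masking rounds up, while masking
   alone rounds down.  */

static unsigned long
round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor is a power of two.  */
  return (value + divisor - 1) & -divisor;
}

static unsigned long
round_down_pow2 (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor is a power of two.  */
  return value & -divisor;
}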
11017 /* Returns the pointer to the base of the object addressed by EXP and
11018 extracts the information about the offset of the access, storing it
11019 in PBITPOS and POFFSET. */
11022 split_address_to_core_and_offset (tree exp,
11023 HOST_WIDE_INT *pbitpos, tree *poffset)
11026 enum machine_mode mode;
11027 int unsignedp, volatilep;
11028 HOST_WIDE_INT bitsize;
11030 if (TREE_CODE (exp) == ADDR_EXPR)
11032 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11033 poffset, &mode, &unsignedp, &volatilep,
11036 if (TREE_CODE (core) == INDIRECT_REF)
11037 core = TREE_OPERAND (core, 0);
11043 *poffset = NULL_TREE;
11049 /* Returns true if addresses of E1 and E2 differ by a constant, false
11050 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11053 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11056 HOST_WIDE_INT bitpos1, bitpos2;
11057 tree toffset1, toffset2, tdiff, type;
11059 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11060 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11062 if (bitpos1 % BITS_PER_UNIT != 0
11063 || bitpos2 % BITS_PER_UNIT != 0
11064 || !operand_equal_p (core1, core2, 0))
11067 if (toffset1 && toffset2)
11069 type = TREE_TYPE (toffset1);
11070 if (type != TREE_TYPE (toffset2))
11071 toffset2 = fold_convert (type, toffset2);
11073 tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
11074 if (!host_integerp (tdiff, 0))
11077 *diff = tree_low_cst (tdiff, 0);
11079 else if (toffset1 || toffset2)
11081 /* If only one of the offsets is non-constant, the difference cannot
11082 be a constant. */
11088 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
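/* For example, for  struct s { int a; int b; } x;  the addresses &x.b
   and &x.a share the core  x , so ptr_difference_const stores their
   constant byte difference, sizeof (int), in *DIFF and returns true.  */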