1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* Non-zero if we are folding constants inside an initializer; zero
64 int folding_initializer = 0;
66 /* The following constants represent a bit-based encoding of GCC's
67 comparison operators. This encoding simplifies transformations
68 on relational comparison operators, such as AND and OR. */
69 enum comparison_code {
88 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
89 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree, int);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree make_range (tree, int *, tree *, tree *);
118 static tree build_range_check (tree, tree, int, tree, tree);
119 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
121 static tree fold_range_test (enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
126 static tree extract_muldiv (tree, tree, enum tree_code, tree);
127 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
128 static int multiple_of_p (tree, tree, tree);
129 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
132 static bool fold_real_zero_addition_p (tree, tree, int);
133 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
135 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
136 static tree fold_div_compare (enum tree_code, tree, tree, tree);
137 static bool reorder_operands_p (tree, tree);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static int native_encode_expr (tree, unsigned char *, int);
142 static tree native_interpret_expr (tree, unsigned char *, int);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
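/* Editor's illustration (hypothetical 8-bit values, purely for exposition):
   OVERFLOW_SUM_SIGN (0x70, 0x70, 0xE0) is nonzero because
   ~(0x70 ^ 0x70) & (0x70 ^ 0xE0) == 0xFF & 0x90 == 0x90, whose sign bit is
   set; and indeed 0x70 + 0x70 overflows an 8-bit signed type.  When A and B
   have opposite signs, ~(A ^ B) has a clear sign bit, so the macro yields
   zero, matching the fact that mixed-sign addition cannot overflow.  */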
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
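/* Editor's illustration (assuming HOST_BITS_PER_WIDE_INT == 32, so that
   BASE == 0x10000): LOWPART (0x12345678) == 0x5678,
   HIGHPART (0x12345678) == 0x1234, and 0x1234 * BASE + 0x5678 == 0x12345678.
   Splitting every HOST_WIDE_INT into two nonnegative half-words lets the
   multiplication and division code below work one "digit" at a time without
   losing carries.  */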
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
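/* Editor's sketch of the round trip (again assuming a 32-bit HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (w, 0x89abcdef, 0x1234);   => w = { 0xcdef, 0x89ab, 0x1234, 0 }
     decode (w, &lo, &hi);             => lo == 0x89abcdef, hi == 0x1234

   decode recombines each pair of half-words as LOWPART + HIGHPART * BASE,
   so it is the exact inverse of encode.  */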
191 /* T is an INTEGER_CST node. OVERFLOWABLE indicates how much we care
192 about overflow of the value: when it is >0 we are only interested in
193 signed overflow, and when it is <0 we are interested in any overflow.
194 OVERFLOWED indicates whether overflow has already occurred.
195 CONST_OVERFLOWED indicates whether constant overflow has already occurred.
196 We force T's value to be within range of T's type (by setting to 0 or 1
197 all the bits outside the type's range).
198 We set TREE_OVERFLOW if
199 OVERFLOWED is nonzero,
200 or OVERFLOWABLE is >0 and signed overflow occurs,
201 or OVERFLOWABLE is <0 and any overflow occurs.
202 We set TREE_CONSTANT_OVERFLOW if
203 CONST_OVERFLOWED is nonzero,
204 or we set TREE_OVERFLOW. We return either the original T, or a copy. */
207 force_fit_type (tree t, int overflowable,
208 bool overflowed, bool overflowed_const)
210 unsigned HOST_WIDE_INT low;
213 int sign_extended_type;
215 gcc_assert (TREE_CODE (t) == INTEGER_CST);
217 low = TREE_INT_CST_LOW (t);
218 high = TREE_INT_CST_HIGH (t);
220 if (POINTER_TYPE_P (TREE_TYPE (t))
221 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
224 prec = TYPE_PRECISION (TREE_TYPE (t));
225 /* Size types *are* sign extended. */
226 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
227 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
230 /* First clear all bits that are beyond the type's precision. */
232 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
239 if (prec < HOST_BITS_PER_WIDE_INT)
240 low &= ~((HOST_WIDE_INT) (-1) << prec);
243 if (!sign_extended_type)
244 /* No sign extension */;
245 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
246 /* Correct width already. */;
247 else if (prec > HOST_BITS_PER_WIDE_INT)
249 /* Sign extend top half? */
250 if (high & ((unsigned HOST_WIDE_INT)1
251 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
252 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
254 else if (prec == HOST_BITS_PER_WIDE_INT)
256 if ((HOST_WIDE_INT)low < 0)
261 /* Sign extend bottom half? */
262 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 low |= (HOST_WIDE_INT)(-1) << prec;
269 /* If the value changed, return a new node. */
270 if (overflowed || overflowed_const
271 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
273 t = build_int_cst_wide (TREE_TYPE (t), low, high);
277 || (overflowable > 0 && sign_extended_type))
280 TREE_OVERFLOW (t) = 1;
281 TREE_CONSTANT_OVERFLOW (t) = 1;
283 else if (overflowed_const)
286 TREE_CONSTANT_OVERFLOW (t) = 1;
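/* Editor's illustration: fitting the value 0x1FF into a signed 8-bit type
   clears the bits above the precision and sign extends the rest, so the
   returned constant has value -1.  Whether the new node is also marked with
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW depends on OVERFLOWABLE,
   OVERFLOWED, OVERFLOWED_CONST and the signedness of the type, as described
   in the comment above.  */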
293 /* Add two doubleword integers with doubleword result.
294 Return nonzero if the operation overflows according to UNSIGNED_P.
295 Each argument is given as two `HOST_WIDE_INT' pieces.
296 One argument is L1 and H1; the other, L2 and H2.
297 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
300 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
301 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
302 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
305 unsigned HOST_WIDE_INT l;
309 h = h1 + h2 + (l < l1);
315 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
317 return OVERFLOW_SUM_SIGN (h1, h2, h);
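/* Editor's illustration (assuming a 32-bit HOST_WIDE_INT): the unsigned
   addition of the low words may wrap, and the wrap is detected by the
   comparison l < l1; for l1 = 0xFFFFFFFF, l2 = 1 we get l = 0, so a carry
   of 1 is folded into the high-word sum.  Unsigned doubleword overflow then
   shows up as the high-word result being smaller than H1, while signed
   overflow is detected by applying OVERFLOW_SUM_SIGN to the high words.  */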
320 /* Negate a doubleword integer with doubleword result.
321 Return nonzero if the operation overflows, assuming it's signed.
322 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
323 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
327 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
333 return (*hv & h1) < 0;
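/* Editor's note on the overflow test above: two's complement negation
   overflows only for the most negative doubleword value, whose negation is
   itself.  In that single case both the original high word H1 and the
   negated high word *HV have their sign bits set, making (*hv & h1)
   negative; for every other input at least one of the two high words is
   nonnegative.  */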
343 /* Multiply two doubleword integers with doubleword result.
344 Return nonzero if the operation overflows according to UNSIGNED_P.
345 Each argument is given as two `HOST_WIDE_INT' pieces.
346 One argument is L1 and H1; the other, L2 and H2.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
352 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
355 HOST_WIDE_INT arg1[4];
356 HOST_WIDE_INT arg2[4];
357 HOST_WIDE_INT prod[4 * 2];
358 unsigned HOST_WIDE_INT carry;
360 unsigned HOST_WIDE_INT toplow, neglow;
361 HOST_WIDE_INT tophigh, neghigh;
363 encode (arg1, l1, h1);
364 encode (arg2, l2, h2);
366 memset (prod, 0, sizeof prod);
368 for (i = 0; i < 4; i++)
371 for (j = 0; j < 4; j++)
374 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
375 carry += arg1[i] * arg2[j];
376 /* Since prod[k] <= 0xFFFF, this sum <= 0xFFFFFFFF. */
378 prod[k] = LOWPART (carry);
379 carry = HIGHPART (carry);
384 decode (prod, lv, hv);
385 decode (prod + 4, &toplow, &tophigh);
387 /* Unsigned overflow is immediate. */
389 return (toplow | tophigh) != 0;
391 /* Check for signed overflow by calculating the signed representation of the
392 top half of the result; it should agree with the low half's sign bit. */
395 neg_double (l2, h2, &neglow, &neghigh);
396 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
400 neg_double (l1, h1, &neglow, &neghigh);
401 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
403 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
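/* Editor's note on the signed overflow test above: the 4-by-4 half-word
   product treats both operands as unsigned, so when an operand is negative
   the top half of the product is first corrected by adding the negation of
   the other operand (the usual unsigned-to-signed multiply fix-up).  After
   the correction the top half must equal the sign extension of *HV: all
   zero bits when *HV is nonnegative, all one bits when *HV is negative;
   anything else means the signed product does not fit in two words.  */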
406 /* Shift the doubleword integer in L1, H1 left by COUNT places
407 keeping only PREC bits of result.
408 Shift right if COUNT is negative.
409 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
410 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
413 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
414 HOST_WIDE_INT count, unsigned int prec,
415 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
417 unsigned HOST_WIDE_INT signmask;
421 rshift_double (l1, h1, -count, prec, lv, hv, arith);
425 if (SHIFT_COUNT_TRUNCATED)
428 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
430 /* Shifting by the host word size is undefined according to the
431 ANSI standard, so we must handle this as a special case. */
435 else if (count >= HOST_BITS_PER_WIDE_INT)
437 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
442 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
443 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
447 /* Sign extend all bits that are beyond the precision. */
449 signmask = -((prec > HOST_BITS_PER_WIDE_INT
450 ? ((unsigned HOST_WIDE_INT) *hv
451 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
452 : (*lv >> (prec - 1))) & 1);
454 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
456 else if (prec >= HOST_BITS_PER_WIDE_INT)
458 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
459 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
464 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
465 *lv |= signmask << prec;
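/* Editor's illustration (assuming a 32-bit HOST_WIDE_INT and PREC == 64):
   shifting L1 = 0x80000000, H1 = 0 left by 1 moves the top bit of the low
   word into the high word, giving *LV = 0 and *HV = 1.  A negative COUNT
   delegates to rshift_double, and any bit positions at or above PREC are
   filled with copies of the resulting sign bit by the code above.  */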
469 /* Shift the doubleword integer in L1, H1 right by COUNT places
470 keeping only PREC bits of result. COUNT must be positive.
471 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
472 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
475 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
476 HOST_WIDE_INT count, unsigned int prec,
477 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
480 unsigned HOST_WIDE_INT signmask;
483 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
486 if (SHIFT_COUNT_TRUNCATED)
489 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
491 /* Shifting by the host word size is undefined according to the
492 ANSI standard, so we must handle this as a special case. */
496 else if (count >= HOST_BITS_PER_WIDE_INT)
499 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
503 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
505 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
508 /* Zero / sign extend all bits that are beyond the precision. */
510 if (count >= (HOST_WIDE_INT)prec)
515 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
517 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
519 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
520 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
525 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
526 *lv |= signmask << (prec - count);
530 /* Rotate the doubleword integer in L1, H1 left by COUNT places
531 keeping only PREC bits of result.
532 Rotate right if COUNT is negative.
533 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
536 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
537 HOST_WIDE_INT count, unsigned int prec,
538 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
540 unsigned HOST_WIDE_INT s1l, s2l;
541 HOST_WIDE_INT s1h, s2h;
547 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
548 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
553 /* Rotate the doubleword integer in L1, H1 right by COUNT places
554 keeping only PREC bits of result. COUNT must be positive.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
569 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
575 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
576 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
577 CODE is a tree code for a kind of division, one of
578 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
580 It controls how the quotient is rounded to an integer.
581 Return nonzero if the operation overflows.
582 UNS nonzero says do unsigned division. */
585 div_and_round_double (enum tree_code code, int uns,
586 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
587 HOST_WIDE_INT hnum_orig,
588 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
589 HOST_WIDE_INT hden_orig,
590 unsigned HOST_WIDE_INT *lquo,
591 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
595 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
596 HOST_WIDE_INT den[4], quo[4];
598 unsigned HOST_WIDE_INT work;
599 unsigned HOST_WIDE_INT carry = 0;
600 unsigned HOST_WIDE_INT lnum = lnum_orig;
601 HOST_WIDE_INT hnum = hnum_orig;
602 unsigned HOST_WIDE_INT lden = lden_orig;
603 HOST_WIDE_INT hden = hden_orig;
606 if (hden == 0 && lden == 0)
607 overflow = 1, lden = 1;
609 /* Calculate quotient sign and convert operands to unsigned. */
615 /* (minimum integer) / (-1) is the only overflow case. */
616 if (neg_double (lnum, hnum, &lnum, &hnum)
617 && ((HOST_WIDE_INT) lden & hden) == -1)
623 neg_double (lden, hden, &lden, &hden);
627 if (hnum == 0 && hden == 0)
628 { /* single precision */
630 /* This unsigned division rounds toward zero. */
636 { /* trivial case: dividend < divisor */
637 /* hden != 0 already checked. */
644 memset (quo, 0, sizeof quo);
646 memset (num, 0, sizeof num); /* to zero the extra scaling element */
647 memset (den, 0, sizeof den);
649 encode (num, lnum, hnum);
650 encode (den, lden, hden);
652 /* Special code for when the divisor < BASE. */
653 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
655 /* hnum != 0 already checked. */
656 for (i = 4 - 1; i >= 0; i--)
658 work = num[i] + carry * BASE;
659 quo[i] = work / lden;
665 /* Full double precision division,
666 with thanks to Don Knuth's "Seminumerical Algorithms". */
667 int num_hi_sig, den_hi_sig;
668 unsigned HOST_WIDE_INT quo_est, scale;
670 /* Find the highest nonzero divisor digit. */
671 for (i = 4 - 1;; i--)
678 /* Ensure that the first digit of the divisor is at least BASE/2.
679 This is required by the quotient digit estimation algorithm. */
681 scale = BASE / (den[den_hi_sig] + 1);
683 { /* scale divisor and dividend */
685 for (i = 0; i <= 4 - 1; i++)
687 work = (num[i] * scale) + carry;
688 num[i] = LOWPART (work);
689 carry = HIGHPART (work);
694 for (i = 0; i <= 4 - 1; i++)
696 work = (den[i] * scale) + carry;
697 den[i] = LOWPART (work);
698 carry = HIGHPART (work);
699 if (den[i] != 0) den_hi_sig = i;
706 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
708 /* Guess the next quotient digit, quo_est, by dividing the first
709 two remaining dividend digits by the high order divisor digit.
710 quo_est is never low and is at most 2 high. */
711 unsigned HOST_WIDE_INT tmp;
713 num_hi_sig = i + den_hi_sig + 1;
714 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
715 if (num[num_hi_sig] != den[den_hi_sig])
716 quo_est = work / den[den_hi_sig];
720 /* Refine quo_est so it's usually correct, and at most one high. */
721 tmp = work - quo_est * den[den_hi_sig];
723 && (den[den_hi_sig - 1] * quo_est
724 > (tmp * BASE + num[num_hi_sig - 2])))
727 /* Try QUO_EST as the quotient digit, by multiplying the
728 divisor by QUO_EST and subtracting from the remaining dividend.
729 Keep in mind that QUO_EST is the I - 1st digit. */
732 for (j = 0; j <= den_hi_sig; j++)
734 work = quo_est * den[j] + carry;
735 carry = HIGHPART (work);
736 work = num[i + j] - LOWPART (work);
737 num[i + j] = LOWPART (work);
738 carry += HIGHPART (work) != 0;
741 /* If quo_est was high by one, then num[i] went negative and
742 we need to correct things. */
743 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
746 carry = 0; /* add divisor back in */
747 for (j = 0; j <= den_hi_sig; j++)
749 work = num[i + j] + den[j] + carry;
750 carry = HIGHPART (work);
751 num[i + j] = LOWPART (work);
754 num [num_hi_sig] += carry;
757 /* Store the quotient digit. */
762 decode (quo, lquo, hquo);
765 /* If the result should be negative, negate it now. */
767 neg_double (*lquo, *hquo, lquo, hquo);
769 /* Compute trial remainder: rem = num - (quo * den) */
770 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
771 neg_double (*lrem, *hrem, lrem, hrem);
772 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
777 case TRUNC_MOD_EXPR: /* round toward zero */
778 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
782 case FLOOR_MOD_EXPR: /* round toward negative infinity */
783 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
794 case CEIL_MOD_EXPR: /* round toward positive infinity */
795 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
797 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
805 case ROUND_MOD_EXPR: /* round to closest integer */
807 unsigned HOST_WIDE_INT labs_rem = *lrem;
808 HOST_WIDE_INT habs_rem = *hrem;
809 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
810 HOST_WIDE_INT habs_den = hden, htwice;
812 /* Get absolute values. */
814 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
816 neg_double (lden, hden, &labs_den, &habs_den);
818 /* If (2 * abs (lrem) >= abs (lden)) */
819 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
820 labs_rem, habs_rem, <wice, &htwice);
822 if (((unsigned HOST_WIDE_INT) habs_den
823 < (unsigned HOST_WIDE_INT) htwice)
824 || (((unsigned HOST_WIDE_INT) habs_den
825 == (unsigned HOST_WIDE_INT) htwice)
826 && (labs_den < ltwice)))
830 add_double (*lquo, *hquo,
831 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
834 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
846 /* Compute true remainder: rem = num - (quo * den) */
847 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
848 neg_double (*lrem, *hrem, lrem, hrem);
849 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
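/* Editor's illustration of the rounding adjustments above, for num = -7,
   den = 2 (truncating quotient -3, remainder -1):
     TRUNC_DIV_EXPR  -> quotient -3, remainder -1
     FLOOR_DIV_EXPR  -> quotient -4, remainder  1
     CEIL_DIV_EXPR   -> quotient -3, remainder -1
     ROUND_DIV_EXPR  -> quotient -4, remainder  1  (2 * |rem| >= |den|, so the
                                                    tie rounds away from zero)
   In every case the remainder is recomputed as num - quo * den, so quotient
   and remainder stay consistent with each other.  */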
853 /* If ARG2 divides ARG1 with zero remainder, carries out the division
854 of type CODE and returns the quotient.
855 Otherwise returns NULL_TREE. */
858 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
860 unsigned HOST_WIDE_INT int1l, int2l;
861 HOST_WIDE_INT int1h, int2h;
862 unsigned HOST_WIDE_INT quol, reml;
863 HOST_WIDE_INT quoh, remh;
864 tree type = TREE_TYPE (arg1);
865 int uns = TYPE_UNSIGNED (type);
867 int1l = TREE_INT_CST_LOW (arg1);
868 int1h = TREE_INT_CST_HIGH (arg1);
869 int2l = TREE_INT_CST_LOW (arg2);
870 int2h = TREE_INT_CST_HIGH (arg2);
872 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
873 &quol, &quoh, &reml, &remh);
874 if (remh != 0 || reml != 0)
877 return build_int_cst_wide (type, quol, quoh);
880 /* Return true if the built-in mathematical function specified by CODE
881 is odd, i.e. -f(x) == f(-x). */
884 negate_mathfn_p (enum built_in_function code)
888 CASE_FLT_FN (BUILT_IN_ASIN):
889 CASE_FLT_FN (BUILT_IN_ASINH):
890 CASE_FLT_FN (BUILT_IN_ATAN):
891 CASE_FLT_FN (BUILT_IN_ATANH):
892 CASE_FLT_FN (BUILT_IN_CBRT):
893 CASE_FLT_FN (BUILT_IN_SIN):
894 CASE_FLT_FN (BUILT_IN_SINH):
895 CASE_FLT_FN (BUILT_IN_TAN):
896 CASE_FLT_FN (BUILT_IN_TANH):
897 CASE_FLT_FN (BUILT_IN_ERF):
906 /* Check whether we may negate an integer constant T without causing
910 may_negate_without_overflow_p (tree t)
912 unsigned HOST_WIDE_INT val;
916 gcc_assert (TREE_CODE (t) == INTEGER_CST);
918 type = TREE_TYPE (t);
919 if (TYPE_UNSIGNED (type))
922 prec = TYPE_PRECISION (type);
923 if (prec > HOST_BITS_PER_WIDE_INT)
925 if (TREE_INT_CST_LOW (t) != 0)
927 prec -= HOST_BITS_PER_WIDE_INT;
928 val = TREE_INT_CST_HIGH (t);
931 val = TREE_INT_CST_LOW (t);
932 if (prec < HOST_BITS_PER_WIDE_INT)
933 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
934 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
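/* Editor's illustration: in a 16-bit signed type the only constant whose
   negation overflows is -32768, whose low-order bits are exactly
   (unsigned HOST_WIDE_INT) 1 << 15, so the comparison above returns false
   for that value and true for every other constant of the type.  */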
937 /* Determine whether an expression T can be cheaply negated using
938 the function negate_expr without introducing undefined overflow. */
941 negate_expr_p (tree t)
948 type = TREE_TYPE (t);
951 switch (TREE_CODE (t))
954 if (TYPE_UNSIGNED (type)
955 || (flag_wrapv && ! flag_trapv))
958 /* Check that -CST will not overflow type. */
959 return may_negate_without_overflow_p (t);
961 return INTEGRAL_TYPE_P (type)
962 && (TYPE_UNSIGNED (type)
963 || (flag_wrapv && !flag_trapv));
970 return negate_expr_p (TREE_REALPART (t))
971 && negate_expr_p (TREE_IMAGPART (t));
974 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
976 /* -(A + B) -> (-B) - A. */
977 if (negate_expr_p (TREE_OPERAND (t, 1))
978 && reorder_operands_p (TREE_OPERAND (t, 0),
979 TREE_OPERAND (t, 1)))
981 /* -(A + B) -> (-A) - B. */
982 return negate_expr_p (TREE_OPERAND (t, 0));
985 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
986 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
987 && reorder_operands_p (TREE_OPERAND (t, 0),
988 TREE_OPERAND (t, 1));
991 if (TYPE_UNSIGNED (TREE_TYPE (t)))
997 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
998 return negate_expr_p (TREE_OPERAND (t, 1))
999 || negate_expr_p (TREE_OPERAND (t, 0));
1002 case TRUNC_DIV_EXPR:
1003 case ROUND_DIV_EXPR:
1004 case FLOOR_DIV_EXPR:
1006 case EXACT_DIV_EXPR:
1007 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
1009 return negate_expr_p (TREE_OPERAND (t, 1))
1010 || negate_expr_p (TREE_OPERAND (t, 0));
1013 /* Negate -((double)float) as (double)(-float). */
1014 if (TREE_CODE (type) == REAL_TYPE)
1016 tree tem = strip_float_extensions (t);
1018 return negate_expr_p (tem);
1023 /* Negate -f(x) as f(-x). */
1024 if (negate_mathfn_p (builtin_mathfn_code (t)))
1025 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1029 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1030 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1032 tree op1 = TREE_OPERAND (t, 1);
1033 if (TREE_INT_CST_HIGH (op1) == 0
1034 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1035 == TREE_INT_CST_LOW (op1))
1046 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
1047 simplification is possible.
1048 If negate_expr_p would return true for T, NULL_TREE will never be
1052 fold_negate_expr (tree t)
1054 tree type = TREE_TYPE (t);
1057 switch (TREE_CODE (t))
1059 /* Convert - (~A) to A + 1. */
1061 if (INTEGRAL_TYPE_P (type))
1062 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1063 build_int_cst (type, 1));
1067 tem = fold_negate_const (t, type);
1068 if (! TREE_OVERFLOW (tem)
1069 || TYPE_UNSIGNED (type)
1075 tem = fold_negate_const (t, type);
1076 /* Two's complement FP formats, such as c4x, may overflow. */
1077 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1083 tree rpart = negate_expr (TREE_REALPART (t));
1084 tree ipart = negate_expr (TREE_IMAGPART (t));
1086 if ((TREE_CODE (rpart) == REAL_CST
1087 && TREE_CODE (ipart) == REAL_CST)
1088 || (TREE_CODE (rpart) == INTEGER_CST
1089 && TREE_CODE (ipart) == INTEGER_CST))
1090 return build_complex (type, rpart, ipart);
1095 return TREE_OPERAND (t, 0);
1098 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1100 /* -(A + B) -> (-B) - A. */
1101 if (negate_expr_p (TREE_OPERAND (t, 1))
1102 && reorder_operands_p (TREE_OPERAND (t, 0),
1103 TREE_OPERAND (t, 1)))
1105 tem = negate_expr (TREE_OPERAND (t, 1));
1106 return fold_build2 (MINUS_EXPR, type,
1107 tem, TREE_OPERAND (t, 0));
1110 /* -(A + B) -> (-A) - B. */
1111 if (negate_expr_p (TREE_OPERAND (t, 0)))
1113 tem = negate_expr (TREE_OPERAND (t, 0));
1114 return fold_build2 (MINUS_EXPR, type,
1115 tem, TREE_OPERAND (t, 1));
1121 /* - (A - B) -> B - A */
1122 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1123 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1124 return fold_build2 (MINUS_EXPR, type,
1125 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1129 if (TYPE_UNSIGNED (type))
1135 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1137 tem = TREE_OPERAND (t, 1);
1138 if (negate_expr_p (tem))
1139 return fold_build2 (TREE_CODE (t), type,
1140 TREE_OPERAND (t, 0), negate_expr (tem));
1141 tem = TREE_OPERAND (t, 0);
1142 if (negate_expr_p (tem))
1143 return fold_build2 (TREE_CODE (t), type,
1144 negate_expr (tem), TREE_OPERAND (t, 1));
1148 case TRUNC_DIV_EXPR:
1149 case ROUND_DIV_EXPR:
1150 case FLOOR_DIV_EXPR:
1152 case EXACT_DIV_EXPR:
1153 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
1155 tem = TREE_OPERAND (t, 1);
1156 if (negate_expr_p (tem))
1157 return fold_build2 (TREE_CODE (t), type,
1158 TREE_OPERAND (t, 0), negate_expr (tem));
1159 tem = TREE_OPERAND (t, 0);
1160 if (negate_expr_p (tem))
1161 return fold_build2 (TREE_CODE (t), type,
1162 negate_expr (tem), TREE_OPERAND (t, 1));
1167 /* Convert -((double)float) into (double)(-float). */
1168 if (TREE_CODE (type) == REAL_TYPE)
1170 tem = strip_float_extensions (t);
1171 if (tem != t && negate_expr_p (tem))
1172 return negate_expr (tem);
1177 /* Negate -f(x) as f(-x). */
1178 if (negate_mathfn_p (builtin_mathfn_code (t))
1179 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1181 tree fndecl, arg, arglist;
1183 fndecl = get_callee_fndecl (t);
1184 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1185 arglist = build_tree_list (NULL_TREE, arg);
1186 return build_function_call_expr (fndecl, arglist);
1191 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1192 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1194 tree op1 = TREE_OPERAND (t, 1);
1195 if (TREE_INT_CST_HIGH (op1) == 0
1196 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1197 == TREE_INT_CST_LOW (op1))
1199 tree ntype = TYPE_UNSIGNED (type)
1200 ? lang_hooks.types.signed_type (type)
1201 : lang_hooks.types.unsigned_type (type);
1202 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1203 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1204 return fold_convert (type, temp);
1216 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1217 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1218 return NULL_TREE. */
1221 negate_expr (tree t)
1228 type = TREE_TYPE (t);
1229 STRIP_SIGN_NOPS (t);
1231 tem = fold_negate_expr (t);
1233 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1234 return fold_convert (type, tem);
1237 /* Split a tree IN into constant, literal and variable parts that could be
1238 combined with CODE to make IN. "constant" means an expression with
1239 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1240 commutative arithmetic operation. Store the constant part into *CONP,
1241 the literal in *LITP and return the variable part. If a part isn't
1242 present, set it to null. If the tree does not decompose in this way,
1243 return the entire tree as the variable part and the other parts as null.
1245 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1246 case, we negate an operand that was subtracted, except if it is a
1247 literal, for which we use *MINUS_LITP instead.
1249 If NEGATE_P is true, we are negating all of IN, again except a literal
1250 for which we use *MINUS_LITP instead.
1252 If IN is itself a literal or constant, return it as appropriate.
1254 Note that we do not guarantee that any of the three values will be the
1255 same type as IN, but they will have the same signedness and mode. */
1258 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1259 tree *minus_litp, int negate_p)
1267 /* Strip any conversions that don't change the machine mode or signedness. */
1268 STRIP_SIGN_NOPS (in);
1270 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1272 else if (TREE_CODE (in) == code
1273 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1274 /* We can associate addition and subtraction together (even
1275 though the C standard doesn't say so) for integers because
1276 the value is not affected. For reals, the value might be
1277 affected, so we can't. */
1278 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1279 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1281 tree op0 = TREE_OPERAND (in, 0);
1282 tree op1 = TREE_OPERAND (in, 1);
1283 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1284 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1286 /* First see if either of the operands is a literal, then a constant. */
1287 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1288 *litp = op0, op0 = 0;
1289 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1290 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1292 if (op0 != 0 && TREE_CONSTANT (op0))
1293 *conp = op0, op0 = 0;
1294 else if (op1 != 0 && TREE_CONSTANT (op1))
1295 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1297 /* If we haven't dealt with either operand, this is not a case we can
1298 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1299 if (op0 != 0 && op1 != 0)
1304 var = op1, neg_var_p = neg1_p;
1306 /* Now do any needed negations. */
1308 *minus_litp = *litp, *litp = 0;
1310 *conp = negate_expr (*conp);
1312 var = negate_expr (var);
1314 else if (TREE_CONSTANT (in))
1322 *minus_litp = *litp, *litp = 0;
1323 else if (*minus_litp)
1324 *litp = *minus_litp, *minus_litp = 0;
1325 *conp = negate_expr (*conp);
1326 var = negate_expr (var);
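/* Editor's illustration of the decomposition: splitting IN = a - 5 with
   CODE = PLUS_EXPR finds the literal 5 in the subtracted operand, so the
   routine returns the variable part "a", leaves *CONP null, and reports the
   literal through *MINUS_LITP rather than *LITP because it was subtracted.
   Splitting IN = a + b, where neither operand is a literal or constant,
   does not decompose at all: the whole tree is returned as the variable
   part.  */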
1332 /* Re-associate trees split by the above function. T1 and T2 are either
1333 expressions to associate or null. Return the new expression, if any. If
1334 we build an operation, do it in TYPE and with CODE. */
1337 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1344 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1345 try to fold this since we will have infinite recursion. But do
1346 deal with any NEGATE_EXPRs. */
1347 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1348 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1350 if (code == PLUS_EXPR)
1352 if (TREE_CODE (t1) == NEGATE_EXPR)
1353 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1354 fold_convert (type, TREE_OPERAND (t1, 0)));
1355 else if (TREE_CODE (t2) == NEGATE_EXPR)
1356 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1357 fold_convert (type, TREE_OPERAND (t2, 0)));
1358 else if (integer_zerop (t2))
1359 return fold_convert (type, t1);
1361 else if (code == MINUS_EXPR)
1363 if (integer_zerop (t2))
1364 return fold_convert (type, t1);
1367 return build2 (code, type, fold_convert (type, t1),
1368 fold_convert (type, t2));
1371 return fold_build2 (code, type, fold_convert (type, t1),
1372 fold_convert (type, t2));
1375 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1376 for use in int_const_binop, size_binop and size_diffop. */
1379 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1381 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1383 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1398 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1399 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1400 && TYPE_MODE (type1) == TYPE_MODE (type2);
1404 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1405 to produce a new constant. Return NULL_TREE if we don't know how
1406 to evaluate CODE at compile-time.
1408 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1411 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1413 unsigned HOST_WIDE_INT int1l, int2l;
1414 HOST_WIDE_INT int1h, int2h;
1415 unsigned HOST_WIDE_INT low;
1417 unsigned HOST_WIDE_INT garbagel;
1418 HOST_WIDE_INT garbageh;
1420 tree type = TREE_TYPE (arg1);
1421 int uns = TYPE_UNSIGNED (type);
1423 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1426 int1l = TREE_INT_CST_LOW (arg1);
1427 int1h = TREE_INT_CST_HIGH (arg1);
1428 int2l = TREE_INT_CST_LOW (arg2);
1429 int2h = TREE_INT_CST_HIGH (arg2);
1434 low = int1l | int2l, hi = int1h | int2h;
1438 low = int1l ^ int2l, hi = int1h ^ int2h;
1442 low = int1l & int2l, hi = int1h & int2h;
1448 /* It's unclear from the C standard whether shifts can overflow.
1449 The following code ignores overflow; perhaps a C standard
1450 interpretation ruling is needed. */
1451 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1458 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1463 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1467 neg_double (int2l, int2h, &low, &hi);
1468 add_double (int1l, int1h, low, hi, &low, &hi);
1469 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1473 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1476 case TRUNC_DIV_EXPR:
1477 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1478 case EXACT_DIV_EXPR:
1479 /* This is a shortcut for a common special case. */
1480 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1481 && ! TREE_CONSTANT_OVERFLOW (arg1)
1482 && ! TREE_CONSTANT_OVERFLOW (arg2)
1483 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1485 if (code == CEIL_DIV_EXPR)
1488 low = int1l / int2l, hi = 0;
1492 /* ... fall through ... */
1494 case ROUND_DIV_EXPR:
1495 if (int2h == 0 && int2l == 0)
1497 if (int2h == 0 && int2l == 1)
1499 low = int1l, hi = int1h;
1502 if (int1l == int2l && int1h == int2h
1503 && ! (int1l == 0 && int1h == 0))
1508 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1509 &low, &hi, &garbagel, &garbageh);
1512 case TRUNC_MOD_EXPR:
1513 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1514 /* This is a shortcut for a common special case. */
1515 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1516 && ! TREE_CONSTANT_OVERFLOW (arg1)
1517 && ! TREE_CONSTANT_OVERFLOW (arg2)
1518 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1520 if (code == CEIL_MOD_EXPR)
1522 low = int1l % int2l, hi = 0;
1526 /* ... fall through ... */
1528 case ROUND_MOD_EXPR:
1529 if (int2h == 0 && int2l == 0)
1531 overflow = div_and_round_double (code, uns,
1532 int1l, int1h, int2l, int2h,
1533 &garbagel, &garbageh, &low, &hi);
1539 low = (((unsigned HOST_WIDE_INT) int1h
1540 < (unsigned HOST_WIDE_INT) int2h)
1541 || (((unsigned HOST_WIDE_INT) int1h
1542 == (unsigned HOST_WIDE_INT) int2h)
1545 low = (int1h < int2h
1546 || (int1h == int2h && int1l < int2l));
1548 if (low == (code == MIN_EXPR))
1549 low = int1l, hi = int1h;
1551 low = int2l, hi = int2h;
1558 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1562 /* Propagate overflow flags ourselves. */
1563 if (((!uns || is_sizetype) && overflow)
1564 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1567 TREE_OVERFLOW (t) = 1;
1568 TREE_CONSTANT_OVERFLOW (t) = 1;
1570 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1573 TREE_CONSTANT_OVERFLOW (t) = 1;
1577 t = force_fit_type (t, 1,
1578 ((!uns || is_sizetype) && overflow)
1579 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1580 TREE_CONSTANT_OVERFLOW (arg1)
1581 | TREE_CONSTANT_OVERFLOW (arg2));
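/* Editor's usage sketch (hypothetical operands): folding 7 + 9 in the C
   "int" type amounts to

     tree sum = int_const_binop (PLUS_EXPR,
                                 build_int_cst (integer_type_node, 7),
                                 build_int_cst (integer_type_node, 9), 0);

   and yields an INTEGER_CST of value 16 with no overflow flags.  With
   NOTRUNC == 0 the result goes through force_fit_type, so a sum such as
   INT_MAX + 1 comes back wrapped to INT_MIN with TREE_OVERFLOW set.  */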
1586 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1587 constant. We assume ARG1 and ARG2 have the same data type, or at least
1588 are the same kind of constant and the same machine mode. Return zero if
1589 combining the constants is not allowed in the current operating mode.
1591 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1594 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1596 /* Sanity check for the recursive cases. */
1603 if (TREE_CODE (arg1) == INTEGER_CST)
1604 return int_const_binop (code, arg1, arg2, notrunc);
1606 if (TREE_CODE (arg1) == REAL_CST)
1608 enum machine_mode mode;
1611 REAL_VALUE_TYPE value;
1612 REAL_VALUE_TYPE result;
1616 /* The following codes are handled by real_arithmetic. */
1631 d1 = TREE_REAL_CST (arg1);
1632 d2 = TREE_REAL_CST (arg2);
1634 type = TREE_TYPE (arg1);
1635 mode = TYPE_MODE (type);
1637 /* Don't perform the operation if we honor signaling NaNs and
1638 either operand is a NaN. */
1639 if (HONOR_SNANS (mode)
1640 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1643 /* Don't perform the operation if it would raise a division
1644 by zero exception. */
1645 if (code == RDIV_EXPR
1646 && REAL_VALUES_EQUAL (d2, dconst0)
1647 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1650 /* If either operand is a NaN, just return it. Otherwise, set up
1651 for floating-point trap; we return an overflow. */
1652 if (REAL_VALUE_ISNAN (d1))
1654 else if (REAL_VALUE_ISNAN (d2))
1657 inexact = real_arithmetic (&value, code, &d1, &d2);
1658 real_convert (&result, mode, &value);
1660 /* Don't constant fold this floating point operation if
1661 the result has overflowed and flag_trapping_math. */
1662 if (flag_trapping_math
1663 && MODE_HAS_INFINITIES (mode)
1664 && REAL_VALUE_ISINF (result)
1665 && !REAL_VALUE_ISINF (d1)
1666 && !REAL_VALUE_ISINF (d2))
1669 /* Don't constant fold this floating point operation if the
1670 result may depend upon the run-time rounding mode and
1671 flag_rounding_math is set, or if GCC's software emulation
1672 is unable to accurately represent the result. */
1673 if ((flag_rounding_math
1674 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1675 && !flag_unsafe_math_optimizations))
1676 && (inexact || !real_identical (&result, &value)))
1679 t = build_real (type, result);
1681 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1682 TREE_CONSTANT_OVERFLOW (t)
1684 | TREE_CONSTANT_OVERFLOW (arg1)
1685 | TREE_CONSTANT_OVERFLOW (arg2);
1689 if (TREE_CODE (arg1) == COMPLEX_CST)
1691 tree type = TREE_TYPE (arg1);
1692 tree r1 = TREE_REALPART (arg1);
1693 tree i1 = TREE_IMAGPART (arg1);
1694 tree r2 = TREE_REALPART (arg2);
1695 tree i2 = TREE_IMAGPART (arg2);
1702 real = const_binop (code, r1, r2, notrunc);
1703 imag = const_binop (code, i1, i2, notrunc);
1707 real = const_binop (MINUS_EXPR,
1708 const_binop (MULT_EXPR, r1, r2, notrunc),
1709 const_binop (MULT_EXPR, i1, i2, notrunc),
1711 imag = const_binop (PLUS_EXPR,
1712 const_binop (MULT_EXPR, r1, i2, notrunc),
1713 const_binop (MULT_EXPR, i1, r2, notrunc),
1720 = const_binop (PLUS_EXPR,
1721 const_binop (MULT_EXPR, r2, r2, notrunc),
1722 const_binop (MULT_EXPR, i2, i2, notrunc),
1725 = const_binop (PLUS_EXPR,
1726 const_binop (MULT_EXPR, r1, r2, notrunc),
1727 const_binop (MULT_EXPR, i1, i2, notrunc),
1730 = const_binop (MINUS_EXPR,
1731 const_binop (MULT_EXPR, i1, r2, notrunc),
1732 const_binop (MULT_EXPR, r1, i2, notrunc),
1735 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1736 code = TRUNC_DIV_EXPR;
1738 real = const_binop (code, t1, magsquared, notrunc);
1739 imag = const_binop (code, t2, magsquared, notrunc);
1748 return build_complex (type, real, imag);
1754 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1755 indicates which particular sizetype to create. */
1758 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1760 return build_int_cst (sizetype_tab[(int) kind], number);
1763 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1764 is a tree code. The type of the result is taken from the operands.
1765 Both must be equivalent integer types, as checked by int_binop_types_match_p.
1766 If the operands are constant, so is the result. */
1769 size_binop (enum tree_code code, tree arg0, tree arg1)
1771 tree type = TREE_TYPE (arg0);
1773 if (arg0 == error_mark_node || arg1 == error_mark_node)
1774 return error_mark_node;
1776 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1779 /* Handle the special case of two integer constants faster. */
1780 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1782 /* And some specific cases even faster than that. */
1783 if (code == PLUS_EXPR && integer_zerop (arg0))
1785 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1786 && integer_zerop (arg1))
1788 else if (code == MULT_EXPR && integer_onep (arg0))
1791 /* Handle general case of two integer constants. */
1792 return int_const_binop (code, arg0, arg1, 0);
1795 return fold_build2 (code, type, arg0, arg1);
1798 /* Given two values, either both of sizetype or both of bitsizetype,
1799 compute the difference between the two values. Return the value
1800 in the signed type corresponding to the type of the operands.
1803 size_diffop (tree arg0, tree arg1)
1805 tree type = TREE_TYPE (arg0);
1808 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1811 /* If the type is already signed, just do the simple thing. */
1812 if (!TYPE_UNSIGNED (type))
1813 return size_binop (MINUS_EXPR, arg0, arg1);
1815 if (type == sizetype)
1817 else if (type == bitsizetype)
1818 ctype = sbitsizetype;
1820 ctype = lang_hooks.types.signed_type (type);
1822 /* If either operand is not a constant, do the conversions to the signed
1823 type and subtract. The hardware will do the right thing with any
1824 overflow in the subtraction. */
1825 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1826 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1827 fold_convert (ctype, arg1));
1829 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1830 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1831 overflow) and negate (which can't either). Special-case a result
1832 of zero while we're here. */
1833 if (tree_int_cst_equal (arg0, arg1))
1834 return build_int_cst (ctype, 0);
1835 else if (tree_int_cst_lt (arg1, arg0))
1836 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1838 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1839 fold_convert (ctype, size_binop (MINUS_EXPR,
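/* Editor's illustration: size_diffop (size_int (4), size_int (12)) must not
   be evaluated in the unsigned sizetype, where 4 - 12 would wrap.  The code
   above therefore orders the constants first and computes -(12 - 4) == -8
   in the signed counterpart type (CTYPE), which cannot overflow.  */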
1843 /* A subroutine of fold_convert_const handling conversions of an
1844 INTEGER_CST to another integer type. */
1847 fold_convert_const_int_from_int (tree type, tree arg1)
1851 /* Given an integer constant, make a new constant with the new type,
1852 appropriately sign-extended or truncated. */
1853 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1854 TREE_INT_CST_HIGH (arg1));
1856 t = force_fit_type (t,
1857 /* Don't set the overflow when
1858 converting a pointer */
1859 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1860 (TREE_INT_CST_HIGH (arg1) < 0
1861 && (TYPE_UNSIGNED (type)
1862 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1863 | TREE_OVERFLOW (arg1),
1864 TREE_CONSTANT_OVERFLOW (arg1));
1869 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1870 to an integer type. */
1873 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1878 /* The following code implements the floating point to integer
1879 conversion rules required by the Java Language Specification,
1880 that IEEE NaNs are mapped to zero and values that overflow
1881 the target precision saturate, i.e. values greater than
1882 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1883 are mapped to INT_MIN. These semantics are allowed by the
1884 C and C++ standards that simply state that the behavior of
1885 FP-to-integer conversion is unspecified upon overflow. */
1887 HOST_WIDE_INT high, low;
1889 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1893 case FIX_TRUNC_EXPR:
1894 real_trunc (&r, VOIDmode, &x);
1901 /* If R is NaN, return zero and show we have an overflow. */
1902 if (REAL_VALUE_ISNAN (r))
1909 /* See if R is less than the lower bound or greater than the
1914 tree lt = TYPE_MIN_VALUE (type);
1915 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1916 if (REAL_VALUES_LESS (r, l))
1919 high = TREE_INT_CST_HIGH (lt);
1920 low = TREE_INT_CST_LOW (lt);
1926 tree ut = TYPE_MAX_VALUE (type);
1929 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1930 if (REAL_VALUES_LESS (u, r))
1933 high = TREE_INT_CST_HIGH (ut);
1934 low = TREE_INT_CST_LOW (ut);
1940 REAL_VALUE_TO_INT (&low, &high, r);
1942 t = build_int_cst_wide (type, low, high);
1944 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1945 TREE_CONSTANT_OVERFLOW (arg1));
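/* Editor's illustration of the saturating behavior above: converting the
   REAL_CST 1.0e30 to a 32-bit signed integer type yields INT_MAX with the
   overflow flag set, converting -1.0e30 yields INT_MIN, and converting a
   NaN yields zero, following the Java-style rules described above rather
   than leaving the result unspecified.  */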
1949 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1950 to another floating point type. */
1953 fold_convert_const_real_from_real (tree type, tree arg1)
1955 REAL_VALUE_TYPE value;
1958 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1959 t = build_real (type, value);
1961 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1962 TREE_CONSTANT_OVERFLOW (t)
1963 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1967 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1968 type TYPE. If no simplification can be done, return NULL_TREE. */
1971 fold_convert_const (enum tree_code code, tree type, tree arg1)
1973 if (TREE_TYPE (arg1) == type)
1976 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1978 if (TREE_CODE (arg1) == INTEGER_CST)
1979 return fold_convert_const_int_from_int (type, arg1);
1980 else if (TREE_CODE (arg1) == REAL_CST)
1981 return fold_convert_const_int_from_real (code, type, arg1);
1983 else if (TREE_CODE (type) == REAL_TYPE)
1985 if (TREE_CODE (arg1) == INTEGER_CST)
1986 return build_real_from_int_cst (type, arg1);
1987 if (TREE_CODE (arg1) == REAL_CST)
1988 return fold_convert_const_real_from_real (type, arg1);
1993 /* Construct a vector of zero elements of vector type TYPE. */
1996 build_zero_vector (tree type)
2001 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2002 units = TYPE_VECTOR_SUBPARTS (type);
2005 for (i = 0; i < units; i++)
2006 list = tree_cons (NULL_TREE, elem, list);
2007 return build_vector (type, list);
2010 /* Convert expression ARG to type TYPE. Used by the middle-end for
2011 simple conversions in preference to calling the front-end's convert. */
2014 fold_convert (tree type, tree arg)
2016 tree orig = TREE_TYPE (arg);
2022 if (TREE_CODE (arg) == ERROR_MARK
2023 || TREE_CODE (type) == ERROR_MARK
2024 || TREE_CODE (orig) == ERROR_MARK)
2025 return error_mark_node;
2027 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2028 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2029 TYPE_MAIN_VARIANT (orig)))
2030 return fold_build1 (NOP_EXPR, type, arg);
2032 switch (TREE_CODE (type))
2034 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2035 case POINTER_TYPE: case REFERENCE_TYPE:
2037 if (TREE_CODE (arg) == INTEGER_CST)
2039 tem = fold_convert_const (NOP_EXPR, type, arg);
2040 if (tem != NULL_TREE)
2043 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2044 || TREE_CODE (orig) == OFFSET_TYPE)
2045 return fold_build1 (NOP_EXPR, type, arg);
2046 if (TREE_CODE (orig) == COMPLEX_TYPE)
2048 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2049 return fold_convert (type, tem);
2051 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2052 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2053 return fold_build1 (NOP_EXPR, type, arg);
2056 if (TREE_CODE (arg) == INTEGER_CST)
2058 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2059 if (tem != NULL_TREE)
2062 else if (TREE_CODE (arg) == REAL_CST)
2064 tem = fold_convert_const (NOP_EXPR, type, arg);
2065 if (tem != NULL_TREE)
2069 switch (TREE_CODE (orig))
2072 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2073 case POINTER_TYPE: case REFERENCE_TYPE:
2074 return fold_build1 (FLOAT_EXPR, type, arg);
2077 return fold_build1 (NOP_EXPR, type, arg);
2080 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2081 return fold_convert (type, tem);
2088 switch (TREE_CODE (orig))
2091 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2092 case POINTER_TYPE: case REFERENCE_TYPE:
2094 return build2 (COMPLEX_EXPR, type,
2095 fold_convert (TREE_TYPE (type), arg),
2096 fold_convert (TREE_TYPE (type), integer_zero_node));
2101 if (TREE_CODE (arg) == COMPLEX_EXPR)
2103 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2104 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2105 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2108 arg = save_expr (arg);
2109 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2110 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2111 rpart = fold_convert (TREE_TYPE (type), rpart);
2112 ipart = fold_convert (TREE_TYPE (type), ipart);
2113 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2121 if (integer_zerop (arg))
2122 return build_zero_vector (type);
2123 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2124 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2125 || TREE_CODE (orig) == VECTOR_TYPE);
2126 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2129 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2136 /* Return false if expr can be assumed not to be an lvalue, true
2140 maybe_lvalue_p (tree x)
2142 /* We only need to wrap lvalue tree codes. */
2143 switch (TREE_CODE (x))
2154 case ALIGN_INDIRECT_REF:
2155 case MISALIGNED_INDIRECT_REF:
2157 case ARRAY_RANGE_REF:
2163 case PREINCREMENT_EXPR:
2164 case PREDECREMENT_EXPR:
2166 case TRY_CATCH_EXPR:
2167 case WITH_CLEANUP_EXPR:
2178 /* Assume the worst for front-end tree codes. */
2179 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2187 /* Return an expr equal to X but certainly not valid as an lvalue. */
2192 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2197 if (! maybe_lvalue_p (x))
2199 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2202 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2203 Zero means allow extended lvalues. */
2205 int pedantic_lvalues;
2207 /* When pedantic, return an expr equal to X but certainly not valid as a
2208 pedantic lvalue. Otherwise, return X. */
2211 pedantic_non_lvalue (tree x)
2213 if (pedantic_lvalues)
2214 return non_lvalue (x);
2219 /* Given a tree comparison code, return the code that is the logical inverse
2220 of the given code. It is not safe to do this for floating-point
2221 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2222 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2225 invert_tree_comparison (enum tree_code code, bool honor_nans)
2227 if (honor_nans && flag_trapping_math)
2237 return honor_nans ? UNLE_EXPR : LE_EXPR;
2239 return honor_nans ? UNLT_EXPR : LT_EXPR;
2241 return honor_nans ? UNGE_EXPR : GE_EXPR;
2243 return honor_nans ? UNGT_EXPR : GT_EXPR;
2257 return UNORDERED_EXPR;
2258 case UNORDERED_EXPR:
2259 return ORDERED_EXPR;
2265 /* Similar, but return the comparison that results if the operands are
2266 swapped. This is safe for floating-point. */
2269 swap_tree_comparison (enum tree_code code)
2276 case UNORDERED_EXPR:
2302 /* Convert a comparison tree code from an enum tree_code representation
2303 into a compcode bit-based encoding. This function is the inverse of
2304 compcode_to_comparison. */
2306 static enum comparison_code
2307 comparison_to_compcode (enum tree_code code)
2324 return COMPCODE_ORD;
2325 case UNORDERED_EXPR:
2326 return COMPCODE_UNORD;
2328 return COMPCODE_UNLT;
2330 return COMPCODE_UNEQ;
2332 return COMPCODE_UNLE;
2334 return COMPCODE_UNGT;
2336 return COMPCODE_LTGT;
2338 return COMPCODE_UNGE;
2344 /* Convert a compcode bit-based encoding of a comparison operator back
2345 to GCC's enum tree_code representation. This function is the
2346 inverse of comparison_to_compcode. */
2348 static enum tree_code
2349 compcode_to_comparison (enum comparison_code code)
2366 return ORDERED_EXPR;
2367 case COMPCODE_UNORD:
2368 return UNORDERED_EXPR;
2386 /* Return a tree for the comparison which is the combination of
2387 doing the AND or OR (depending on CODE) of the two operations LCODE
2388 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2389 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2390 if this makes the transformation invalid. */
2393 combine_comparisons (enum tree_code code, enum tree_code lcode,
2394 enum tree_code rcode, tree truth_type,
2395 tree ll_arg, tree lr_arg)
2397 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2398 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2399 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2400 enum comparison_code compcode;
2404 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2405 compcode = lcompcode & rcompcode;
2408 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2409 compcode = lcompcode | rcompcode;
2418 /* Eliminate unordered comparisons, as well as LTGT and ORD
2419 which are not used unless the mode has NaNs. */
2420 compcode &= ~COMPCODE_UNORD;
2421 if (compcode == COMPCODE_LTGT)
2422 compcode = COMPCODE_NE;
2423 else if (compcode == COMPCODE_ORD)
2424 compcode = COMPCODE_TRUE;
2426 else if (flag_trapping_math)
2428 /* Check that the original operation and the optimized ones will trap
2429 under the same condition. */
2430 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2431 && (lcompcode != COMPCODE_EQ)
2432 && (lcompcode != COMPCODE_ORD);
2433 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2434 && (rcompcode != COMPCODE_EQ)
2435 && (rcompcode != COMPCODE_ORD);
2436 bool trap = (compcode & COMPCODE_UNORD) == 0
2437 && (compcode != COMPCODE_EQ)
2438 && (compcode != COMPCODE_ORD);
2440 /* In a short-circuited boolean expression the LHS might be
2441 such that the RHS, if evaluated, will never trap. For
2442 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2443 if neither x nor y is NaN. (This is a mixed blessing: for
2444 example, the expression above will never trap, hence
2445 optimizing it to x < y would be invalid). */
2446 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2447 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2450 /* If the comparison was short-circuited, and only the RHS
2451 trapped, we may now generate a spurious trap. */
2453 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2456 /* If we changed the conditions that cause a trap, we lose. */
2457 if ((ltrap || rtrap) != trap)
2461 if (compcode == COMPCODE_TRUE)
2462 return constant_boolean_node (true, truth_type);
2463 else if (compcode == COMPCODE_FALSE)
2464 return constant_boolean_node (false, truth_type);
2466 return fold_build2 (compcode_to_comparison (compcode),
2467 truth_type, ll_arg, lr_arg);
2470 /* Return nonzero if CODE is a tree code that represents a truth value. */
2473 truth_value_p (enum tree_code code)
2475 return (TREE_CODE_CLASS (code) == tcc_comparison
2476 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2477 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2478 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2481 /* Return nonzero if two operands (typically of the same tree node)
2482 are necessarily equal. If either argument has side-effects this
2483 function returns zero. FLAGS modifies behavior as follows:
2485 If OEP_ONLY_CONST is set, only return nonzero for constants.
2486 This function tests whether the operands are indistinguishable;
2487 it does not test whether they are equal using C's == operation.
2488 The distinction is important for IEEE floating point, because
2489 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2490 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2492 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2493 even though it may hold multiple values during a function.
2494 This is because a GCC tree node guarantees that nothing else is
2495 executed between the evaluation of its "operands" (which may often
2496 be evaluated in arbitrary order). Hence if the operands themselves
2497 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2498 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2499 unset means assuming isochronic (or instantaneous) tree equivalence.
2500 Unless comparing arbitrary expression trees, such as from different
2501 statements, this flag can usually be left unset.
2503 If OEP_PURE_SAME is set, then pure functions with identical arguments
2504 are considered the same. It is used when the caller has other ways
2505 to ensure that global memory is unchanged in between. */
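/* Illustrative note (editorial addition): with FLAGS == 0, two uses of
   the same VAR_DECL compare equal, while the REAL_CSTs -0.0 and 0.0 do
   not, because the test is for indistinguishability rather than C's ==.
   With OEP_ONLY_CONST set, only constant operands can compare equal.  */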
2508 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2510 /* If either is ERROR_MARK, they aren't equal. */
2511 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2514 /* If both types don't have the same signedness, then we can't consider
2515 them equal. We must check this before the STRIP_NOPS calls
2516 because they may change the signedness of the arguments. */
2517 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2520 /* If both types don't have the same precision, then it is not safe
2522 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2528 /* In case both args are comparisons but with different comparison
2529 code, try to swap the comparison operands of one arg to produce
2530 a match and compare that variant. */
2531 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2532 && COMPARISON_CLASS_P (arg0)
2533 && COMPARISON_CLASS_P (arg1))
2535 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2537 if (TREE_CODE (arg0) == swap_code)
2538 return operand_equal_p (TREE_OPERAND (arg0, 0),
2539 TREE_OPERAND (arg1, 1), flags)
2540 && operand_equal_p (TREE_OPERAND (arg0, 1),
2541 TREE_OPERAND (arg1, 0), flags);
2544 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2545 /* This is needed for conversions and for COMPONENT_REF.
2546 Might as well play it safe and always test this. */
2547 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2548 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2549 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2552 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2553 We don't care about side effects in that case because the SAVE_EXPR
2554 takes care of that for us. In all other cases, two expressions are
2555 equal if they have no side effects. If we have two identical
2556 expressions with side effects that should be treated the same due
2557 to the only side effects being identical SAVE_EXPR's, that will
2558 be detected in the recursive calls below. */
2559 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2560 && (TREE_CODE (arg0) == SAVE_EXPR
2561 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2564 /* Next handle constant cases, those for which we can return 1 even
2565 if ONLY_CONST is set. */
2566 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2567 switch (TREE_CODE (arg0))
2570 return tree_int_cst_equal (arg0, arg1);
2573 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2574 TREE_REAL_CST (arg1));
2580 v1 = TREE_VECTOR_CST_ELTS (arg0);
2581 v2 = TREE_VECTOR_CST_ELTS (arg1);
2584 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2587 v1 = TREE_CHAIN (v1);
2588 v2 = TREE_CHAIN (v2);
2595 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2597 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2601 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2602 && ! memcmp (TREE_STRING_POINTER (arg0),
2603 TREE_STRING_POINTER (arg1),
2604 TREE_STRING_LENGTH (arg0)));
2607 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2613 if (flags & OEP_ONLY_CONST)
2616 /* Define macros to test an operand from arg0 and arg1 for equality and a
2617 variant that allows null and views null as being different from any
2618 non-null value. In the latter case, if either is null, then both
2619 must be; otherwise, do the normal comparison. */
2620 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2621 TREE_OPERAND (arg1, N), flags)
2623 #define OP_SAME_WITH_NULL(N) \
2624 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2625 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2627 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2630 /* Two conversions are equal only if signedness and modes match. */
2631 switch (TREE_CODE (arg0))
2635 case FIX_TRUNC_EXPR:
2636 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2637 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2647 case tcc_comparison:
2649 if (OP_SAME (0) && OP_SAME (1))
2652 /* For commutative ops, allow the other order. */
2653 return (commutative_tree_code (TREE_CODE (arg0))
2654 && operand_equal_p (TREE_OPERAND (arg0, 0),
2655 TREE_OPERAND (arg1, 1), flags)
2656 && operand_equal_p (TREE_OPERAND (arg0, 1),
2657 TREE_OPERAND (arg1, 0), flags));
2660 /* If either of the pointer (or reference) expressions we are
2661 dereferencing contain a side effect, these cannot be equal. */
2662 if (TREE_SIDE_EFFECTS (arg0)
2663 || TREE_SIDE_EFFECTS (arg1))
2666 switch (TREE_CODE (arg0))
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2676 case ARRAY_RANGE_REF:
2677 /* Operands 2 and 3 may be null. */
2680 && OP_SAME_WITH_NULL (2)
2681 && OP_SAME_WITH_NULL (3));
2684 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2685 may be NULL when we're called to compare MEM_EXPRs. */
2686 return OP_SAME_WITH_NULL (0)
2688 && OP_SAME_WITH_NULL (2);
2691 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2697 case tcc_expression:
2698 switch (TREE_CODE (arg0))
2701 case TRUTH_NOT_EXPR:
2704 case TRUTH_ANDIF_EXPR:
2705 case TRUTH_ORIF_EXPR:
2706 return OP_SAME (0) && OP_SAME (1);
2708 case TRUTH_AND_EXPR:
2710 case TRUTH_XOR_EXPR:
2711 if (OP_SAME (0) && OP_SAME (1))
2714 /* Otherwise take into account this is a commutative operation. */
2715 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2716 TREE_OPERAND (arg1, 1), flags)
2717 && operand_equal_p (TREE_OPERAND (arg0, 1),
2718 TREE_OPERAND (arg1, 0), flags));
2721 /* If the CALL_EXPRs call different functions, then they
2722 clearly can not be equal. */
2727 unsigned int cef = call_expr_flags (arg0);
2728 if (flags & OEP_PURE_SAME)
2729 cef &= ECF_CONST | ECF_PURE;
2736 /* Now see if all the arguments are the same. operand_equal_p
2737 does not handle TREE_LIST, so we walk the operands here
2738 feeding them to operand_equal_p. */
2739 arg0 = TREE_OPERAND (arg0, 1);
2740 arg1 = TREE_OPERAND (arg1, 1);
2741 while (arg0 && arg1)
2743 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2747 arg0 = TREE_CHAIN (arg0);
2748 arg1 = TREE_CHAIN (arg1);
2751 /* If we get here and both argument lists are exhausted
2752 then the CALL_EXPRs are equal. */
2753 return ! (arg0 || arg1);
2759 case tcc_declaration:
2760 /* Consider __builtin_sqrt equal to sqrt. */
2761 return (TREE_CODE (arg0) == FUNCTION_DECL
2762 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2763 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2764 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2771 #undef OP_SAME_WITH_NULL
2774 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2775 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2777 When in doubt, return 0. */
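/* Hypothetical example (editorial addition): if ARG1 is (int) c for a
   signed char c that was being compared against the constant 10 (OTHER),
   shorten_compare may have rewritten the comparison to use c directly;
   this predicate then accepts c (ARG0) as equivalent to (int) c (ARG1)
   for the purposes of that comparison.  */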
2780 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2782 int unsignedp1, unsignedpo;
2783 tree primarg0, primarg1, primother;
2784 unsigned int correct_width;
2786 if (operand_equal_p (arg0, arg1, 0))
2789 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2790 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2793 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2794 and see if the inner values are the same. This removes any
2795 signedness comparison, which doesn't matter here. */
2796 primarg0 = arg0, primarg1 = arg1;
2797 STRIP_NOPS (primarg0);
2798 STRIP_NOPS (primarg1);
2799 if (operand_equal_p (primarg0, primarg1, 0))
2802 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2803 actual comparison operand, ARG0.
2805 First throw away any conversions to wider types
2806 already present in the operands. */
2808 primarg1 = get_narrower (arg1, &unsignedp1);
2809 primother = get_narrower (other, &unsignedpo);
2811 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2812 if (unsignedp1 == unsignedpo
2813 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2814 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2816 tree type = TREE_TYPE (arg0);
2818 /* Make sure shorter operand is extended the right way
2819 to match the longer operand. */
2820 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2821 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2823 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2830 /* See if ARG is an expression that is either a comparison or is performing
2831 arithmetic on comparisons. The comparisons must only be comparing
2832 two different values, which will be stored in *CVAL1 and *CVAL2; if
2833 they are nonzero it means that some operands have already been found.
2834 No variables may be used anywhere else in the expression except in the
2835 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2836 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2838 If this is true, return 1. Otherwise, return zero. */
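/* Illustrative note (editorial addition): for ARG of the form
   (x < y) || (x == y) the only values used inside comparisons are x and y,
   so this returns 1 with *CVAL1 = x and *CVAL2 = y; an expression such as
   (x < y) + z fails because z appears outside any comparison.  */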
2841 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2843 enum tree_code code = TREE_CODE (arg);
2844 enum tree_code_class class = TREE_CODE_CLASS (code);
2846 /* We can handle some of the tcc_expression cases here. */
2847 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2849 else if (class == tcc_expression
2850 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2851 || code == COMPOUND_EXPR))
2854 else if (class == tcc_expression && code == SAVE_EXPR
2855 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2857 /* If we've already found a CVAL1 or CVAL2, this expression is
2858 too complex to handle. */
2859 if (*cval1 || *cval2)
2869 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2872 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2873 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2874 cval1, cval2, save_p));
2879 case tcc_expression:
2880 if (code == COND_EXPR)
2881 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2882 cval1, cval2, save_p)
2883 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2884 cval1, cval2, save_p)
2885 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2886 cval1, cval2, save_p));
2889 case tcc_comparison:
2890 /* First see if we can handle the first operand, then the second. For
2891 the second operand, we know *CVAL1 can't be zero. It must be that
2892 one side of the comparison is each of the values; test for the
2893 case where this isn't true by failing if the two operands
2896 if (operand_equal_p (TREE_OPERAND (arg, 0),
2897 TREE_OPERAND (arg, 1), 0))
2901 *cval1 = TREE_OPERAND (arg, 0);
2902 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2904 else if (*cval2 == 0)
2905 *cval2 = TREE_OPERAND (arg, 0);
2906 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2911 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2913 else if (*cval2 == 0)
2914 *cval2 = TREE_OPERAND (arg, 1);
2915 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2927 /* ARG is a tree that is known to contain just arithmetic operations and
2928 comparisons. Evaluate the operations in the tree substituting NEW0 for
2929 any occurrence of OLD0 as an operand of a comparison and likewise for
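/* Worked example (editorial addition): substituting NEW0 = 1 for OLD0 = a
   and NEW1 = 2 for OLD1 = b in the tree (a < b) && !(a == b) gives
   (1 < 2) && !(1 == 2), which folds to a true result.  */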
2933 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2935 tree type = TREE_TYPE (arg);
2936 enum tree_code code = TREE_CODE (arg);
2937 enum tree_code_class class = TREE_CODE_CLASS (code);
2939 /* We can handle some of the tcc_expression cases here. */
2940 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2942 else if (class == tcc_expression
2943 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2949 return fold_build1 (code, type,
2950 eval_subst (TREE_OPERAND (arg, 0),
2951 old0, new0, old1, new1));
2954 return fold_build2 (code, type,
2955 eval_subst (TREE_OPERAND (arg, 0),
2956 old0, new0, old1, new1),
2957 eval_subst (TREE_OPERAND (arg, 1),
2958 old0, new0, old1, new1));
2960 case tcc_expression:
2964 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2967 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2970 return fold_build3 (code, type,
2971 eval_subst (TREE_OPERAND (arg, 0),
2972 old0, new0, old1, new1),
2973 eval_subst (TREE_OPERAND (arg, 1),
2974 old0, new0, old1, new1),
2975 eval_subst (TREE_OPERAND (arg, 2),
2976 old0, new0, old1, new1));
2980 /* Fall through - ??? */
2982 case tcc_comparison:
2984 tree arg0 = TREE_OPERAND (arg, 0);
2985 tree arg1 = TREE_OPERAND (arg, 1);
2987 /* We need to check both for exact equality and tree equality. The
2988 former will be true if the operand has a side-effect. In that
2989 case, we know the operand occurred exactly once. */
2991 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2993 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2996 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2998 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3001 return fold_build2 (code, type, arg0, arg1);
3009 /* Return a tree for the case when the result of an expression is RESULT
3010 converted to TYPE and OMITTED was previously an operand of the expression
3011 but is now not needed (e.g., we folded OMITTED * 0).
3013 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3014 the conversion of RESULT to TYPE. */
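/* Illustrative note (editorial addition): when fold turns f () * 0 into 0,
   the omitted operand f () still has side effects, so the result becomes
   the COMPOUND_EXPR (f (), 0); a side-effect-free omitted operand is
   simply dropped and the converted RESULT is returned directly.  */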
3017 omit_one_operand (tree type, tree result, tree omitted)
3019 tree t = fold_convert (type, result);
3021 if (TREE_SIDE_EFFECTS (omitted))
3022 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3024 return non_lvalue (t);
3027 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3030 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3032 tree t = fold_convert (type, result);
3034 if (TREE_SIDE_EFFECTS (omitted))
3035 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3037 return pedantic_non_lvalue (t);
3040 /* Return a tree for the case when the result of an expression is RESULT
3041 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3042 of the expression but are now not needed.
3044 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3045 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3046 evaluated before OMITTED2. Otherwise, if neither has side effects,
3047 just do the conversion of RESULT to TYPE. */
3050 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3052 tree t = fold_convert (type, result);
3054 if (TREE_SIDE_EFFECTS (omitted2))
3055 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3056 if (TREE_SIDE_EFFECTS (omitted1))
3057 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3059 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3063 /* Return a simplified tree node for the truth-negation of ARG. This
3064 never alters ARG itself. We assume that ARG is an operation that
3065 returns a truth value (0 or 1).
3067 FIXME: one would think we would fold the result, but it causes
3068 problems with the dominator optimizer. */
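/* Illustrative note (editorial addition): the negation of a && b is built
   as !a || !b and vice versa (De Morgan), an integer comparison such as
   a < b is inverted to a >= b, and a floating-point ordering comparison
   under -ftrapping-math cannot be inverted here, so the caller
   (invert_truthvalue) wraps it in a TRUTH_NOT_EXPR instead.  */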
3071 fold_truth_not_expr (tree arg)
3073 tree type = TREE_TYPE (arg);
3074 enum tree_code code = TREE_CODE (arg);
3076 /* If this is a comparison, we can simply invert it, except for
3077 floating-point non-equality comparisons, in which case we just
3078 enclose a TRUTH_NOT_EXPR around what we have. */
3080 if (TREE_CODE_CLASS (code) == tcc_comparison)
3082 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3083 if (FLOAT_TYPE_P (op_type)
3084 && flag_trapping_math
3085 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3086 && code != NE_EXPR && code != EQ_EXPR)
3090 code = invert_tree_comparison (code,
3091 HONOR_NANS (TYPE_MODE (op_type)));
3092 if (code == ERROR_MARK)
3095 return build2 (code, type,
3096 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3103 return constant_boolean_node (integer_zerop (arg), type);
3105 case TRUTH_AND_EXPR:
3106 return build2 (TRUTH_OR_EXPR, type,
3107 invert_truthvalue (TREE_OPERAND (arg, 0)),
3108 invert_truthvalue (TREE_OPERAND (arg, 1)));
3111 return build2 (TRUTH_AND_EXPR, type,
3112 invert_truthvalue (TREE_OPERAND (arg, 0)),
3113 invert_truthvalue (TREE_OPERAND (arg, 1)));
3115 case TRUTH_XOR_EXPR:
3116 /* Here we can invert either operand. We invert the first operand
3117 unless the second operand is a TRUTH_NOT_EXPR in which case our
3118 result is the XOR of the first operand with the inside of the
3119 negation of the second operand. */
3121 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3122 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3123 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3125 return build2 (TRUTH_XOR_EXPR, type,
3126 invert_truthvalue (TREE_OPERAND (arg, 0)),
3127 TREE_OPERAND (arg, 1));
3129 case TRUTH_ANDIF_EXPR:
3130 return build2 (TRUTH_ORIF_EXPR, type,
3131 invert_truthvalue (TREE_OPERAND (arg, 0)),
3132 invert_truthvalue (TREE_OPERAND (arg, 1)));
3134 case TRUTH_ORIF_EXPR:
3135 return build2 (TRUTH_ANDIF_EXPR, type,
3136 invert_truthvalue (TREE_OPERAND (arg, 0)),
3137 invert_truthvalue (TREE_OPERAND (arg, 1)));
3139 case TRUTH_NOT_EXPR:
3140 return TREE_OPERAND (arg, 0);
3144 tree arg1 = TREE_OPERAND (arg, 1);
3145 tree arg2 = TREE_OPERAND (arg, 2);
3146 /* A COND_EXPR may have a throw as one operand, which
3147 then has void type. Just leave void operands
3149 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3150 VOID_TYPE_P (TREE_TYPE (arg1))
3151 ? arg1 : invert_truthvalue (arg1),
3152 VOID_TYPE_P (TREE_TYPE (arg2))
3153 ? arg2 : invert_truthvalue (arg2));
3157 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3158 invert_truthvalue (TREE_OPERAND (arg, 1)));
3160 case NON_LVALUE_EXPR:
3161 return invert_truthvalue (TREE_OPERAND (arg, 0));
3164 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3165 return build1 (TRUTH_NOT_EXPR, type, arg);
3169 return build1 (TREE_CODE (arg), type,
3170 invert_truthvalue (TREE_OPERAND (arg, 0)));
3173 if (!integer_onep (TREE_OPERAND (arg, 1)))
3175 return build2 (EQ_EXPR, type, arg,
3176 build_int_cst (type, 0));
3179 return build1 (TRUTH_NOT_EXPR, type, arg);
3181 case CLEANUP_POINT_EXPR:
3182 return build1 (CLEANUP_POINT_EXPR, type,
3183 invert_truthvalue (TREE_OPERAND (arg, 0)));
3192 /* Return a simplified tree node for the truth-negation of ARG. This
3193 never alters ARG itself. We assume that ARG is an operation that
3194 returns a truth value (0 or 1).
3196 FIXME: one would think we would fold the result, but it causes
3197 problems with the dominator optimizer. */
3200 invert_truthvalue (tree arg)
3204 if (TREE_CODE (arg) == ERROR_MARK)
3207 tem = fold_truth_not_expr (arg);
3209 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3214 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3215 operands are another bit-wise operation with a common input. If so,
3216 distribute the bit operations to save an operation and possibly two if
3217 constants are involved. For example, convert
3218 (A | B) & (A | C) into A | (B & C)
3219 Further simplification will occur if B and C are constants.
3221 If this optimization cannot be done, 0 will be returned. */
3224 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3229 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3230 || TREE_CODE (arg0) == code
3231 || (TREE_CODE (arg0) != BIT_AND_EXPR
3232 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3235 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 0);
3238 left = TREE_OPERAND (arg0, 1);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 0);
3244 left = TREE_OPERAND (arg0, 1);
3245 right = TREE_OPERAND (arg1, 0);
3247 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3249 common = TREE_OPERAND (arg0, 1);
3250 left = TREE_OPERAND (arg0, 0);
3251 right = TREE_OPERAND (arg1, 1);
3253 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3255 common = TREE_OPERAND (arg0, 1);
3256 left = TREE_OPERAND (arg0, 0);
3257 right = TREE_OPERAND (arg1, 0);
3262 return fold_build2 (TREE_CODE (arg0), type, common,
3263 fold_build2 (code, type, left, right));
3266 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3267 with code CODE. This optimization is unsafe. */
3269 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3271 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3272 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3274 /* (A / C) +- (B / C) -> (A +- B) / C. */
3276 && operand_equal_p (TREE_OPERAND (arg0, 1),
3277 TREE_OPERAND (arg1, 1), 0))
3278 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3279 fold_build2 (code, type,
3280 TREE_OPERAND (arg0, 0),
3281 TREE_OPERAND (arg1, 0)),
3282 TREE_OPERAND (arg0, 1));
3284 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3285 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3286 TREE_OPERAND (arg1, 0), 0)
3287 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3288 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3290 REAL_VALUE_TYPE r0, r1;
3291 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3292 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3294 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3296 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3297 real_arithmetic (&r0, code, &r0, &r1);
3298 return fold_build2 (MULT_EXPR, type,
3299 TREE_OPERAND (arg0, 0),
3300 build_real (type, r0));
3306 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3307 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3310 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3317 tree size = TYPE_SIZE (TREE_TYPE (inner));
3318 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3319 || POINTER_TYPE_P (TREE_TYPE (inner)))
3320 && host_integerp (size, 0)
3321 && tree_low_cst (size, 0) == bitsize)
3322 return fold_convert (type, inner);
3325 result = build3 (BIT_FIELD_REF, type, inner,
3326 size_int (bitsize), bitsize_int (bitpos));
3328 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3333 /* Optimize a bit-field compare.
3335 There are two cases: First is a compare against a constant and the
3336 second is a comparison of two items where the fields are at the same
3337 bit position relative to the start of a chunk (byte, halfword, word)
3338 large enough to contain it. In these cases we can avoid the shift
3339 implicit in bitfield extractions.
3341 For constants, we emit a compare of the shifted constant with the
3342 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3343 compared. For two fields at the same position, we do the ANDs with the
3344 similar mask and compare the result of the ANDs.
3346 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3347 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3348 are the left and right operands of the comparison, respectively.
3350 If the optimization described above can be done, we return the resulting
3351 tree. Otherwise we return zero. */
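/* Hypothetical example (editorial addition): for
     struct s { unsigned a : 3; unsigned b : 5; } x;
   the test x.b == 7 can be rewritten as
     (w & MASK) == (7 << 3)
   where w is the containing word and MASK selects bits 3..7, so no shift
   is needed to extract the field before comparing.  The exact bit
   positions depend on the target's endianness and field layout.  */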
3354 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3357 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3358 tree type = TREE_TYPE (lhs);
3359 tree signed_type, unsigned_type;
3360 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3361 enum machine_mode lmode, rmode, nmode;
3362 int lunsignedp, runsignedp;
3363 int lvolatilep = 0, rvolatilep = 0;
3364 tree linner, rinner = NULL_TREE;
3368 /* Get all the information about the extractions being done. If the bit size
3369 is the same as the size of the underlying object, we aren't doing an
3370 extraction at all and so can do nothing. We also don't want to
3371 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3372 then will no longer be able to replace it. */
3373 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3374 &lunsignedp, &lvolatilep, false);
3375 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3376 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3381 /* If this is not a constant, we can only do something if bit positions,
3382 sizes, and signedness are the same. */
3383 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3384 &runsignedp, &rvolatilep, false);
3386 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3387 || lunsignedp != runsignedp || offset != 0
3388 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3392 /* See if we can find a mode to refer to this field. We should be able to,
3393 but fail if we can't. */
3394 nmode = get_best_mode (lbitsize, lbitpos,
3395 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3396 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3397 TYPE_ALIGN (TREE_TYPE (rinner))),
3398 word_mode, lvolatilep || rvolatilep);
3399 if (nmode == VOIDmode)
3402 /* Set signed and unsigned types of the precision of this mode for the
3404 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3405 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3407 /* Compute the bit position and size for the new reference and our offset
3408 within it. If the new reference is the same size as the original, we
3409 won't optimize anything, so return zero. */
3410 nbitsize = GET_MODE_BITSIZE (nmode);
3411 nbitpos = lbitpos & ~ (nbitsize - 1);
3413 if (nbitsize == lbitsize)
3416 if (BYTES_BIG_ENDIAN)
3417 lbitpos = nbitsize - lbitsize - lbitpos;
3419 /* Make the mask to be used against the extracted field. */
3420 mask = build_int_cst (unsigned_type, -1);
3421 mask = force_fit_type (mask, 0, false, false);
3422 mask = fold_convert (unsigned_type, mask);
3423 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3424 mask = const_binop (RSHIFT_EXPR, mask,
3425 size_int (nbitsize - lbitsize - lbitpos), 0);
3428 /* If not comparing with constant, just rework the comparison
3430 return build2 (code, compare_type,
3431 build2 (BIT_AND_EXPR, unsigned_type,
3432 make_bit_field_ref (linner, unsigned_type,
3433 nbitsize, nbitpos, 1),
3435 build2 (BIT_AND_EXPR, unsigned_type,
3436 make_bit_field_ref (rinner, unsigned_type,
3437 nbitsize, nbitpos, 1),
3440 /* Otherwise, we are handling the constant case. See if the constant is too
3441 big for the field. Warn and return a tree for 0 (false) if so. We do
3442 this not only for its own sake, but to avoid having to test for this
3443 error case below. If we didn't, we might generate wrong code.
3445 For unsigned fields, the constant shifted right by the field length should
3446 be all zero. For signed fields, the high-order bits should agree with
3451 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3452 fold_convert (unsigned_type, rhs),
3453 size_int (lbitsize), 0)))
3455 warning (0, "comparison is always %d due to width of bit-field",
3457 return constant_boolean_node (code == NE_EXPR, compare_type);
3462 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3463 size_int (lbitsize - 1), 0);
3464 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3466 warning (0, "comparison is always %d due to width of bit-field",
3468 return constant_boolean_node (code == NE_EXPR, compare_type);
3472 /* Single-bit compares should always be against zero. */
3473 if (lbitsize == 1 && ! integer_zerop (rhs))
3475 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3476 rhs = build_int_cst (type, 0);
3479 /* Make a new bitfield reference, shift the constant over the
3480 appropriate number of bits and mask it with the computed mask
3481 (in case this was a signed field). If we changed it, make a new one. */
3482 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3485 TREE_SIDE_EFFECTS (lhs) = 1;
3486 TREE_THIS_VOLATILE (lhs) = 1;
3489 rhs = const_binop (BIT_AND_EXPR,
3490 const_binop (LSHIFT_EXPR,
3491 fold_convert (unsigned_type, rhs),
3492 size_int (lbitpos), 0),
3495 return build2 (code, compare_type,
3496 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3500 /* Subroutine for fold_truthop: decode a field reference.
3502 If EXP is a comparison reference, we return the innermost reference.
3504 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3505 set to the starting bit number.
3507 If the innermost field can be completely contained in a mode-sized
3508 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3510 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3511 otherwise it is not changed.
3513 *PUNSIGNEDP is set to the signedness of the field.
3515 *PMASK is set to the mask used. This is either contained in a
3516 BIT_AND_EXPR or derived from the width of the field.
3518 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3520 Return 0 if this is not a component reference or is one that we can't
3521 do anything with. */
3524 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3525 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3526 int *punsignedp, int *pvolatilep,
3527 tree *pmask, tree *pand_mask)
3529 tree outer_type = 0;
3531 tree mask, inner, offset;
3533 unsigned int precision;
3535 /* All the optimizations using this function assume integer fields.
3536 There are problems with FP fields since the type_for_size call
3537 below can fail for, e.g., XFmode. */
3538 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3541 /* We are interested in the bare arrangement of bits, so strip everything
3542 that doesn't affect the machine mode. However, record the type of the
3543 outermost expression if it may matter below. */
3544 if (TREE_CODE (exp) == NOP_EXPR
3545 || TREE_CODE (exp) == CONVERT_EXPR
3546 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3547 outer_type = TREE_TYPE (exp);
3550 if (TREE_CODE (exp) == BIT_AND_EXPR)
3552 and_mask = TREE_OPERAND (exp, 1);
3553 exp = TREE_OPERAND (exp, 0);
3554 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3555 if (TREE_CODE (and_mask) != INTEGER_CST)
3559 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3560 punsignedp, pvolatilep, false);
3561 if ((inner == exp && and_mask == 0)
3562 || *pbitsize < 0 || offset != 0
3563 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3566 /* If the number of bits in the reference is the same as the bitsize of
3567 the outer type, then the outer type gives the signedness. Otherwise
3568 (in case of a small bitfield) the signedness is unchanged. */
3569 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3570 *punsignedp = TYPE_UNSIGNED (outer_type);
3572 /* Compute the mask to access the bitfield. */
3573 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3574 precision = TYPE_PRECISION (unsigned_type);
3576 mask = build_int_cst (unsigned_type, -1);
3577 mask = force_fit_type (mask, 0, false, false);
3579 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3580 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3582 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3584 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3585 fold_convert (unsigned_type, and_mask), mask);
3588 *pand_mask = and_mask;
3592 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3596 all_ones_mask_p (tree mask, int size)
3598 tree type = TREE_TYPE (mask);
3599 unsigned int precision = TYPE_PRECISION (type);
3602 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3603 tmask = force_fit_type (tmask, 0, false, false);
3606 tree_int_cst_equal (mask,
3607 const_binop (RSHIFT_EXPR,
3608 const_binop (LSHIFT_EXPR, tmask,
3609 size_int (precision - size),
3611 size_int (precision - size), 0));
3614 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3615 represents the sign bit of EXP's type. If EXP represents a sign
3616 or zero extension, also test VAL against the unextended type.
3617 The return value is the (sub)expression whose sign bit is VAL,
3618 or NULL_TREE otherwise. */
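/* Illustrative note (editorial addition): for a 32-bit signed int EXP,
   VAL must be the constant 0x80000000 (a one in bit 31 only) for EXP to
   be returned; if EXP is a widening NOP_EXPR from a 16-bit operand,
   0x8000 is accepted as well, since it is the sign bit of the unextended
   type.  */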
3621 sign_bit_p (tree exp, tree val)
3623 unsigned HOST_WIDE_INT mask_lo, lo;
3624 HOST_WIDE_INT mask_hi, hi;
3628 /* Tree EXP must have an integral type. */
3629 t = TREE_TYPE (exp);
3630 if (! INTEGRAL_TYPE_P (t))
3633 /* Tree VAL must be an integer constant. */
3634 if (TREE_CODE (val) != INTEGER_CST
3635 || TREE_CONSTANT_OVERFLOW (val))
3638 width = TYPE_PRECISION (t);
3639 if (width > HOST_BITS_PER_WIDE_INT)
3641 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3644 mask_hi = ((unsigned HOST_WIDE_INT) -1
3645 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3651 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3654 mask_lo = ((unsigned HOST_WIDE_INT) -1
3655 >> (HOST_BITS_PER_WIDE_INT - width));
3658 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3659 treat VAL as if it were unsigned. */
3660 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3661 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3664 /* Handle extension from a narrower type. */
3665 if (TREE_CODE (exp) == NOP_EXPR
3666 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3667 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3672 /* Subroutine for fold_truthop: determine if an operand is simple enough
3673 to be evaluated unconditionally. */
3676 simple_operand_p (tree exp)
3678 /* Strip any conversions that don't change the machine mode. */
3681 return (CONSTANT_CLASS_P (exp)
3682 || TREE_CODE (exp) == SSA_NAME
3684 && ! TREE_ADDRESSABLE (exp)
3685 && ! TREE_THIS_VOLATILE (exp)
3686 && ! DECL_NONLOCAL (exp)
3687 /* Don't regard global variables as simple. They may be
3688 allocated in ways unknown to the compiler (shared memory,
3689 #pragma weak, etc). */
3690 && ! TREE_PUBLIC (exp)
3691 && ! DECL_EXTERNAL (exp)
3692 /* Loading a static variable is unduly expensive, but global
3693 registers aren't expensive. */
3694 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3697 /* The following functions are subroutines to fold_range_test and allow it to
3698 try to change a logical combination of comparisons into a range test.
3701 X == 2 || X == 3 || X == 4 || X == 5
3705 (unsigned) (X - 2) <= 3
3707 We describe each set of comparisons as being either inside or outside
3708 a range, using a variable named like IN_P, and then describe the
3709 range with a lower and upper bound. If one of the bounds is omitted,
3710 it represents either the highest or lowest value of the type.
3712 In the comments below, we represent a range by two numbers in brackets
3713 preceded by a "+" to designate being inside that range, or a "-" to
3714 designate being outside that range, so the condition can be inverted by
3715 flipping the prefix. An omitted bound is represented by a "-". For
3716 example, "- [-, 10]" means being outside the range starting at the lowest
3717 possible value and ending at 10, in other words, being greater than 10.
3718 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3721 We set up things so that the missing bounds are handled in a consistent
3722 manner so neither a missing bound nor "true" and "false" need to be
3723 handled using a special case. */
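/* Editorial illustration of the underlying arithmetic (not part of the
   original source): the transformation above relies on unsigned
   wrap-around, e.g.

     int in_2_to_5 (int x)
     {
       return (unsigned) x - 2u <= 3u;   // same as 2 <= x && x <= 5
     }

   Values of x below 2 wrap around to very large unsigned numbers and so
   fail the single comparison, which is how one compare replaces the
   chain of equality tests shown above.  */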
3725 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3726 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3727 and UPPER1_P are nonzero if the respective argument is an upper bound
3728 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3729 must be specified for a comparison. ARG1 will be converted to ARG0's
3730 type if both are specified. */
3733 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3734 tree arg1, int upper1_p)
3740 /* If neither arg represents infinity, do the normal operation.
3741 Else, if not a comparison, return infinity. Else handle the special
3742 comparison rules. Note that most of the cases below won't occur, but
3743 are handled for consistency. */
3745 if (arg0 != 0 && arg1 != 0)
3747 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3748 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3750 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3753 if (TREE_CODE_CLASS (code) != tcc_comparison)
3756 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3757 for neither. In real maths, we cannot assume open ended ranges are
3758 the same. But, this is computer arithmetic, where numbers are finite.
3759 We can therefore model any unbounded range with a single value Z,
3760 Z being greater than any representable number. This permits
3761 us to treat unbounded ranges as equal. */
3762 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3763 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3767 result = sgn0 == sgn1;
3770 result = sgn0 != sgn1;
3773 result = sgn0 < sgn1;
3776 result = sgn0 <= sgn1;
3779 result = sgn0 > sgn1;
3782 result = sgn0 >= sgn1;
3788 return constant_boolean_node (result, type);
3791 /* Given EXP, a logical expression, set the range it is testing into
3792 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3793 actually being tested. *PLOW and *PHIGH will be made of the same type
3794 as the returned expression. If EXP is not a comparison, we will most
3795 likely not be returning a useful value and range. */
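/* Illustrative note (editorial addition): for EXP = (x <= 5) this sets
   *PIN_P = 1 with the range [-, 5]; for EXP = (x > 5) it sets *PIN_P = 0
   with the same range, i.e. "outside [-, 5]"; and for
   EXP = (x + 10 <= 15), with x signed and -fwrapv not in effect, the
   constant is moved to the other side, giving the range [-, 5] for x
   itself.  */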
3798 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3800 enum tree_code code;
3801 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3802 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3804 tree low, high, n_low, n_high;
3806 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3807 and see if we can refine the range. Some of the cases below may not
3808 happen, but it doesn't seem worth worrying about this. We "continue"
3809 the outer loop when we've changed something; otherwise we "break"
3810 the switch, which will "break" the while. */
3813 low = high = build_int_cst (TREE_TYPE (exp), 0);
3817 code = TREE_CODE (exp);
3818 exp_type = TREE_TYPE (exp);
3820 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3822 if (TREE_CODE_LENGTH (code) > 0)
3823 arg0 = TREE_OPERAND (exp, 0);
3824 if (TREE_CODE_CLASS (code) == tcc_comparison
3825 || TREE_CODE_CLASS (code) == tcc_unary
3826 || TREE_CODE_CLASS (code) == tcc_binary)
3827 arg0_type = TREE_TYPE (arg0);
3828 if (TREE_CODE_CLASS (code) == tcc_binary
3829 || TREE_CODE_CLASS (code) == tcc_comparison
3830 || (TREE_CODE_CLASS (code) == tcc_expression
3831 && TREE_CODE_LENGTH (code) > 1))
3832 arg1 = TREE_OPERAND (exp, 1);
3837 case TRUTH_NOT_EXPR:
3838 in_p = ! in_p, exp = arg0;
3841 case EQ_EXPR: case NE_EXPR:
3842 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3843 /* We can only do something if the range is testing for zero
3844 and if the second operand is an integer constant. Note that
3845 saying something is "in" the range we make is done by
3846 complementing IN_P since it will set in the initial case of
3847 being not equal to zero; "out" is leaving it alone. */
3848 if (low == 0 || high == 0
3849 || ! integer_zerop (low) || ! integer_zerop (high)
3850 || TREE_CODE (arg1) != INTEGER_CST)
3855 case NE_EXPR: /* - [c, c] */
3858 case EQ_EXPR: /* + [c, c] */
3859 in_p = ! in_p, low = high = arg1;
3861 case GT_EXPR: /* - [-, c] */
3862 low = 0, high = arg1;
3864 case GE_EXPR: /* + [c, -] */
3865 in_p = ! in_p, low = arg1, high = 0;
3867 case LT_EXPR: /* - [c, -] */
3868 low = arg1, high = 0;
3870 case LE_EXPR: /* + [-, c] */
3871 in_p = ! in_p, low = 0, high = arg1;
3877 /* If this is an unsigned comparison, we also know that EXP is
3878 greater than or equal to zero. We base the range tests we make
3879 on that fact, so we record it here so we can parse existing
3880 range tests. We test arg0_type since often the return type
3881 of, e.g. EQ_EXPR, is boolean. */
3882 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3884 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3886 build_int_cst (arg0_type, 0),
3890 in_p = n_in_p, low = n_low, high = n_high;
3892 /* If the high bound is missing, but we have a nonzero low
3893 bound, reverse the range so it goes from zero to the low bound
3895 if (high == 0 && low && ! integer_zerop (low))
3898 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3899 integer_one_node, 0);
3900 low = build_int_cst (arg0_type, 0);
3908 /* (-x) IN [a,b] -> x in [-b, -a] */
3909 n_low = range_binop (MINUS_EXPR, exp_type,
3910 build_int_cst (exp_type, 0),
3912 n_high = range_binop (MINUS_EXPR, exp_type,
3913 build_int_cst (exp_type, 0),
3915 low = n_low, high = n_high;
3921 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3922 build_int_cst (exp_type, 1));
3925 case PLUS_EXPR: case MINUS_EXPR:
3926 if (TREE_CODE (arg1) != INTEGER_CST)
3929 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3930 move a constant to the other side. */
3931 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3934 /* If EXP is signed, any overflow in the computation is undefined,
3935 so we don't worry about it so long as our computations on
3936 the bounds don't overflow. For unsigned, overflow is defined
3937 and this is exactly the right thing. */
3938 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3939 arg0_type, low, 0, arg1, 0);
3940 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3941 arg0_type, high, 1, arg1, 0);
3942 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3943 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3946 /* Check for an unsigned range which has wrapped around the maximum
3947 value thus making n_high < n_low, and normalize it. */
3948 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3950 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3951 integer_one_node, 0);
3952 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3953 integer_one_node, 0);
3955 /* If the range is of the form +/- [ x+1, x ], we won't
3956 be able to normalize it. But then, it represents the
3957 whole range or the empty set, so make it
3959 if (tree_int_cst_equal (n_low, low)
3960 && tree_int_cst_equal (n_high, high))
3966 low = n_low, high = n_high;
3971 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3972 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3975 if (! INTEGRAL_TYPE_P (arg0_type)
3976 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3977 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3980 n_low = low, n_high = high;
3983 n_low = fold_convert (arg0_type, n_low);
3986 n_high = fold_convert (arg0_type, n_high);
3989 /* If we're converting arg0 from an unsigned type to exp's
3990 signed type, we will be doing the comparison as unsigned.
3991 The tests above have already verified that LOW and HIGH
3994 So we have to ensure that we will handle large unsigned
3995 values the same way that the current signed bounds treat
3998 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4001 tree equiv_type = lang_hooks.types.type_for_mode
4002 (TYPE_MODE (arg0_type), 1);
4004 /* A range without an upper bound is, naturally, unbounded.
4005 Since convert would have cropped a very large value, use
4006 the max value for the destination type. */
4008 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4009 : TYPE_MAX_VALUE (arg0_type);
4011 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4012 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4013 fold_convert (arg0_type,
4015 build_int_cst (arg0_type, 1));
4017 /* If the low bound is specified, "and" the range with the
4018 range for which the original unsigned value will be
4022 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4023 1, n_low, n_high, 1,
4024 fold_convert (arg0_type,
4029 in_p = (n_in_p == in_p);
4033 /* Otherwise, "or" the range with the range of the input
4034 that will be interpreted as negative. */
4035 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4036 0, n_low, n_high, 1,
4037 fold_convert (arg0_type,
4042 in_p = (in_p != n_in_p);
4047 low = n_low, high = n_high;
4057 /* If EXP is a constant, we can evaluate whether this is true or false. */
4058 if (TREE_CODE (exp) == INTEGER_CST)
4060 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4062 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4068 *pin_p = in_p, *plow = low, *phigh = high;
4072 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4073 type, TYPE, return an expression to test if EXP is in (or out of, depending
4074 on IN_P) the range. Return 0 if the test couldn't be created. */
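/* Illustrative note (editorial addition): a request to test
   2 <= x && x <= 5 (IN_P = 1, LOW = 2, HIGH = 5) is typically built as
   the single comparison (unsigned type of x) (x - 2) <= 3, using the
   wrap-around subtraction handled below; a degenerate request with
   LOW == HIGH becomes a plain equality test, and a missing bound yields
   a single <= or >= comparison.  */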
4077 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4079 tree etype = TREE_TYPE (exp);
4082 #ifdef HAVE_canonicalize_funcptr_for_compare
4083 /* Disable this optimization for function pointer expressions
4084 on targets that require function pointer canonicalization. */
4085 if (HAVE_canonicalize_funcptr_for_compare
4086 && TREE_CODE (etype) == POINTER_TYPE
4087 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4093 value = build_range_check (type, exp, 1, low, high);
4095 return invert_truthvalue (value);
4100 if (low == 0 && high == 0)
4101 return build_int_cst (type, 1);
4104 return fold_build2 (LE_EXPR, type, exp,
4105 fold_convert (etype, high));
4108 return fold_build2 (GE_EXPR, type, exp,
4109 fold_convert (etype, low));
4111 if (operand_equal_p (low, high, 0))
4112 return fold_build2 (EQ_EXPR, type, exp,
4113 fold_convert (etype, low));
4115 if (integer_zerop (low))
4117 if (! TYPE_UNSIGNED (etype))
4119 etype = lang_hooks.types.unsigned_type (etype);
4120 high = fold_convert (etype, high);
4121 exp = fold_convert (etype, exp);
4123 return build_range_check (type, exp, 1, 0, high);
4126 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4127 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4129 unsigned HOST_WIDE_INT lo;
4133 prec = TYPE_PRECISION (etype);
4134 if (prec <= HOST_BITS_PER_WIDE_INT)
4137 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4141 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4142 lo = (unsigned HOST_WIDE_INT) -1;
4145 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4147 if (TYPE_UNSIGNED (etype))
4149 etype = lang_hooks.types.signed_type (etype);
4150 exp = fold_convert (etype, exp);
4152 return fold_build2 (GT_EXPR, type, exp,
4153 build_int_cst (etype, 0));
4157 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4158 This requires wrap-around arithmetic for the type of the expression. */
4159 switch (TREE_CODE (etype))
4162 /* There is no requirement that LOW be within the range of ETYPE
4163 if the latter is a subtype. It must, however, be within the base
4164 type of ETYPE. So be sure we do the subtraction in that type. */
4165 if (TREE_TYPE (etype))
4166 etype = TREE_TYPE (etype);
4171 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4172 TYPE_UNSIGNED (etype));
4179 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4180 if (TREE_CODE (etype) == INTEGER_TYPE
4181 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4183 tree utype, minv, maxv;
4185 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4186 for the type in question, as we rely on this here. */
4187 utype = lang_hooks.types.unsigned_type (etype);
4188 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4189 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4190 integer_one_node, 1);
4191 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4193 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4200 high = fold_convert (etype, high);
4201 low = fold_convert (etype, low);
4202 exp = fold_convert (etype, exp);
4204 value = const_binop (MINUS_EXPR, high, low, 0);
4206 if (value != 0 && !TREE_OVERFLOW (value))
4207 return build_range_check (type,
4208 fold_build2 (MINUS_EXPR, etype, exp, low),
4209 1, build_int_cst (etype, 0), value);
4214 /* Return the predecessor of VAL in its type, handling the infinite case. */
4217 range_predecessor (tree val)
4219 tree type = TREE_TYPE (val);
4221 if (INTEGRAL_TYPE_P (type)
4222 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4225 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4228 /* Return the successor of VAL in its type, handling the infinite case. */
4231 range_successor (tree val)
4233 tree type = TREE_TYPE (val);
4235 if (INTEGRAL_TYPE_P (type)
4236 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4239 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4242 /* Given two ranges, see if we can merge them into one. Return 1 if we
4243 can, 0 if we can't. Set the output range into the specified parameters. */
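/* Illustrative note (editorial addition, following the case analysis
   below): merging "+ [2, 8]" with "+ [5, 10]" yields "+ [5, 8]", the
   overlap of the two ranges, whereas two included ranges that do not
   overlap cannot be expressed as a single range and the result collapses
   to false.  */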
4246 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4247 tree high0, int in1_p, tree low1, tree high1)
4255 int lowequal = ((low0 == 0 && low1 == 0)
4256 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4257 low0, 0, low1, 0)));
4258 int highequal = ((high0 == 0 && high1 == 0)
4259 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4260 high0, 1, high1, 1)));
4262 /* Make range 0 be the range that starts first, or ends last if they
4263 start at the same value. Swap them if it isn't. */
4264 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4267 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4268 high1, 1, high0, 1))))
4270 temp = in0_p, in0_p = in1_p, in1_p = temp;
4271 tem = low0, low0 = low1, low1 = tem;
4272 tem = high0, high0 = high1, high1 = tem;
4275 /* Now flag two cases, whether the ranges are disjoint or whether the
4276 second range is totally subsumed in the first. Note that the tests
4277 below are simplified by the ones above. */
4278 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4279 high0, 1, low1, 0));
4280 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4281 high1, 1, high0, 1));
4283 /* We now have four cases, depending on whether we are including or
4284 excluding the two ranges. */
4287 /* If they don't overlap, the result is false. If the second range
4288 is a subset it is the result. Otherwise, the range is from the start
4289 of the second to the end of the first. */
4291 in_p = 0, low = high = 0;
4293 in_p = 1, low = low1, high = high1;
4295 in_p = 1, low = low1, high = high0;
4298 else if (in0_p && ! in1_p)
4300 /* If they don't overlap, the result is the first range. If they are
4301 equal, the result is false. If the second range is a subset of the
4302 first, and the ranges begin at the same place, we go from just after
4303 the end of the second range to the end of the first. If the second
4304 range is not a subset of the first, or if it is a subset and both
4305 ranges end at the same place, the range starts at the start of the
4306 first range and ends just before the second range.
4307 Otherwise, we can't describe this as a single range. */
4309 in_p = 1, low = low0, high = high0;
4310 else if (lowequal && highequal)
4311 in_p = 0, low = high = 0;
4312 else if (subset && lowequal)
4314 low = range_successor (high1);
4318 else if (! subset || highequal)
4321 high = range_predecessor (low1);
4328 else if (! in0_p && in1_p)
4330 /* If they don't overlap, the result is the second range. If the second
4331 is a subset of the first, the result is false. Otherwise,
4332 the range starts just after the first range and ends at the
4333 end of the second. */
4335 in_p = 1, low = low1, high = high1;
4336 else if (subset || highequal)
4337 in_p = 0, low = high = 0;
4340 low = range_successor (high0);
4348 /* The case where we are excluding both ranges. Here the complex case
4349 is if they don't overlap. In that case, the only time we have a
4350 range is if they are adjacent. If the second is a subset of the
4351 first, the result is the first. Otherwise, the range to exclude
4352 starts at the beginning of the first range and ends at the end of the
4356 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4357 range_successor (high0),
4359 in_p = 0, low = low0, high = high1;
4362 /* Canonicalize - [min, x] into - [-, x]. */
4363 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4364 switch (TREE_CODE (TREE_TYPE (low0)))
4367 if (TYPE_PRECISION (TREE_TYPE (low0))
4368 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4372 if (tree_int_cst_equal (low0,
4373 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4377 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4378 && integer_zerop (low0))
4385 /* Canonicalize - [x, max] into - [x, -]. */
4386 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4387 switch (TREE_CODE (TREE_TYPE (high1)))
4390 if (TYPE_PRECISION (TREE_TYPE (high1))
4391 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4395 if (tree_int_cst_equal (high1,
4396 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4400 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4401 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4403 integer_one_node, 1)))
4410 /* The ranges might also be adjacent between the maximum and
4411 minimum values of the given type. For
4412 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4413 return + [x + 1, y - 1]. */
4414 if (low0 == 0 && high1 == 0)
4416 low = range_successor (high0);
4417 high = range_predecessor (low1);
4418 if (low == 0 || high == 0)
4428 in_p = 0, low = low0, high = high0;
4430 in_p = 0, low = low0, high = high1;
4433 *pin_p = in_p, *plow = low, *phigh = high;
4438 /* Subroutine of fold, looking inside expressions of the form
4439 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4440 of the COND_EXPR. This function is being used also to optimize
4441 A op B ? C : A, by reversing the comparison first.
4443 Return a folded expression whose code is not a COND_EXPR
4444 anymore, or NULL_TREE if no folding opportunity is found. */
4447 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4449 enum tree_code comp_code = TREE_CODE (arg0);
4450 tree arg00 = TREE_OPERAND (arg0, 0);
4451 tree arg01 = TREE_OPERAND (arg0, 1);
4452 tree arg1_type = TREE_TYPE (arg1);
4458 /* If we have A op 0 ? A : -A, consider applying the following
4461 A == 0? A : -A same as -A
4462 A != 0? A : -A same as A
4463 A >= 0? A : -A same as abs (A)
4464 A > 0? A : -A same as abs (A)
4465 A <= 0? A : -A same as -abs (A)
4466 A < 0? A : -A same as -abs (A)
4468 None of these transformations work for modes with signed
4469 zeros. If A is +/-0, the first two transformations will
4470 change the sign of the result (from +0 to -0, or vice
4471 versa). The last four will fix the sign of the result,
4472 even though the original expressions could be positive or
4473 negative, depending on the sign of A.
4475 Note that all these transformations are correct if A is
4476 NaN, since the two alternatives (A and -A) are also NaNs. */
4477 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4478 ? real_zerop (arg01)
4479 : integer_zerop (arg01))
4480 && ((TREE_CODE (arg2) == NEGATE_EXPR
4481 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4482 /* In the case that A is of the form X-Y, '-A' (arg2) may
4483 have already been folded to Y-X, check for that. */
4484 || (TREE_CODE (arg1) == MINUS_EXPR
4485 && TREE_CODE (arg2) == MINUS_EXPR
4486 && operand_equal_p (TREE_OPERAND (arg1, 0),
4487 TREE_OPERAND (arg2, 1), 0)
4488 && operand_equal_p (TREE_OPERAND (arg1, 1),
4489 TREE_OPERAND (arg2, 0), 0))))
4494 tem = fold_convert (arg1_type, arg1);
4495 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4498 return pedantic_non_lvalue (fold_convert (type, arg1));
4501 if (flag_trapping_math)
4506 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4507 arg1 = fold_convert (lang_hooks.types.signed_type
4508 (TREE_TYPE (arg1)), arg1);
4509 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4510 return pedantic_non_lvalue (fold_convert (type, tem));
4513 if (flag_trapping_math)
4517 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4518 arg1 = fold_convert (lang_hooks.types.signed_type
4519 (TREE_TYPE (arg1)), arg1);
4520 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4521 return negate_expr (fold_convert (type, tem));
4523 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4527 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4528 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4529 both transformations are correct when A is NaN: A != 0
4530 is then true, and A == 0 is false. */
4532 if (integer_zerop (arg01) && integer_zerop (arg2))
4534 if (comp_code == NE_EXPR)
4535 return pedantic_non_lvalue (fold_convert (type, arg1));
4536 else if (comp_code == EQ_EXPR)
4537 return build_int_cst (type, 0);
4540 /* Try some transformations of A op B ? A : B.
4542 A == B? A : B same as B
4543 A != B? A : B same as A
4544 A >= B? A : B same as max (A, B)
4545 A > B? A : B same as max (B, A)
4546 A <= B? A : B same as min (A, B)
4547 A < B? A : B same as min (B, A)
4549 As above, these transformations don't work in the presence
4550 of signed zeros. For example, if A and B are zeros of
4551 opposite sign, the first two transformations will change
4552 the sign of the result. In the last four, the original
4553 expressions give different results for (A=+0, B=-0) and
4554 (A=-0, B=+0), but the transformed expressions do not.
4556 The first two transformations are correct if either A or B
4557 is a NaN. In the first transformation, the condition will
4558 be false, and B will indeed be chosen. In the case of the
4559 second transformation, the condition A != B will be true,
4560 and A will be chosen.
4562 The conversions to max() and min() are not correct if B is
4563 a number and A is not. The conditions in the original
4564 expressions will be false, so all four give B. The min()
4565 and max() versions would give a NaN instead. */
4566 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4567 /* Avoid these transformations if the COND_EXPR may be used
4568 as an lvalue in the C++ front-end. PR c++/19199. */
4570 || (strcmp (lang_hooks.name, "GNU C++") != 0
4571 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4572 || ! maybe_lvalue_p (arg1)
4573 || ! maybe_lvalue_p (arg2)))
4575 tree comp_op0 = arg00;
4576 tree comp_op1 = arg01;
4577 tree comp_type = TREE_TYPE (comp_op0);
4579 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4580 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4590 return pedantic_non_lvalue (fold_convert (type, arg2));
4592 return pedantic_non_lvalue (fold_convert (type, arg1));
4597 /* In C++ a ?: expression can be an lvalue, so put the
4598 operand which will be used if they are equal first
4599 so that we can convert this back to the
4600 corresponding COND_EXPR. */
4601 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4603 comp_op0 = fold_convert (comp_type, comp_op0);
4604 comp_op1 = fold_convert (comp_type, comp_op1);
4605 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4606 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4607 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4608 return pedantic_non_lvalue (fold_convert (type, tem));
4615 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4617 comp_op0 = fold_convert (comp_type, comp_op0);
4618 comp_op1 = fold_convert (comp_type, comp_op1);
4619 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4620 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4621 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4622 return pedantic_non_lvalue (fold_convert (type, tem));
4626 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4627 return pedantic_non_lvalue (fold_convert (type, arg2));
4630 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4631 return pedantic_non_lvalue (fold_convert (type, arg1));
4634 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4639 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4640 we might still be able to simplify this. For example,
4641 if C1 is one less or one more than C2, this might have started
4642 out as a MIN or MAX and been transformed by this function.
4643 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
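/* For instance, x < 8 ? x : 7 (C1 == C2 + 1) can be recognized below as
   MIN (x, 7), and x > 4 ? x : 5 (C1 == C2 - 1) as MAX (x, 5).  */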
4645 if (INTEGRAL_TYPE_P (type)
4646 && TREE_CODE (arg01) == INTEGER_CST
4647 && TREE_CODE (arg2) == INTEGER_CST)
4651 /* We can replace A with C1 in this case. */
4652 arg1 = fold_convert (type, arg01);
4653 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4656 /* If C1 is C2 + 1, this is min(A, C2). */
4657 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4659 && operand_equal_p (arg01,
4660 const_binop (PLUS_EXPR, arg2,
4661 build_int_cst (type, 1), 0),
4663 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4668 /* If C1 is C2 - 1, this is min(A, C2). */
4669 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4671 && operand_equal_p (arg01,
4672 const_binop (MINUS_EXPR, arg2,
4673 build_int_cst (type, 1), 0),
4675 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4680 /* If C1 is C2 - 1, this is max(A, C2). */
4681 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4683 && operand_equal_p (arg01,
4684 const_binop (MINUS_EXPR, arg2,
4685 build_int_cst (type, 1), 0),
4687 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4692 /* If C1 is C2 + 1, this is max(A, C2). */
4693 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4695 && operand_equal_p (arg01,
4696 const_binop (PLUS_EXPR, arg2,
4697 build_int_cst (type, 1), 0),
4699 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4713 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4714 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4717 /* EXP is some logical combination of boolean tests. See if we can
4718 merge it into some range test. Return the new tree if so. */
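/* E.g. "a >= 0 && a <= 9" on a signed integral A can be merged into a
   single range test, roughly equivalent to (unsigned) a <= 9.  */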
4721 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4723 int or_op = (code == TRUTH_ORIF_EXPR
4724 || code == TRUTH_OR_EXPR);
4725 int in0_p, in1_p, in_p;
4726 tree low0, low1, low, high0, high1, high;
4727 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4728 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4731 /* If this is an OR operation, invert both sides; we will invert
4732 again at the end. */
4734 in0_p = ! in0_p, in1_p = ! in1_p;
4736 /* If both expressions are the same, if we can merge the ranges, and we
4737 can build the range test, return it or it inverted. If one of the
4738 ranges is always true or always false, consider it to be the same
4739 expression as the other. */
4740 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4741 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4743 && 0 != (tem = (build_range_check (type,
4745 : rhs != 0 ? rhs : integer_zero_node,
4747 return or_op ? invert_truthvalue (tem) : tem;
4749 /* On machines where branches are expensive, if this is a
4750 short-circuited branch and the underlying object on both sides
4751 is the same, make a non-short-circuit operation. */
4752 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4753 && lhs != 0 && rhs != 0
4754 && (code == TRUTH_ANDIF_EXPR
4755 || code == TRUTH_ORIF_EXPR)
4756 && operand_equal_p (lhs, rhs, 0))
4758 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4759 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4760 which cases we can't do this. */
4761 if (simple_operand_p (lhs))
4762 return build2 (code == TRUTH_ANDIF_EXPR
4763 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4766 else if (lang_hooks.decls.global_bindings_p () == 0
4767 && ! CONTAINS_PLACEHOLDER_P (lhs))
4769 tree common = save_expr (lhs);
4771 if (0 != (lhs = build_range_check (type, common,
4772 or_op ? ! in0_p : in0_p,
4774 && (0 != (rhs = build_range_check (type, common,
4775 or_op ? ! in1_p : in1_p,
4777 return build2 (code == TRUTH_ANDIF_EXPR
4778 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4786 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4787 bit value. Arrange things so the extra bits will be set to zero if and
4788 only if C is sign-extended to its full width. If MASK is nonzero,
4789 it is an INTEGER_CST that should be AND'ed with the extra bits. */
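/* Worked example, assuming an 8-bit mode and P == 4: for C == 0b11111010
   (already sign-extended) the result is 0b00001010 with the extra bits
   clear, while for C == 0b00001010 it is 0b11111010 with them set.  */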
4792 unextend (tree c, int p, int unsignedp, tree mask)
4794 tree type = TREE_TYPE (c);
4795 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4798 if (p == modesize || unsignedp)
4801 /* We work by getting just the sign bit into the low-order bit, then
4802 into the high-order bit, then sign-extend. We then XOR that value
4804 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4805 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4807 /* We must use a signed type in order to get an arithmetic right shift.
4808 However, we must also avoid introducing accidental overflows, so that
4809 a subsequent call to integer_zerop will work. Hence we must
4810 do the type conversion here. At this point, the constant is either
4811 zero or one, and the conversion to a signed type can never overflow.
4812 We could get an overflow if this conversion is done anywhere else. */
4813 if (TYPE_UNSIGNED (type))
4814 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4816 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4817 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4819 temp = const_binop (BIT_AND_EXPR, temp,
4820 fold_convert (TREE_TYPE (c), mask), 0);
4821 /* If necessary, convert the type back to match the type of C. */
4822 if (TYPE_UNSIGNED (type))
4823 temp = fold_convert (type, temp);
4825 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4828 /* Find ways of folding logical expressions of LHS and RHS:
4829 Try to merge two comparisons to the same innermost item.
4830 Look for range tests like "ch >= '0' && ch <= '9'".
4831 Look for combinations of simple terms on machines with expensive branches
4832 and evaluate the RHS unconditionally.
4834 For example, if we have p->a == 2 && p->b == 4 and we can make an
4835 object large enough to span both A and B, we can do this with a comparison
4836 against the object ANDed with a mask.
4838 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4839 operations to do this with one comparison.
4841 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4842 function and the one above.
4844 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4845 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4847 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4850 We return the simplified tree or 0 if no optimization is possible. */
4853 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4855 /* If this is the "or" of two comparisons, we can do something if
4856 the comparisons are NE_EXPR. If this is the "and", we can do something
4857 if the comparisons are EQ_EXPR. I.e.,
4858 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4860 WANTED_CODE is this operation code. For single bit fields, we can
4861 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4862 comparison for one-bit fields. */
4864 enum tree_code wanted_code;
4865 enum tree_code lcode, rcode;
4866 tree ll_arg, lr_arg, rl_arg, rr_arg;
4867 tree ll_inner, lr_inner, rl_inner, rr_inner;
4868 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4869 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4870 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4871 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4872 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4873 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4874 enum machine_mode lnmode, rnmode;
4875 tree ll_mask, lr_mask, rl_mask, rr_mask;
4876 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4877 tree l_const, r_const;
4878 tree lntype, rntype, result;
4879 int first_bit, end_bit;
4881 tree orig_lhs = lhs, orig_rhs = rhs;
4882 enum tree_code orig_code = code;
4884 /* Start by getting the comparison codes. Fail if anything is volatile.
4885 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4886 it were surrounded with a NE_EXPR. */
4888 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4891 lcode = TREE_CODE (lhs);
4892 rcode = TREE_CODE (rhs);
4894 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4896 lhs = build2 (NE_EXPR, truth_type, lhs,
4897 build_int_cst (TREE_TYPE (lhs), 0));
4901 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4903 rhs = build2 (NE_EXPR, truth_type, rhs,
4904 build_int_cst (TREE_TYPE (rhs), 0));
4908 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4909 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4912 ll_arg = TREE_OPERAND (lhs, 0);
4913 lr_arg = TREE_OPERAND (lhs, 1);
4914 rl_arg = TREE_OPERAND (rhs, 0);
4915 rr_arg = TREE_OPERAND (rhs, 1);
4917 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4918 if (simple_operand_p (ll_arg)
4919 && simple_operand_p (lr_arg))
4922 if (operand_equal_p (ll_arg, rl_arg, 0)
4923 && operand_equal_p (lr_arg, rr_arg, 0))
4925 result = combine_comparisons (code, lcode, rcode,
4926 truth_type, ll_arg, lr_arg);
4930 else if (operand_equal_p (ll_arg, rr_arg, 0)
4931 && operand_equal_p (lr_arg, rl_arg, 0))
4933 result = combine_comparisons (code, lcode,
4934 swap_tree_comparison (rcode),
4935 truth_type, ll_arg, lr_arg);
4941 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4942 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4944 /* If the RHS can be evaluated unconditionally and its operands are
4945 simple, it wins to evaluate the RHS unconditionally on machines
4946 with expensive branches. In this case, this isn't a comparison
4947 that can be merged. Avoid doing this if the RHS is a floating-point
4948 comparison since those can trap. */
4950 if (BRANCH_COST >= 2
4951 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4952 && simple_operand_p (rl_arg)
4953 && simple_operand_p (rr_arg))
4955 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4956 if (code == TRUTH_OR_EXPR
4957 && lcode == NE_EXPR && integer_zerop (lr_arg)
4958 && rcode == NE_EXPR && integer_zerop (rr_arg)
4959 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4960 return build2 (NE_EXPR, truth_type,
4961 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4963 build_int_cst (TREE_TYPE (ll_arg), 0));
4965 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4966 if (code == TRUTH_AND_EXPR
4967 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4968 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4969 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4970 return build2 (EQ_EXPR, truth_type,
4971 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4973 build_int_cst (TREE_TYPE (ll_arg), 0));
4975 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4977 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4978 return build2 (code, truth_type, lhs, rhs);
4983 /* See if the comparisons can be merged. Then get all the parameters for
4986 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4987 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4991 ll_inner = decode_field_reference (ll_arg,
4992 &ll_bitsize, &ll_bitpos, &ll_mode,
4993 &ll_unsignedp, &volatilep, &ll_mask,
4995 lr_inner = decode_field_reference (lr_arg,
4996 &lr_bitsize, &lr_bitpos, &lr_mode,
4997 &lr_unsignedp, &volatilep, &lr_mask,
4999 rl_inner = decode_field_reference (rl_arg,
5000 &rl_bitsize, &rl_bitpos, &rl_mode,
5001 &rl_unsignedp, &volatilep, &rl_mask,
5003 rr_inner = decode_field_reference (rr_arg,
5004 &rr_bitsize, &rr_bitpos, &rr_mode,
5005 &rr_unsignedp, &volatilep, &rr_mask,
5008 /* The inner operation on the lhs of each comparison must be the same
5009 if we are to be able to do anything.
5010 Then see if we have constants. If not, the same must be true for
5012 if (volatilep || ll_inner == 0 || rl_inner == 0
5013 || ! operand_equal_p (ll_inner, rl_inner, 0))
5016 if (TREE_CODE (lr_arg) == INTEGER_CST
5017 && TREE_CODE (rr_arg) == INTEGER_CST)
5018 l_const = lr_arg, r_const = rr_arg;
5019 else if (lr_inner == 0 || rr_inner == 0
5020 || ! operand_equal_p (lr_inner, rr_inner, 0))
5023 l_const = r_const = 0;
5025 /* If either comparison code is not correct for our logical operation,
5026 fail. However, we can convert a one-bit comparison against zero into
5027 the opposite comparison against that bit being set in the field. */
5029 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5030 if (lcode != wanted_code)
5032 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5034 /* Make the left operand unsigned, since we are only interested
5035 in the value of one bit. Otherwise we are doing the wrong
5044 /* This is analogous to the code for l_const above. */
5045 if (rcode != wanted_code)
5047 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5056 /* After this point all optimizations will generate bit-field
5057 references, which we might not want. */
5058 if (! lang_hooks.can_use_bit_fields_p ())
5061 /* See if we can find a mode that contains both fields being compared on
5062 the left. If we can't, fail. Otherwise, update all constants and masks
5063 to be relative to a field of that size. */
5064 first_bit = MIN (ll_bitpos, rl_bitpos);
5065 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5066 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5067 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5069 if (lnmode == VOIDmode)
5072 lnbitsize = GET_MODE_BITSIZE (lnmode);
5073 lnbitpos = first_bit & ~ (lnbitsize - 1);
5074 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5075 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5077 if (BYTES_BIG_ENDIAN)
5079 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5080 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5083 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5084 size_int (xll_bitpos), 0);
5085 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5086 size_int (xrl_bitpos), 0);
5090 l_const = fold_convert (lntype, l_const);
5091 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5092 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5093 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5094 fold_build1 (BIT_NOT_EXPR,
5098 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5100 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5105 r_const = fold_convert (lntype, r_const);
5106 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5107 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5108 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5109 fold_build1 (BIT_NOT_EXPR,
5113 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5115 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5119 /* If the right sides are not constant, do the same for it. Also,
5120 disallow this optimization if a size or signedness mismatch occurs
5121 between the left and right sides. */
5124 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5125 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5126 /* Make sure the two fields on the right
5127 correspond to the left without being swapped. */
5128 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5131 first_bit = MIN (lr_bitpos, rr_bitpos);
5132 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5133 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5134 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5136 if (rnmode == VOIDmode)
5139 rnbitsize = GET_MODE_BITSIZE (rnmode);
5140 rnbitpos = first_bit & ~ (rnbitsize - 1);
5141 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5142 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5144 if (BYTES_BIG_ENDIAN)
5146 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5147 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5150 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5151 size_int (xlr_bitpos), 0);
5152 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5153 size_int (xrr_bitpos), 0);
5155 /* Make a mask that corresponds to both fields being compared.
5156 Do this for both items being compared. If the operands are the
5157 same size and the bits being compared are in the same position
5158 then we can do this by masking both and comparing the masked
5160 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5161 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5162 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5164 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5165 ll_unsignedp || rl_unsignedp);
5166 if (! all_ones_mask_p (ll_mask, lnbitsize))
5167 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5169 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5170 lr_unsignedp || rr_unsignedp);
5171 if (! all_ones_mask_p (lr_mask, rnbitsize))
5172 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5174 return build2 (wanted_code, truth_type, lhs, rhs);
5177 /* There is still another way we can do something: If both pairs of
5178 fields being compared are adjacent, we may be able to make a wider
5179 field containing them both.
5181 Note that we still must mask the lhs/rhs expressions. Furthermore,
5182 the mask must be shifted to account for the shift done by
5183 make_bit_field_ref. */
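      /* E.g. if A and B are adjacent 8-bit fields, p->a == q->a && p->b == q->b
	 can be tested with a single wider (e.g. 16-bit) field fetch on each
	 side and one comparison.  */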
5184 if ((ll_bitsize + ll_bitpos == rl_bitpos
5185 && lr_bitsize + lr_bitpos == rr_bitpos)
5186 || (ll_bitpos == rl_bitpos + rl_bitsize
5187 && lr_bitpos == rr_bitpos + rr_bitsize))
5191 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5192 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5193 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5194 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5196 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5197 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5198 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5199 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5201 /* Convert to the smaller type before masking out unwanted bits. */
5203 if (lntype != rntype)
5205 if (lnbitsize > rnbitsize)
5207 lhs = fold_convert (rntype, lhs);
5208 ll_mask = fold_convert (rntype, ll_mask);
5211 else if (lnbitsize < rnbitsize)
5213 rhs = fold_convert (lntype, rhs);
5214 lr_mask = fold_convert (lntype, lr_mask);
5219 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5220 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5222 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5223 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5225 return build2 (wanted_code, truth_type, lhs, rhs);
5231 /* Handle the case of comparisons with constants. If there is something in
5232 common between the masks, those bits of the constants must be the same.
5233 If not, the condition is always false. Test for this to avoid generating
5234 incorrect code below. */
5235 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5236 if (! integer_zerop (result)
5237 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5238 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5240 if (wanted_code == NE_EXPR)
5242 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5243 return constant_boolean_node (true, truth_type);
5247 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5248 return constant_boolean_node (false, truth_type);
5252 /* Construct the expression we will return. First get the component
5253 reference we will make. Unless the mask is all ones the width of
5254 that field, perform the mask operation. Then compare with the
5256 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5257 ll_unsignedp || rl_unsignedp);
5259 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5260 if (! all_ones_mask_p (ll_mask, lnbitsize))
5261 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5263 return build2 (wanted_code, truth_type, result,
5264 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5267 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5271 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5274 enum tree_code op_code;
5275 tree comp_const = op1;
5277 int consts_equal, consts_lt;
5280 STRIP_SIGN_NOPS (arg0);
5282 op_code = TREE_CODE (arg0);
5283 minmax_const = TREE_OPERAND (arg0, 1);
5284 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5285 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5286 inner = TREE_OPERAND (arg0, 0);
5288 /* If something does not permit us to optimize, return the original tree. */
5289 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5290 || TREE_CODE (comp_const) != INTEGER_CST
5291 || TREE_CONSTANT_OVERFLOW (comp_const)
5292 || TREE_CODE (minmax_const) != INTEGER_CST
5293 || TREE_CONSTANT_OVERFLOW (minmax_const))
5296 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5297 and GT_EXPR, doing the rest with recursive calls using logical
5301 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5303 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5306 return invert_truthvalue (tem);
5312 fold_build2 (TRUTH_ORIF_EXPR, type,
5313 optimize_minmax_comparison
5314 (EQ_EXPR, type, arg0, comp_const),
5315 optimize_minmax_comparison
5316 (GT_EXPR, type, arg0, comp_const));
5319 if (op_code == MAX_EXPR && consts_equal)
5320 /* MAX (X, 0) == 0 -> X <= 0 */
5321 return fold_build2 (LE_EXPR, type, inner, comp_const);
5323 else if (op_code == MAX_EXPR && consts_lt)
5324 /* MAX (X, 0) == 5 -> X == 5 */
5325 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5327 else if (op_code == MAX_EXPR)
5328 /* MAX (X, 0) == -1 -> false */
5329 return omit_one_operand (type, integer_zero_node, inner);
5331 else if (consts_equal)
5332 /* MIN (X, 0) == 0 -> X >= 0 */
5333 return fold_build2 (GE_EXPR, type, inner, comp_const);
5336 /* MIN (X, 0) == 5 -> false */
5337 return omit_one_operand (type, integer_zero_node, inner);
5340 /* MIN (X, 0) == -1 -> X == -1 */
5341 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5344 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5345 /* MAX (X, 0) > 0 -> X > 0
5346 MAX (X, 0) > 5 -> X > 5 */
5347 return fold_build2 (GT_EXPR, type, inner, comp_const);
5349 else if (op_code == MAX_EXPR)
5350 /* MAX (X, 0) > -1 -> true */
5351 return omit_one_operand (type, integer_one_node, inner);
5353 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5354 /* MIN (X, 0) > 0 -> false
5355 MIN (X, 0) > 5 -> false */
5356 return omit_one_operand (type, integer_zero_node, inner);
5359 /* MIN (X, 0) > -1 -> X > -1 */
5360 return fold_build2 (GT_EXPR, type, inner, comp_const);
5367 /* T is an integer expression that is being multiplied, divided, or taken a
5368 modulus (CODE says which and what kind of divide or modulus) by a
5369 constant C. See if we can eliminate that operation by folding it with
5370 other operations already in T. WIDE_TYPE, if non-null, is a type that
5371 should be used for the computation if wider than our type.
5373 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5374 (X * 2) + (Y * 4). We must, however, be assured that either the original
5375 expression would not overflow or that overflow is undefined for the type
5376 in the language in question.
5378 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5379 the machine has a multiply-accumulate insn or that this is part of an
5380 addressing calculation.
5382 If we return a non-null expression, it is an equivalent form of the
5383 original computation, but need not be in the original type. */
5386 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5388 /* To avoid exponential search depth, refuse to allow recursion past
5389 three levels. Beyond that (1) it's highly unlikely that we'll find
5390 something interesting and (2) we've probably processed it before
5391 when we built the inner expression. */
5400 ret = extract_muldiv_1 (t, c, code, wide_type);
5407 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5409 tree type = TREE_TYPE (t);
5410 enum tree_code tcode = TREE_CODE (t);
5411 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5412 > GET_MODE_SIZE (TYPE_MODE (type)))
5413 ? wide_type : type);
5415 int same_p = tcode == code;
5416 tree op0 = NULL_TREE, op1 = NULL_TREE;
5418 /* Don't deal with constants of zero here; they confuse the code below. */
5419 if (integer_zerop (c))
5422 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5423 op0 = TREE_OPERAND (t, 0);
5425 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5426 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5428 /* Note that we need not handle conditional operations here since fold
5429 already handles those cases. So just do arithmetic here. */
5433 /* For a constant, we can always simplify if we are a multiply
5434 or (for divide and modulus) if it is a multiple of our constant. */
5435 if (code == MULT_EXPR
5436 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5437 return const_binop (code, fold_convert (ctype, t),
5438 fold_convert (ctype, c), 0);
5441 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5442 /* If op0 is an expression ... */
5443 if ((COMPARISON_CLASS_P (op0)
5444 || UNARY_CLASS_P (op0)
5445 || BINARY_CLASS_P (op0)
5446 || EXPRESSION_CLASS_P (op0))
5447 /* ... and is unsigned, and its type is smaller than ctype,
5448 then we cannot pass through as widening. */
5449 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5450 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5451 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5452 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5453 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5454 /* ... or this is a truncation (t is narrower than op0),
5455 then we cannot pass through this narrowing. */
5456 || (GET_MODE_SIZE (TYPE_MODE (type))
5457 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5458 /* ... or signedness changes for division or modulus,
5459 then we cannot pass through this conversion. */
5460 || (code != MULT_EXPR
5461 && (TYPE_UNSIGNED (ctype)
5462 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5465 /* Pass the constant down and see if we can make a simplification. If
5466 we can, replace this expression with the inner simplification for
5467 possible later conversion to our or some other type. */
5468 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5469 && TREE_CODE (t2) == INTEGER_CST
5470 && ! TREE_CONSTANT_OVERFLOW (t2)
5471 && (0 != (t1 = extract_muldiv (op0, t2, code,
5473 ? ctype : NULL_TREE))))
5478 /* If widening the type changes it from signed to unsigned, then we
5479 must avoid building ABS_EXPR itself as unsigned. */
5480 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5482 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5483 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5485 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5486 return fold_convert (ctype, t1);
5492 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5493 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5496 case MIN_EXPR: case MAX_EXPR:
5497 /* If widening the type changes the signedness, then we can't perform
5498 this optimization as that changes the result. */
5499 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5502 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5503 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5504 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5506 if (tree_int_cst_sgn (c) < 0)
5507 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5509 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5510 fold_convert (ctype, t2));
5514 case LSHIFT_EXPR: case RSHIFT_EXPR:
5515 /* If the second operand is constant, this is a multiplication
5516 or floor division by a power of two, so we can treat it that
5517 way unless the multiplier or divisor overflows. Signed
5518 left-shift overflow is implementation-defined rather than
5519 undefined in C90, so do not convert signed left shift into
5521 if (TREE_CODE (op1) == INTEGER_CST
5522 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5523 /* const_binop may not detect overflow correctly,
5524 so check for it explicitly here. */
5525 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5526 && TREE_INT_CST_HIGH (op1) == 0
5527 && 0 != (t1 = fold_convert (ctype,
5528 const_binop (LSHIFT_EXPR,
5531 && ! TREE_OVERFLOW (t1))
5532 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5533 ? MULT_EXPR : FLOOR_DIV_EXPR,
5534 ctype, fold_convert (ctype, op0), t1),
5535 c, code, wide_type);
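      /* For instance, for unsigned X, (X << 3) multiplied by a constant 2
	 is rewritten here as (X * 8) and then combined by the multiply
	 handling below into X * 16.  */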
5538 case PLUS_EXPR: case MINUS_EXPR:
5539 /* See if we can eliminate the operation on both sides. If we can, we
5540 can return a new PLUS or MINUS. If we can't, the only remaining
5541 cases where we can do anything are if the second operand is a
5543 t1 = extract_muldiv (op0, c, code, wide_type);
5544 t2 = extract_muldiv (op1, c, code, wide_type);
5545 if (t1 != 0 && t2 != 0
5546 && (code == MULT_EXPR
5547 /* If not multiplication, we can only do this if both operands
5548 are divisible by c. */
5549 || (multiple_of_p (ctype, op0, c)
5550 && multiple_of_p (ctype, op1, c))))
5551 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5552 fold_convert (ctype, t2));
5554 /* If this was a subtraction, negate OP1 and set it to be an addition.
5555 This simplifies the logic below. */
5556 if (tcode == MINUS_EXPR)
5557 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5559 if (TREE_CODE (op1) != INTEGER_CST)
5562 /* If either OP1 or C is negative, this optimization is not safe for
5563 some of the division and remainder types while for others we need
5564 to change the code. */
5565 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5567 if (code == CEIL_DIV_EXPR)
5568 code = FLOOR_DIV_EXPR;
5569 else if (code == FLOOR_DIV_EXPR)
5570 code = CEIL_DIV_EXPR;
5571 else if (code != MULT_EXPR
5572 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5576 /* If it's a multiply or a division/modulus operation of a multiple
5577 of our constant, do the operation and verify it doesn't overflow. */
5578 if (code == MULT_EXPR
5579 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5581 op1 = const_binop (code, fold_convert (ctype, op1),
5582 fold_convert (ctype, c), 0);
5583 /* We allow the constant to overflow with wrapping semantics. */
5585 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5591 /* If we have an unsigned type that is not a sizetype, we cannot widen
5592 the operation since it will change the result if the original
5593 computation overflowed. */
5594 if (TYPE_UNSIGNED (ctype)
5595 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5599 /* If we were able to eliminate our operation from the first side,
5600 apply our operation to the second side and reform the PLUS. */
5601 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5602 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5604 /* The last case is if we are a multiply. In that case, we can
5605 apply the distributive law to commute the multiply and addition
5606 if the multiplication of the constants doesn't overflow. */
5607 if (code == MULT_EXPR)
5608 return fold_build2 (tcode, ctype,
5609 fold_build2 (code, ctype,
5610 fold_convert (ctype, op0),
5611 fold_convert (ctype, c)),
5617 /* We have a special case here if we are doing something like
5618 (C * 8) % 4 since we know that's zero. */
5619 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5620 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5621 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5622 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5623 return omit_one_operand (type, integer_zero_node, op0);
5625 /* ... fall through ... */
5627 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5628 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5629 /* If we can extract our operation from the LHS, do so and return a
5630 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5631 do something only if the second operand is a constant. */
5633 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5634 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5635 fold_convert (ctype, op1));
5636 else if (tcode == MULT_EXPR && code == MULT_EXPR
5637 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5638 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5639 fold_convert (ctype, t1));
5640 else if (TREE_CODE (op1) != INTEGER_CST)
5643 /* If these are the same operation types, we can associate them
5644 assuming no overflow. */
5646 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5647 fold_convert (ctype, c), 0))
5648 && ! TREE_OVERFLOW (t1))
5649 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5651 /* If these operations "cancel" each other, we have the main
5652 optimizations of this pass, which occur when either constant is a
5653 multiple of the other, in which case we replace this with either an
5654 operation of CODE or TCODE.
5656 If we have an unsigned type that is not a sizetype, we cannot do
5657 this since it will change the result if the original computation
5659 if ((! TYPE_UNSIGNED (ctype)
5660 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5662 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5663 || (tcode == MULT_EXPR
5664 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5665 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5667 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5668 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5669 fold_convert (ctype,
5670 const_binop (TRUNC_DIV_EXPR,
5672 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5673 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5674 fold_convert (ctype,
5675 const_binop (TRUNC_DIV_EXPR,
5687 /* Return a node which has the indicated constant VALUE (either 0 or
5688 1), and is of the indicated TYPE. */
5691 constant_boolean_node (int value, tree type)
5693 if (type == integer_type_node)
5694 return value ? integer_one_node : integer_zero_node;
5695 else if (type == boolean_type_node)
5696 return value ? boolean_true_node : boolean_false_node;
5698 return build_int_cst (type, value);
5702 /* Return true if expr looks like an ARRAY_REF and set base and
5703 offset to the appropriate trees. If there is no offset,
5704 offset is set to NULL_TREE. Base will be canonicalized to
5705 something you can get the element type from using
5706 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5707 in bytes relative to the base. */
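/* E.g. for &a[i] the base is "a" and the offset is i times the element
   size in bytes; for a plain pointer variable p the base is "p" and the
   offset is NULL_TREE.  */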
5710 extract_array_ref (tree expr, tree *base, tree *offset)
5712 /* One canonical form is a PLUS_EXPR with the first
5713 argument being an ADDR_EXPR with a possible NOP_EXPR
5715 if (TREE_CODE (expr) == PLUS_EXPR)
5717 tree op0 = TREE_OPERAND (expr, 0);
5718 tree inner_base, dummy1;
5719 /* Strip NOP_EXPRs here because the C frontends and/or
5720 folders may present us with (int *)&x.a + 4B. */
5722 if (extract_array_ref (op0, &inner_base, &dummy1))
5725 if (dummy1 == NULL_TREE)
5726 *offset = TREE_OPERAND (expr, 1);
5728 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5729 dummy1, TREE_OPERAND (expr, 1));
5733 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5734 which we transform into an ADDR_EXPR with appropriate
5735 offset. For other arguments to the ADDR_EXPR we assume
5736 zero offset and as such do not care about the ADDR_EXPR
5737 type and strip possible nops from it. */
5738 else if (TREE_CODE (expr) == ADDR_EXPR)
5740 tree op0 = TREE_OPERAND (expr, 0);
5741 if (TREE_CODE (op0) == ARRAY_REF)
5743 tree idx = TREE_OPERAND (op0, 1);
5744 *base = TREE_OPERAND (op0, 0);
5745 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5746 array_ref_element_size (op0));
5750 /* Handle array-to-pointer decay as &a. */
5751 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5752 *base = TREE_OPERAND (expr, 0);
5755 *offset = NULL_TREE;
5759 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5760 else if (SSA_VAR_P (expr)
5761 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5764 *offset = NULL_TREE;
5772 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5773 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5774 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5775 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5776 COND is the first argument to CODE; otherwise (as in the example
5777 given here), it is the second argument. TYPE is the type of the
5778 original expression. Return NULL_TREE if no simplification is
5782 fold_binary_op_with_conditional_arg (enum tree_code code,
5783 tree type, tree op0, tree op1,
5784 tree cond, tree arg, int cond_first_p)
5786 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5787 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5788 tree test, true_value, false_value;
5789 tree lhs = NULL_TREE;
5790 tree rhs = NULL_TREE;
5792 /* This transformation is only worthwhile if we don't have to wrap
5793 arg in a SAVE_EXPR, and the operation can be simplified on at least
5794 one of the branches once it's pushed inside the COND_EXPR. */
5795 if (!TREE_CONSTANT (arg))
5798 if (TREE_CODE (cond) == COND_EXPR)
5800 test = TREE_OPERAND (cond, 0);
5801 true_value = TREE_OPERAND (cond, 1);
5802 false_value = TREE_OPERAND (cond, 2);
5803 /* If this operand is a throw expression, then it does not make
5804 sense to try to perform a logical or arithmetic operation
5806 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5808 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5813 tree testtype = TREE_TYPE (cond);
5815 true_value = constant_boolean_node (true, testtype);
5816 false_value = constant_boolean_node (false, testtype);
5819 arg = fold_convert (arg_type, arg);
5822 true_value = fold_convert (cond_type, true_value);
5824 lhs = fold_build2 (code, type, true_value, arg);
5826 lhs = fold_build2 (code, type, arg, true_value);
5830 false_value = fold_convert (cond_type, false_value);
5832 rhs = fold_build2 (code, type, false_value, arg);
5834 rhs = fold_build2 (code, type, arg, false_value);
5837 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5838 return fold_convert (type, test);
5842 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5844 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5845 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5846 ADDEND is the same as X.
5848 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5849 and finite. The problematic cases are when X is zero, and its mode
5850 has signed zeros. In the case of rounding towards -infinity,
5851 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5852 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5855 fold_real_zero_addition_p (tree type, tree addend, int negate)
5857 if (!real_zerop (addend))
5860 /* Don't allow the fold with -fsignaling-nans. */
5861 if (HONOR_SNANS (TYPE_MODE (type)))
5864 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5865 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5868 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5869 if (TREE_CODE (addend) == REAL_CST
5870 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5873 /* The mode has signed zeros, and we have to honor their sign.
5874 In this situation, there is only one case we can return true for.
5875 X - 0 is the same as X unless rounding towards -infinity is
5877 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5880 /* Subroutine of fold() that checks comparisons of built-in math
5881 functions against real constants.
5883 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5884 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5885 is the type of the result and ARG0 and ARG1 are the operands of the
5886 comparison. ARG1 must be a TREE_REAL_CST.
5888 The function returns the constant folded tree if a simplification
5889 can be made, and NULL_TREE otherwise. */
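/* For instance, sqrt (x) > 2.0 can be folded to x > 4.0, and
   sqrt (x) < -1.0 to a constant false.  */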
5892 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5893 tree type, tree arg0, tree arg1)
5897 if (BUILTIN_SQRT_P (fcode))
5899 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5900 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5902 c = TREE_REAL_CST (arg1);
5903 if (REAL_VALUE_NEGATIVE (c))
5905 /* sqrt(x) < y is always false, if y is negative. */
5906 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5907 return omit_one_operand (type, integer_zero_node, arg);
5909 /* sqrt(x) > y is always true, if y is negative and we
5910 don't care about NaNs, i.e. negative values of x. */
5911 if (code == NE_EXPR || !HONOR_NANS (mode))
5912 return omit_one_operand (type, integer_one_node, arg);
5914 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5915 return fold_build2 (GE_EXPR, type, arg,
5916 build_real (TREE_TYPE (arg), dconst0));
5918 else if (code == GT_EXPR || code == GE_EXPR)
5922 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5923 real_convert (&c2, mode, &c2);
5925 if (REAL_VALUE_ISINF (c2))
5927 /* sqrt(x) > y is x == +Inf, when y is very large. */
5928 if (HONOR_INFINITIES (mode))
5929 return fold_build2 (EQ_EXPR, type, arg,
5930 build_real (TREE_TYPE (arg), c2));
5932 /* sqrt(x) > y is always false, when y is very large
5933 and we don't care about infinities. */
5934 return omit_one_operand (type, integer_zero_node, arg);
5937 /* sqrt(x) > c is the same as x > c*c. */
5938 return fold_build2 (code, type, arg,
5939 build_real (TREE_TYPE (arg), c2));
5941 else if (code == LT_EXPR || code == LE_EXPR)
5945 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5946 real_convert (&c2, mode, &c2);
5948 if (REAL_VALUE_ISINF (c2))
5950 /* sqrt(x) < y is always true, when y is a very large
5951 value and we don't care about NaNs or Infinities. */
5952 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5953 return omit_one_operand (type, integer_one_node, arg);
5955 /* sqrt(x) < y is x != +Inf when y is very large and we
5956 don't care about NaNs. */
5957 if (! HONOR_NANS (mode))
5958 return fold_build2 (NE_EXPR, type, arg,
5959 build_real (TREE_TYPE (arg), c2));
5961 /* sqrt(x) < y is x >= 0 when y is very large and we
5962 don't care about Infinities. */
5963 if (! HONOR_INFINITIES (mode))
5964 return fold_build2 (GE_EXPR, type, arg,
5965 build_real (TREE_TYPE (arg), dconst0));
5967 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5968 if (lang_hooks.decls.global_bindings_p () != 0
5969 || CONTAINS_PLACEHOLDER_P (arg))
5972 arg = save_expr (arg);
5973 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5974 fold_build2 (GE_EXPR, type, arg,
5975 build_real (TREE_TYPE (arg),
5977 fold_build2 (NE_EXPR, type, arg,
5978 build_real (TREE_TYPE (arg),
5982 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5983 if (! HONOR_NANS (mode))
5984 return fold_build2 (code, type, arg,
5985 build_real (TREE_TYPE (arg), c2));
5987 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5988 if (lang_hooks.decls.global_bindings_p () == 0
5989 && ! CONTAINS_PLACEHOLDER_P (arg))
5991 arg = save_expr (arg);
5992 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5993 fold_build2 (GE_EXPR, type, arg,
5994 build_real (TREE_TYPE (arg),
5996 fold_build2 (code, type, arg,
5997 build_real (TREE_TYPE (arg),
6006 /* Subroutine of fold() that optimizes comparisons against Infinities,
6007 either +Inf or -Inf.
6009 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6010 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6011 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6013 The function returns the constant folded tree if a simplification
6014 can be made, and NULL_TREE otherwise. */
6017 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6019 enum machine_mode mode;
6020 REAL_VALUE_TYPE max;
6024 mode = TYPE_MODE (TREE_TYPE (arg0));
6026 /* For negative infinity swap the sense of the comparison. */
6027 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6029 code = swap_tree_comparison (code);
6034 /* x > +Inf is always false, if we ignore sNaNs. */
6035 if (HONOR_SNANS (mode))
6037 return omit_one_operand (type, integer_zero_node, arg0);
6040 /* x <= +Inf is always true, if we don't care about NaNs. */
6041 if (! HONOR_NANS (mode))
6042 return omit_one_operand (type, integer_one_node, arg0);
6044 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6045 if (lang_hooks.decls.global_bindings_p () == 0
6046 && ! CONTAINS_PLACEHOLDER_P (arg0))
6048 arg0 = save_expr (arg0);
6049 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6055 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6056 real_maxval (&max, neg, mode);
6057 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6058 arg0, build_real (TREE_TYPE (arg0), max));
6061 /* x < +Inf is always equal to x <= DBL_MAX. */
6062 real_maxval (&max, neg, mode);
6063 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6064 arg0, build_real (TREE_TYPE (arg0), max));
6067 /* x != +Inf is always equal to !(x > DBL_MAX). */
6068 real_maxval (&max, neg, mode);
6069 if (! HONOR_NANS (mode))
6070 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6071 arg0, build_real (TREE_TYPE (arg0), max));
6073 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6074 arg0, build_real (TREE_TYPE (arg0), max));
6075 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6084 /* Subroutine of fold() that optimizes comparisons of a division by
6085 a nonzero integer constant against an integer constant, i.e.
6088 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6089 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6090 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6092 The function returns the constant folded tree if a simplification
6093 can be made, and NULL_TREE otherwise. */
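/* For example, for unsigned X the test X / 4 == 3 holds exactly when X is
   in [12, 15], so it can be rewritten as a single range check on X.  */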
6096 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6098 tree prod, tmp, hi, lo;
6099 tree arg00 = TREE_OPERAND (arg0, 0);
6100 tree arg01 = TREE_OPERAND (arg0, 1);
6101 unsigned HOST_WIDE_INT lpart;
6102 HOST_WIDE_INT hpart;
6103 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6107 /* We have to do this the hard way to detect unsigned overflow.
6108 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6109 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6110 TREE_INT_CST_HIGH (arg01),
6111 TREE_INT_CST_LOW (arg1),
6112 TREE_INT_CST_HIGH (arg1),
6113 &lpart, &hpart, unsigned_p);
6114 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6115 prod = force_fit_type (prod, -1, overflow, false);
6116 neg_overflow = false;
6120 tmp = int_const_binop (MINUS_EXPR, arg01,
6121 build_int_cst (TREE_TYPE (arg01), 1), 0);
6124 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6125 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6126 TREE_INT_CST_HIGH (prod),
6127 TREE_INT_CST_LOW (tmp),
6128 TREE_INT_CST_HIGH (tmp),
6129 &lpart, &hpart, unsigned_p);
6130 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6131 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6132 TREE_CONSTANT_OVERFLOW (prod));
6134 else if (tree_int_cst_sgn (arg01) >= 0)
6136 tmp = int_const_binop (MINUS_EXPR, arg01,
6137 build_int_cst (TREE_TYPE (arg01), 1), 0);
6138 switch (tree_int_cst_sgn (arg1))
6141 neg_overflow = true;
6142 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6147 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6152 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6162 /* A negative divisor reverses the relational operators. */
6163 code = swap_tree_comparison (code);
6165 tmp = int_const_binop (PLUS_EXPR, arg01,
6166 build_int_cst (TREE_TYPE (arg01), 1), 0);
6167 switch (tree_int_cst_sgn (arg1))
6170 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6175 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6180 neg_overflow = true;
6181 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6193 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6194 return omit_one_operand (type, integer_zero_node, arg00);
6195 if (TREE_OVERFLOW (hi))
6196 return fold_build2 (GE_EXPR, type, arg00, lo);
6197 if (TREE_OVERFLOW (lo))
6198 return fold_build2 (LE_EXPR, type, arg00, hi);
6199 return build_range_check (type, arg00, 1, lo, hi);
6202 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6203 return omit_one_operand (type, integer_one_node, arg00);
6204 if (TREE_OVERFLOW (hi))
6205 return fold_build2 (LT_EXPR, type, arg00, lo);
6206 if (TREE_OVERFLOW (lo))
6207 return fold_build2 (GT_EXPR, type, arg00, hi);
6208 return build_range_check (type, arg00, 0, lo, hi);
6211 if (TREE_OVERFLOW (lo))
6213 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6214 return omit_one_operand (type, tmp, arg00);
6216 return fold_build2 (LT_EXPR, type, arg00, lo);
6219 if (TREE_OVERFLOW (hi))
6221 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6222 return omit_one_operand (type, tmp, arg00);
6224 return fold_build2 (LE_EXPR, type, arg00, hi);
6227 if (TREE_OVERFLOW (hi))
6229 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6230 return omit_one_operand (type, tmp, arg00);
6232 return fold_build2 (GT_EXPR, type, arg00, hi);
6235 if (TREE_OVERFLOW (lo))
6237 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6238 return omit_one_operand (type, tmp, arg00);
6240 return fold_build2 (GE_EXPR, type, arg00, lo);
6250 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6251 equality/inequality test, then return a simplified form of the test
6252 using a sign test. Otherwise return NULL. TYPE is the desired
6256 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6259 /* If this is testing a single bit, we can optimize the test. */
6260 if ((code == NE_EXPR || code == EQ_EXPR)
6261 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6262 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6264 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6265 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6266 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6268 if (arg00 != NULL_TREE
6269 /* This is only a win if casting to a signed type is cheap,
6270 i.e. when arg00's type is not a partial mode. */
6271 && TYPE_PRECISION (TREE_TYPE (arg00))
6272 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6274 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6275 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6276 result_type, fold_convert (stype, arg00),
6277 build_int_cst (stype, 0));
6284 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6285 equality/inequality test, then return a simplified form of
6286 the test using shifts and logical operations. Otherwise return
6287 NULL. TYPE is the desired result type. */
6290 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6293 /* If this is testing a single bit, we can optimize the test. */
6294 if ((code == NE_EXPR || code == EQ_EXPR)
6295 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6296 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6298 tree inner = TREE_OPERAND (arg0, 0);
6299 tree type = TREE_TYPE (arg0);
6300 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6301 enum machine_mode operand_mode = TYPE_MODE (type);
6303 tree signed_type, unsigned_type, intermediate_type;
6306 /* First, see if we can fold the single bit test into a sign-bit
6308 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6313 /* Otherwise we have (A & C) != 0 where C is a single bit,
6314 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6315 Similarly for (A & C) == 0. */
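/* Illustrative sketch (assuming C == 8, i.e. bit number 3):
     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1
   carried out in the signed or unsigned intermediate type chosen below.  */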
6317 /* If INNER is a right shift of a constant and it plus BITNUM does
6318 not overflow, adjust BITNUM and INNER. */
6319 if (TREE_CODE (inner) == RSHIFT_EXPR
6320 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6321 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6322 && bitnum < TYPE_PRECISION (type)
6323 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6324 bitnum - TYPE_PRECISION (type)))
6326 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6327 inner = TREE_OPERAND (inner, 0);
6330 /* If we are going to be able to omit the AND below, we must do our
6331 operations as unsigned. If we must use the AND, we have a choice.
6332 Normally unsigned is faster, but for some machines signed is. */
6333 #ifdef LOAD_EXTEND_OP
6334 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6335 && !flag_syntax_only) ? 0 : 1;
6340 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6341 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6342 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6343 inner = fold_convert (intermediate_type, inner);
6346 inner = build2 (RSHIFT_EXPR, intermediate_type,
6347 inner, size_int (bitnum));
6349 one = build_int_cst (intermediate_type, 1);
6351 if (code == EQ_EXPR)
6352 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6354 /* Put the AND last so it can combine with more things. */
6355 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6357 /* Make sure to return the proper type. */
6358 inner = fold_convert (result_type, inner);
6365 /* Check whether we are allowed to reorder operands arg0 and arg1,
6366 such that the evaluation of arg1 occurs before arg0. */
6369 reorder_operands_p (tree arg0, tree arg1)
6371 if (! flag_evaluation_order)
6373 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6375 return ! TREE_SIDE_EFFECTS (arg0)
6376 && ! TREE_SIDE_EFFECTS (arg1);
6379 /* Test whether it is preferable to swap two operands, ARG0 and
6380 ARG1, for example because ARG0 is an integer constant and ARG1
6381 isn't. If REORDER is true, only recommend swapping if we can
6382 evaluate the operands in reverse order. */
6385 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6387 STRIP_SIGN_NOPS (arg0);
6388 STRIP_SIGN_NOPS (arg1);
6390 if (TREE_CODE (arg1) == INTEGER_CST)
6392 if (TREE_CODE (arg0) == INTEGER_CST)
6395 if (TREE_CODE (arg1) == REAL_CST)
6397 if (TREE_CODE (arg0) == REAL_CST)
6400 if (TREE_CODE (arg1) == COMPLEX_CST)
6402 if (TREE_CODE (arg0) == COMPLEX_CST)
6405 if (TREE_CONSTANT (arg1))
6407 if (TREE_CONSTANT (arg0))
6413 if (reorder && flag_evaluation_order
6414 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6422 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6423 for commutative and comparison operators. Ensuring a canonical
6424 form allows the optimizers to find additional redundancies without
6425 having to explicitly check for both orderings. */
6426 if (TREE_CODE (arg0) == SSA_NAME
6427 && TREE_CODE (arg1) == SSA_NAME
6428 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6434 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6435 ARG0 is extended to a wider type. */
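/* Illustrative sketch: with "unsigned char c;", the comparison
     (int) c == 200
   can be done in the narrower type as  c == (unsigned char) 200,  while
   (int) c == 300  compares against a value that cannot fit in unsigned char
   and is known to be false.  */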
6438 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6440 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6442 tree shorter_type, outer_type;
6446 if (arg0_unw == arg0)
6448 shorter_type = TREE_TYPE (arg0_unw);
6450 #ifdef HAVE_canonicalize_funcptr_for_compare
6451 /* Disable this optimization if we're casting a function pointer
6452 type on targets that require function pointer canonicalization. */
6453 if (HAVE_canonicalize_funcptr_for_compare
6454 && TREE_CODE (shorter_type) == POINTER_TYPE
6455 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6459 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6462 arg1_unw = get_unwidened (arg1, shorter_type);
6464 /* If possible, express the comparison in the shorter mode. */
6465 if ((code == EQ_EXPR || code == NE_EXPR
6466 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6467 && (TREE_TYPE (arg1_unw) == shorter_type
6468 || (TREE_CODE (arg1_unw) == INTEGER_CST
6469 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6470 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6471 && int_fits_type_p (arg1_unw, shorter_type))))
6472 return fold_build2 (code, type, arg0_unw,
6473 fold_convert (shorter_type, arg1_unw));
6475 if (TREE_CODE (arg1_unw) != INTEGER_CST
6476 || TREE_CODE (shorter_type) != INTEGER_TYPE
6477 || !int_fits_type_p (arg1_unw, shorter_type))
6480 /* If we are comparing with an integer that does not fit into the range
6481 of the shorter type, the result is known. */
6482 outer_type = TREE_TYPE (arg1_unw);
6483 min = lower_bound_in_type (outer_type, shorter_type);
6484 max = upper_bound_in_type (outer_type, shorter_type);
6486 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6488 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6495 return omit_one_operand (type, integer_zero_node, arg0);
6500 return omit_one_operand (type, integer_one_node, arg0);
6506 return omit_one_operand (type, integer_one_node, arg0);
6508 return omit_one_operand (type, integer_zero_node, arg0);
6513 return omit_one_operand (type, integer_zero_node, arg0);
6515 return omit_one_operand (type, integer_one_node, arg0);
6524 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6525 ARG0 just the signedness is changed. */
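/* Illustrative sketch: with "int x;", the equality test
     (unsigned int) x == 5u
   is rewritten as  x == 5,  dropping the sign-changing cast; for ordering
   comparisons this is only done when the signedness of the two types agrees,
   as checked below.  */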
6528 fold_sign_changed_comparison (enum tree_code code, tree type,
6529 tree arg0, tree arg1)
6531 tree arg0_inner, tmp;
6532 tree inner_type, outer_type;
6534 if (TREE_CODE (arg0) != NOP_EXPR
6535 && TREE_CODE (arg0) != CONVERT_EXPR)
6538 outer_type = TREE_TYPE (arg0);
6539 arg0_inner = TREE_OPERAND (arg0, 0);
6540 inner_type = TREE_TYPE (arg0_inner);
6542 #ifdef HAVE_canonicalize_funcptr_for_compare
6543 /* Disable this optimization if we're casting a function pointer
6544 type on targets that require function pointer canonicalization. */
6545 if (HAVE_canonicalize_funcptr_for_compare
6546 && TREE_CODE (inner_type) == POINTER_TYPE
6547 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6551 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6554 if (TREE_CODE (arg1) != INTEGER_CST
6555 && !((TREE_CODE (arg1) == NOP_EXPR
6556 || TREE_CODE (arg1) == CONVERT_EXPR)
6557 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6560 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6565 if (TREE_CODE (arg1) == INTEGER_CST)
6567 tmp = build_int_cst_wide (inner_type,
6568 TREE_INT_CST_LOW (arg1),
6569 TREE_INT_CST_HIGH (arg1));
6570 arg1 = force_fit_type (tmp, 0,
6571 TREE_OVERFLOW (arg1),
6572 TREE_CONSTANT_OVERFLOW (arg1));
6575 arg1 = fold_convert (inner_type, arg1);
6577 return fold_build2 (code, type, arg0_inner, arg1);
6580 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6581 step of the array. Reconstructs s and delta in the case of s * delta
6582 being an integer constant (and thus already folded).
6583 ADDR is the address. MULT is the multiplicative expression.
6584 If the function succeeds, the new address expression is returned. Otherwise
6585 NULL_TREE is returned. */
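/* Illustrative sketch (offsets are byte offsets at this level): with
   "int a[100];" and 4-byte ints,
     &a[i] + 4 * delta   becomes   &a[i + delta]
     &a[i] + 12          becomes   &a[i + 3]
   the multiplier (or, for a plain constant, its divisor) must match the
   array element size exactly.  */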
6588 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6590 tree s, delta, step;
6591 tree ref = TREE_OPERAND (addr, 0), pref;
6595 /* Canonicalize op1 into a possibly non-constant delta
6596 and an INTEGER_CST s. */
6597 if (TREE_CODE (op1) == MULT_EXPR)
6599 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6604 if (TREE_CODE (arg0) == INTEGER_CST)
6609 else if (TREE_CODE (arg1) == INTEGER_CST)
6617 else if (TREE_CODE (op1) == INTEGER_CST)
6624 /* Treat op1 as delta * 1. */
6626 s = integer_one_node;
6629 for (;; ref = TREE_OPERAND (ref, 0))
6631 if (TREE_CODE (ref) == ARRAY_REF)
6633 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6637 step = array_ref_element_size (ref);
6638 if (TREE_CODE (step) != INTEGER_CST)
6643 if (! tree_int_cst_equal (step, s))
6648 /* See whether delta is a multiple of step. */
6649 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6658 if (!handled_component_p (ref))
6662 /* We found a suitable array reference. So copy everything up to it,
6663 and replace the index. */
6665 pref = TREE_OPERAND (addr, 0);
6666 ret = copy_node (pref);
6671 pref = TREE_OPERAND (pref, 0);
6672 TREE_OPERAND (pos, 0) = copy_node (pref);
6673 pos = TREE_OPERAND (pos, 0);
6676 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6677 fold_convert (itype,
6678 TREE_OPERAND (pos, 1)),
6679 fold_convert (itype, delta));
6681 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6685 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6686 means A >= Y && A != MAX, but in this case we know that
6687 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
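/* Illustrative sketch: for the conjunction  a < n && a + 1 > b,
   BOUND is  a < n  and INEQ is  a + 1 > b;  the tree returned here is
   a >= b,  which the caller combines back with  a < n.  */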
6690 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6692 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6694 if (TREE_CODE (bound) == LT_EXPR)
6695 a = TREE_OPERAND (bound, 0);
6696 else if (TREE_CODE (bound) == GT_EXPR)
6697 a = TREE_OPERAND (bound, 1);
6701 typea = TREE_TYPE (a);
6702 if (!INTEGRAL_TYPE_P (typea)
6703 && !POINTER_TYPE_P (typea))
6706 if (TREE_CODE (ineq) == LT_EXPR)
6708 a1 = TREE_OPERAND (ineq, 1);
6709 y = TREE_OPERAND (ineq, 0);
6711 else if (TREE_CODE (ineq) == GT_EXPR)
6713 a1 = TREE_OPERAND (ineq, 0);
6714 y = TREE_OPERAND (ineq, 1);
6719 if (TREE_TYPE (a1) != typea)
6722 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6723 if (!integer_onep (diff))
6726 return fold_build2 (GE_EXPR, type, a, y);
6729 /* Fold a sum or difference in which at least one operand is a multiplication.
6730 Returns the folded tree or NULL if no simplification could be made. */
6733 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6735 tree arg00, arg01, arg10, arg11;
6736 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6738 /* (A * C) +- (B * C) -> (A+-B) * C.
6739 (A * C) +- A -> A * (C+-1).
6740 We are most concerned about the case where C is a constant,
6741 but other combinations show up during loop reduction. Since
6742 it is not difficult, try all four possibilities. */
6744 if (TREE_CODE (arg0) == MULT_EXPR)
6746 arg00 = TREE_OPERAND (arg0, 0);
6747 arg01 = TREE_OPERAND (arg0, 1);
6752 arg01 = build_one_cst (type);
6754 if (TREE_CODE (arg1) == MULT_EXPR)
6756 arg10 = TREE_OPERAND (arg1, 0);
6757 arg11 = TREE_OPERAND (arg1, 1);
6762 arg11 = build_one_cst (type);
6766 if (operand_equal_p (arg01, arg11, 0))
6767 same = arg01, alt0 = arg00, alt1 = arg10;
6768 else if (operand_equal_p (arg00, arg10, 0))
6769 same = arg00, alt0 = arg01, alt1 = arg11;
6770 else if (operand_equal_p (arg00, arg11, 0))
6771 same = arg00, alt0 = arg01, alt1 = arg10;
6772 else if (operand_equal_p (arg01, arg10, 0))
6773 same = arg01, alt0 = arg00, alt1 = arg11;
6775 /* No identical multiplicands; see if we can find a common
6776 power-of-two factor in non-power-of-two multiplies. This
6777 can help in multi-dimensional array access. */
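/* Illustrative sketch:  i * 12 + j * 4  has no common multiplicand, but 4
   divides 12, so it can be rewritten as  (i * 3 + j) * 4.  */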
6778 else if (host_integerp (arg01, 0)
6779 && host_integerp (arg11, 0))
6781 HOST_WIDE_INT int01, int11, tmp;
6784 int01 = TREE_INT_CST_LOW (arg01);
6785 int11 = TREE_INT_CST_LOW (arg11);
6787 /* Move min of absolute values to int11. */
6788 if ((int01 >= 0 ? int01 : -int01)
6789 < (int11 >= 0 ? int11 : -int11))
6791 tmp = int01, int01 = int11, int11 = tmp;
6792 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6799 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6801 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6802 build_int_cst (TREE_TYPE (arg00),
6807 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6812 return fold_build2 (MULT_EXPR, type,
6813 fold_build2 (code, type,
6814 fold_convert (type, alt0),
6815 fold_convert (type, alt1)),
6816 fold_convert (type, same));
6821 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6822 specified by EXPR into the buffer PTR of length LEN bytes.
6823 Return the number of bytes placed in the buffer, or zero upon failure. */
6827 native_encode_int (tree expr, unsigned char *ptr, int len)
6829 tree type = TREE_TYPE (expr);
6830 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6831 int byte, offset, word, words;
6832 unsigned char value;
6834 if (total_bytes > len)
6836 words = total_bytes / UNITS_PER_WORD;
6838 for (byte = 0; byte < total_bytes; byte++)
6840 int bitpos = byte * BITS_PER_UNIT;
6841 if (bitpos < HOST_BITS_PER_WIDE_INT)
6842 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6844 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6845 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6847 if (total_bytes > UNITS_PER_WORD)
6849 word = byte / UNITS_PER_WORD;
6850 if (WORDS_BIG_ENDIAN)
6851 word = (words - 1) - word;
6852 offset = word * UNITS_PER_WORD;
6853 if (BYTES_BIG_ENDIAN)
6854 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6856 offset += byte % UNITS_PER_WORD;
6859 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6860 ptr[offset] = value;
6866 /* Subroutine of native_encode_expr. Encode the REAL_CST
6867 specified by EXPR into the buffer PTR of length LEN bytes.
6868 Return the number of bytes placed in the buffer, or zero upon failure. */
6872 native_encode_real (tree expr, unsigned char *ptr, int len)
6874 tree type = TREE_TYPE (expr);
6875 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6876 int byte, offset, word, words;
6877 unsigned char value;
6879 /* There are always 32 bits in each long, no matter the size of
6880 the host's long. We handle floating point representations with up to 192 bits. */
6884 if (total_bytes > len)
6886 words = total_bytes / UNITS_PER_WORD;
6888 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6890 for (byte = 0; byte < total_bytes; byte++)
6892 int bitpos = byte * BITS_PER_UNIT;
6893 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6895 if (total_bytes > UNITS_PER_WORD)
6897 word = byte / UNITS_PER_WORD;
6898 if (FLOAT_WORDS_BIG_ENDIAN)
6899 word = (words - 1) - word;
6900 offset = word * UNITS_PER_WORD;
6901 if (BYTES_BIG_ENDIAN)
6902 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6904 offset += byte % UNITS_PER_WORD;
6907 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6908 ptr[offset] = value;
6913 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6914 specified by EXPR into the buffer PTR of length LEN bytes.
6915 Return the number of bytes placed in the buffer, or zero upon failure. */
6919 native_encode_complex (tree expr, unsigned char *ptr, int len)
6924 part = TREE_REALPART (expr);
6925 rsize = native_encode_expr (part, ptr, len);
6928 part = TREE_IMAGPART (expr);
6929 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6932 return rsize + isize;
6936 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6937 specified by EXPR into the buffer PTR of length LEN bytes.
6938 Return the number of bytes placed in the buffer, or zero upon failure. */
6942 native_encode_vector (tree expr, unsigned char *ptr, int len)
6944 int i, size, offset, count;
6945 tree itype, elem, elements;
6948 elements = TREE_VECTOR_CST_ELTS (expr);
6949 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6950 itype = TREE_TYPE (TREE_TYPE (expr));
6951 size = GET_MODE_SIZE (TYPE_MODE (itype));
6952 for (i = 0; i < count; i++)
6956 elem = TREE_VALUE (elements);
6957 elements = TREE_CHAIN (elements);
6964 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6969 if (offset + size > len)
6971 memset (ptr+offset, 0, size);
6979 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6980 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6981 buffer PTR of length LEN bytes. Return the number of bytes
6982 placed in the buffer, or zero upon failure. */
6985 native_encode_expr (tree expr, unsigned char *ptr, int len)
6987 switch (TREE_CODE (expr))
6990 return native_encode_int (expr, ptr, len);
6993 return native_encode_real (expr, ptr, len);
6996 return native_encode_complex (expr, ptr, len);
6999 return native_encode_vector (expr, ptr, len);
7007 /* Subroutine of native_interpret_expr. Interpret the contents of
7008 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7009 If the buffer cannot be interpreted, return NULL_TREE. */
7012 native_interpret_int (tree type, unsigned char *ptr, int len)
7014 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7015 int byte, offset, word, words;
7016 unsigned char value;
7017 unsigned int HOST_WIDE_INT lo = 0;
7018 HOST_WIDE_INT hi = 0;
7020 if (total_bytes > len)
7022 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7024 words = total_bytes / UNITS_PER_WORD;
7026 for (byte = 0; byte < total_bytes; byte++)
7028 int bitpos = byte * BITS_PER_UNIT;
7029 if (total_bytes > UNITS_PER_WORD)
7031 word = byte / UNITS_PER_WORD;
7032 if (WORDS_BIG_ENDIAN)
7033 word = (words - 1) - word;
7034 offset = word * UNITS_PER_WORD;
7035 if (BYTES_BIG_ENDIAN)
7036 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7038 offset += byte % UNITS_PER_WORD;
7041 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7042 value = ptr[offset];
7044 if (bitpos < HOST_BITS_PER_WIDE_INT)
7045 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7047 hi |= (unsigned HOST_WIDE_INT) value
7048 << (bitpos - HOST_BITS_PER_WIDE_INT);
7051 return force_fit_type (build_int_cst_wide (type, lo, hi),
7056 /* Subroutine of native_interpret_expr. Interpret the contents of
7057 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7058 If the buffer cannot be interpreted, return NULL_TREE. */
7061 native_interpret_real (tree type, unsigned char *ptr, int len)
7063 enum machine_mode mode = TYPE_MODE (type);
7064 int total_bytes = GET_MODE_SIZE (mode);
7065 int byte, offset, word, words;
7066 unsigned char value;
7067 /* There are always 32 bits in each long, no matter the size of
7068 the host's long. We handle floating point representations with up to 192 bits. */
7073 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7074 if (total_bytes > len || total_bytes > 24)
7076 words = total_bytes / UNITS_PER_WORD;
7078 memset (tmp, 0, sizeof (tmp));
7079 for (byte = 0; byte < total_bytes; byte++)
7081 int bitpos = byte * BITS_PER_UNIT;
7082 if (total_bytes > UNITS_PER_WORD)
7084 word = byte / UNITS_PER_WORD;
7085 if (FLOAT_WORDS_BIG_ENDIAN)
7086 word = (words - 1) - word;
7087 offset = word * UNITS_PER_WORD;
7088 if (BYTES_BIG_ENDIAN)
7089 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7091 offset += byte % UNITS_PER_WORD;
7094 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7095 value = ptr[offset];
7097 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7100 real_from_target (&r, tmp, mode);
7101 return build_real (type, r);
7105 /* Subroutine of native_interpret_expr. Interpret the contents of
7106 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7107 If the buffer cannot be interpreted, return NULL_TREE. */
7110 native_interpret_complex (tree type, unsigned char *ptr, int len)
7112 tree etype, rpart, ipart;
7115 etype = TREE_TYPE (type);
7116 size = GET_MODE_SIZE (TYPE_MODE (etype));
7119 rpart = native_interpret_expr (etype, ptr, size);
7122 ipart = native_interpret_expr (etype, ptr+size, size);
7125 return build_complex (type, rpart, ipart);
7129 /* Subroutine of native_interpret_expr. Interpret the contents of
7130 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7131 If the buffer cannot be interpreted, return NULL_TREE. */
7134 native_interpret_vector (tree type, unsigned char *ptr, int len)
7136 tree etype, elem, elements;
7139 etype = TREE_TYPE (type);
7140 size = GET_MODE_SIZE (TYPE_MODE (etype));
7141 count = TYPE_VECTOR_SUBPARTS (type);
7142 if (size * count > len)
7145 elements = NULL_TREE;
7146 for (i = count - 1; i >= 0; i--)
7148 elem = native_interpret_expr (etype, ptr+(i*size), size);
7151 elements = tree_cons (NULL_TREE, elem, elements);
7153 return build_vector (type, elements);
7157 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7158 the buffer PTR of length LEN as a constant of type TYPE. For
7159 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7160 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7161 return NULL_TREE. */
7164 native_interpret_expr (tree type, unsigned char *ptr, int len)
7166 switch (TREE_CODE (type))
7171 return native_interpret_int (type, ptr, len);
7174 return native_interpret_real (type, ptr, len);
7177 return native_interpret_complex (type, ptr, len);
7180 return native_interpret_vector (type, ptr, len);
7188 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7189 TYPE at compile-time. If we're unable to perform the conversion
7190 return NULL_TREE. */
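/* Illustrative sketch (assuming IEEE single precision and a 32-bit int):
   VIEW_CONVERT_EXPR<int>(1.0f)  is encoded as the bytes of 0x3f800000 and
   reinterpreted as the integer 1065353216.  */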
7193 fold_view_convert_expr (tree type, tree expr)
7195 /* We support up to 512-bit values (for V8DFmode). */
7196 unsigned char buffer[64];
7199 /* Check that the host and target are sane. */
7200 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7203 len = native_encode_expr (expr, buffer, sizeof (buffer));
7207 return native_interpret_expr (type, buffer, len);
7211 /* Fold a unary expression of code CODE and type TYPE with operand
7212 OP0. Return the folded expression if folding is successful.
7213 Otherwise, return NULL_TREE. */
7216 fold_unary (enum tree_code code, tree type, tree op0)
7220 enum tree_code_class kind = TREE_CODE_CLASS (code);
7222 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7223 && TREE_CODE_LENGTH (code) == 1);
7228 if (code == NOP_EXPR || code == CONVERT_EXPR
7229 || code == FLOAT_EXPR || code == ABS_EXPR)
7231 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7233 STRIP_SIGN_NOPS (arg0);
7237 /* Strip any conversions that don't change the mode. This
7238 is safe for every expression, except for a comparison
7239 expression because its signedness is derived from its operands.
7242 Note that this is done as an internal manipulation within
7243 the constant folder, in order to find the simplest
7244 representation of the arguments so that their form can be
7245 studied. In any case, the appropriate type conversions
7246 should be put back in the tree that will get out of the constant folder. */
7252 if (TREE_CODE_CLASS (code) == tcc_unary)
7254 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7255 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7256 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7257 else if (TREE_CODE (arg0) == COND_EXPR)
7259 tree arg01 = TREE_OPERAND (arg0, 1);
7260 tree arg02 = TREE_OPERAND (arg0, 2);
7261 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7262 arg01 = fold_build1 (code, type, arg01);
7263 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7264 arg02 = fold_build1 (code, type, arg02);
7265 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7268 /* If this was a conversion, and all we did was to move it
7269 inside the COND_EXPR, bring it back out. But leave it if
7270 it is a conversion from integer to integer and the
7271 result precision is no wider than a word since such a
7272 conversion is cheap and may be optimized away by combine,
7273 while it couldn't if it were outside the COND_EXPR. Then return
7274 so we don't get into an infinite recursion loop taking the
7275 conversion out and then back in. */
7277 if ((code == NOP_EXPR || code == CONVERT_EXPR
7278 || code == NON_LVALUE_EXPR)
7279 && TREE_CODE (tem) == COND_EXPR
7280 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7281 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7282 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7283 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7284 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7285 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7286 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7288 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7289 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7290 || flag_syntax_only))
7291 tem = build1 (code, type,
7293 TREE_TYPE (TREE_OPERAND
7294 (TREE_OPERAND (tem, 1), 0)),
7295 TREE_OPERAND (tem, 0),
7296 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7297 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7300 else if (COMPARISON_CLASS_P (arg0))
7302 if (TREE_CODE (type) == BOOLEAN_TYPE)
7304 arg0 = copy_node (arg0);
7305 TREE_TYPE (arg0) = type;
7308 else if (TREE_CODE (type) != INTEGER_TYPE)
7309 return fold_build3 (COND_EXPR, type, arg0,
7310 fold_build1 (code, type,
7312 fold_build1 (code, type,
7313 integer_zero_node));
7322 case FIX_TRUNC_EXPR:
7323 if (TREE_TYPE (op0) == type)
7326 /* If we have (type) (a CMP b) and type is an integral type, return
7327 a new expression involving the new type. */
7328 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7329 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7330 TREE_OPERAND (op0, 1));
7332 /* Handle cases of two conversions in a row. */
7333 if (TREE_CODE (op0) == NOP_EXPR
7334 || TREE_CODE (op0) == CONVERT_EXPR)
7336 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7337 tree inter_type = TREE_TYPE (op0);
7338 int inside_int = INTEGRAL_TYPE_P (inside_type);
7339 int inside_ptr = POINTER_TYPE_P (inside_type);
7340 int inside_float = FLOAT_TYPE_P (inside_type);
7341 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7342 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7343 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7344 int inter_int = INTEGRAL_TYPE_P (inter_type);
7345 int inter_ptr = POINTER_TYPE_P (inter_type);
7346 int inter_float = FLOAT_TYPE_P (inter_type);
7347 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7348 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7349 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7350 int final_int = INTEGRAL_TYPE_P (type);
7351 int final_ptr = POINTER_TYPE_P (type);
7352 int final_float = FLOAT_TYPE_P (type);
7353 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7354 unsigned int final_prec = TYPE_PRECISION (type);
7355 int final_unsignedp = TYPE_UNSIGNED (type);
7357 /* In addition to the cases of two conversions in a row
7358 handled below, if we are converting something to its own
7359 type via an object of identical or wider precision, neither
7360 conversion is needed. */
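/* Illustrative sketch (assuming 32-bit int and 64-bit long): with "int i;",
   the double conversion  (int) (long) i  folds to plain  i.  */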
7361 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7362 && (((inter_int || inter_ptr) && final_int)
7363 || (inter_float && final_float))
7364 && inter_prec >= final_prec)
7365 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7367 /* Likewise, if the intermediate and final types are either both
7368 float or both integer, we don't need the middle conversion if
7369 it is wider than the final type and doesn't change the signedness
7370 (for integers). Avoid this if the final type is a pointer
7371 since then we sometimes need the inner conversion. Likewise if
7372 the outer has a precision not equal to the size of its mode. */
7373 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7374 || (inter_float && inside_float)
7375 || (inter_vec && inside_vec))
7376 && inter_prec >= inside_prec
7377 && (inter_float || inter_vec
7378 || inter_unsignedp == inside_unsignedp)
7379 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7380 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7382 && (! final_vec || inter_prec == inside_prec))
7383 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7385 /* If we have a sign-extension of a zero-extended value, we can
7386 replace that by a single zero-extension. */
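/* Illustrative sketch (assuming 8-bit char, 32-bit int, 64-bit long): with
   "unsigned char c;",  (long) (int) c  zero-extends c to int and then
   sign-extends to long; the intermediate value is never negative, so this
   equals a single zero-extension of c to long.  */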
7387 if (inside_int && inter_int && final_int
7388 && inside_prec < inter_prec && inter_prec < final_prec
7389 && inside_unsignedp && !inter_unsignedp)
7390 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7392 /* Two conversions in a row are not needed unless:
7393 - some conversion is floating-point (overstrict for now), or
7394 - some conversion is a vector (overstrict for now), or
7395 - the intermediate type is narrower than both the initial and final types, or
7397 - the intermediate type and innermost type differ in signedness,
7398 and the outermost type is wider than the intermediate, or
7399 - the initial type is a pointer type and the precisions of the
7400 intermediate and final types differ, or
7401 - the final type is a pointer type and the precisions of the
7402 initial and intermediate types differ.
7403 - the final type is a pointer type and the initial type is not, or
7404 - the initial type is a pointer to an array and the final type is not. */
7406 if (! inside_float && ! inter_float && ! final_float
7407 && ! inside_vec && ! inter_vec && ! final_vec
7408 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7409 && ! (inside_int && inter_int
7410 && inter_unsignedp != inside_unsignedp
7411 && inter_prec < final_prec)
7412 && ((inter_unsignedp && inter_prec > inside_prec)
7413 == (final_unsignedp && final_prec > inter_prec))
7414 && ! (inside_ptr && inter_prec != final_prec)
7415 && ! (final_ptr && inside_prec != inter_prec)
7416 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7417 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7418 && final_ptr == inside_ptr
7420 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7421 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7422 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7425 /* Handle (T *)&A.B.C for A being of type T and B and C
7426 living at offset zero. This occurs frequently in
7427 C++ upcasting and then accessing the base. */
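/* Illustrative sketch:
     struct B { int i; };
     struct D { struct B b; int j; } d;
   the cast  (struct D *) &d.b  refers to offset zero of d and the pointed-to
   type matches d's own type, so it folds to  &d.  */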
7428 if (TREE_CODE (op0) == ADDR_EXPR
7429 && POINTER_TYPE_P (type)
7430 && handled_component_p (TREE_OPERAND (op0, 0)))
7432 HOST_WIDE_INT bitsize, bitpos;
7434 enum machine_mode mode;
7435 int unsignedp, volatilep;
7436 tree base = TREE_OPERAND (op0, 0);
7437 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7438 &mode, &unsignedp, &volatilep, false);
7439 /* If the reference was to a (constant) zero offset, we can use
7440 the address of the base if it has the same base type
7441 as the result type. */
7442 if (! offset && bitpos == 0
7443 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7444 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7445 return fold_convert (type, build_fold_addr_expr (base));
7448 if (TREE_CODE (op0) == MODIFY_EXPR
7449 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7450 /* Detect assigning a bitfield. */
7451 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7452 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7454 /* Don't leave an assignment inside a conversion
7455 unless assigning a bitfield. */
7456 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7457 /* First do the assignment, then return converted constant. */
7458 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7459 TREE_NO_WARNING (tem) = 1;
7460 TREE_USED (tem) = 1;
7464 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7465 constant (if x has signed type, the sign bit cannot be set
7466 in c). This folds extension into the BIT_AND_EXPR. */
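/* Illustrative sketch: with "unsigned int u;",
     (unsigned long) (u & 0xff)   folds to   (unsigned long) u & 0xff
   moving the widening cast onto the cheaper operand.  */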
7467 if (INTEGRAL_TYPE_P (type)
7468 && TREE_CODE (type) != BOOLEAN_TYPE
7469 && TREE_CODE (op0) == BIT_AND_EXPR
7470 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7473 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7476 if (TYPE_UNSIGNED (TREE_TYPE (and))
7477 || (TYPE_PRECISION (type)
7478 <= TYPE_PRECISION (TREE_TYPE (and))))
7480 else if (TYPE_PRECISION (TREE_TYPE (and1))
7481 <= HOST_BITS_PER_WIDE_INT
7482 && host_integerp (and1, 1))
7484 unsigned HOST_WIDE_INT cst;
7486 cst = tree_low_cst (and1, 1);
7487 cst &= (HOST_WIDE_INT) -1
7488 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7489 change = (cst == 0);
7490 #ifdef LOAD_EXTEND_OP
7492 && !flag_syntax_only
7493 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7496 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7497 and0 = fold_convert (uns, and0);
7498 and1 = fold_convert (uns, and1);
7504 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7505 TREE_INT_CST_HIGH (and1));
7506 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7507 TREE_CONSTANT_OVERFLOW (and1));
7508 return fold_build2 (BIT_AND_EXPR, type,
7509 fold_convert (type, and0), tem);
7513 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7514 T2 being pointers to types of the same size. */
7515 if (POINTER_TYPE_P (type)
7516 && BINARY_CLASS_P (arg0)
7517 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7518 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7520 tree arg00 = TREE_OPERAND (arg0, 0);
7522 tree t1 = TREE_TYPE (arg00);
7523 tree tt0 = TREE_TYPE (t0);
7524 tree tt1 = TREE_TYPE (t1);
7525 tree s0 = TYPE_SIZE (tt0);
7526 tree s1 = TYPE_SIZE (tt1);
7528 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7529 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7530 TREE_OPERAND (arg0, 1));
7533 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7534 of the same precision, and X is an integer type not narrower than
7535 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7536 if (INTEGRAL_TYPE_P (type)
7537 && TREE_CODE (op0) == BIT_NOT_EXPR
7538 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7539 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7540 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7541 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7543 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7544 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7545 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7546 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7549 tem = fold_convert_const (code, type, arg0);
7550 return tem ? tem : NULL_TREE;
7552 case VIEW_CONVERT_EXPR:
7553 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7554 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7555 return fold_view_convert_expr (type, op0);
7558 tem = fold_negate_expr (arg0);
7560 return fold_convert (type, tem);
7564 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7565 return fold_abs_const (arg0, type);
7566 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7567 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7568 /* Convert fabs((double)float) into (double)fabsf(float). */
7569 else if (TREE_CODE (arg0) == NOP_EXPR
7570 && TREE_CODE (type) == REAL_TYPE)
7572 tree targ0 = strip_float_extensions (arg0);
7574 return fold_convert (type, fold_build1 (ABS_EXPR,
7578 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7579 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7582 /* Strip sign ops from argument. */
7583 if (TREE_CODE (type) == REAL_TYPE)
7585 tem = fold_strip_sign_ops (arg0);
7587 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7592 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7593 return fold_convert (type, arg0);
7594 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7596 tree itype = TREE_TYPE (type);
7597 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7598 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7599 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7601 if (TREE_CODE (arg0) == COMPLEX_CST)
7603 tree itype = TREE_TYPE (type);
7604 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7605 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7606 return build_complex (type, rpart, negate_expr (ipart));
7608 if (TREE_CODE (arg0) == CONJ_EXPR)
7609 return fold_convert (type, TREE_OPERAND (arg0, 0));
7613 if (TREE_CODE (arg0) == INTEGER_CST)
7614 return fold_not_const (arg0, type);
7615 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7616 return TREE_OPERAND (arg0, 0);
7617 /* Convert ~ (-A) to A - 1. */
7618 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7619 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7620 build_int_cst (type, 1));
7621 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7622 else if (INTEGRAL_TYPE_P (type)
7623 && ((TREE_CODE (arg0) == MINUS_EXPR
7624 && integer_onep (TREE_OPERAND (arg0, 1)))
7625 || (TREE_CODE (arg0) == PLUS_EXPR
7626 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7627 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7628 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7629 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7630 && (tem = fold_unary (BIT_NOT_EXPR, type,
7632 TREE_OPERAND (arg0, 0)))))
7633 return fold_build2 (BIT_XOR_EXPR, type, tem,
7634 fold_convert (type, TREE_OPERAND (arg0, 1)));
7635 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7636 && (tem = fold_unary (BIT_NOT_EXPR, type,
7638 TREE_OPERAND (arg0, 1)))))
7639 return fold_build2 (BIT_XOR_EXPR, type,
7640 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7644 case TRUTH_NOT_EXPR:
7645 /* The argument to invert_truthvalue must have Boolean type. */
7646 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7647 arg0 = fold_convert (boolean_type_node, arg0);
7649 /* Note that the operand of this must be an int
7650 and its values must be 0 or 1.
7651 ("true" is a fixed value perhaps depending on the language,
7652 but we don't handle values other than 1 correctly yet.) */
7653 tem = fold_truth_not_expr (arg0);
7656 return fold_convert (type, tem);
7659 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7660 return fold_convert (type, arg0);
7661 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7662 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7663 TREE_OPERAND (arg0, 1));
7664 if (TREE_CODE (arg0) == COMPLEX_CST)
7665 return fold_convert (type, TREE_REALPART (arg0));
7666 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7668 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7669 tem = fold_build2 (TREE_CODE (arg0), itype,
7670 fold_build1 (REALPART_EXPR, itype,
7671 TREE_OPERAND (arg0, 0)),
7672 fold_build1 (REALPART_EXPR, itype,
7673 TREE_OPERAND (arg0, 1)));
7674 return fold_convert (type, tem);
7676 if (TREE_CODE (arg0) == CONJ_EXPR)
7678 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7679 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7680 return fold_convert (type, tem);
7685 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7686 return fold_convert (type, integer_zero_node);
7687 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7688 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7689 TREE_OPERAND (arg0, 0));
7690 if (TREE_CODE (arg0) == COMPLEX_CST)
7691 return fold_convert (type, TREE_IMAGPART (arg0));
7692 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7694 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7695 tem = fold_build2 (TREE_CODE (arg0), itype,
7696 fold_build1 (IMAGPART_EXPR, itype,
7697 TREE_OPERAND (arg0, 0)),
7698 fold_build1 (IMAGPART_EXPR, itype,
7699 TREE_OPERAND (arg0, 1)));
7700 return fold_convert (type, tem);
7702 if (TREE_CODE (arg0) == CONJ_EXPR)
7704 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7705 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7706 return fold_convert (type, negate_expr (tem));
7712 } /* switch (code) */
7715 /* Fold a binary expression of code CODE and type TYPE with operands
7716 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7717 Return the folded expression if folding is successful. Otherwise,
7718 return NULL_TREE. */
7721 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7723 enum tree_code compl_code;
7725 if (code == MIN_EXPR)
7726 compl_code = MAX_EXPR;
7727 else if (code == MAX_EXPR)
7728 compl_code = MIN_EXPR;
7732 /* MIN (MAX (a, b), b) == b. */
7733 if (TREE_CODE (op0) == compl_code
7734 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7735 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7737 /* MIN (MAX (b, a), b) == b. */
7738 if (TREE_CODE (op0) == compl_code
7739 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7740 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7741 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7743 /* MIN (a, MAX (a, b)) == a. */
7744 if (TREE_CODE (op1) == compl_code
7745 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7746 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7747 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7749 /* MIN (a, MAX (b, a)) == a. */
7750 if (TREE_CODE (op1) == compl_code
7751 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7752 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7753 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7758 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7759 by changing CODE to reduce the magnitude of constants involved in
7760 ARG0 of the comparison.
7761 Returns a canonicalized comparison tree if a simplification was
7762 possible, otherwise returns NULL_TREE. */
7765 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7766 tree arg0, tree arg1)
7768 enum tree_code code0 = TREE_CODE (arg0);
7769 tree t, cst0 = NULL_TREE;
7773 /* Match A +- CST code arg1 and CST code arg1. */
7774 if (!(((code0 == MINUS_EXPR
7775 || code0 == PLUS_EXPR)
7776 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7777 || code0 == INTEGER_CST))
7780 /* Identify the constant in arg0 and its sign. */
7781 if (code0 == INTEGER_CST)
7784 cst0 = TREE_OPERAND (arg0, 1);
7785 sgn0 = tree_int_cst_sgn (cst0);
7787 /* Overflowed constants and zero will cause problems. */
7788 if (integer_zerop (cst0)
7789 || TREE_OVERFLOW (cst0))
7792 /* See if we can reduce the magnitude of the constant in
7793 arg0 by changing the comparison code. */
7794 if (code0 == INTEGER_CST)
7796 /* CST <= arg1 -> CST-1 < arg1. */
7797 if (code == LE_EXPR && sgn0 == 1)
7799 /* -CST < arg1 -> -CST-1 <= arg1. */
7800 else if (code == LT_EXPR && sgn0 == -1)
7802 /* CST > arg1 -> CST-1 >= arg1. */
7803 else if (code == GT_EXPR && sgn0 == 1)
7805 /* -CST >= arg1 -> -CST-1 > arg1. */
7806 else if (code == GE_EXPR && sgn0 == -1)
7810 /* arg1 code' CST' might be more canonical. */
7815 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7817 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7819 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7820 else if (code == GT_EXPR
7821 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7823 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7824 else if (code == LE_EXPR
7825 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7827 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7828 else if (code == GE_EXPR
7829 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7835 /* Now build the constant reduced in magnitude. */
7836 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7837 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7838 if (code0 != INTEGER_CST)
7839 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7841 /* If swapping might yield a more canonical form, do so. */
7843 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7845 return fold_build2 (code, type, t, arg1);
7848 /* Canonicalize the comparison ARG0 CODE ARG1, whose type TYPE has undefined
7849 overflow, further. Try to decrease the magnitude of constants involved
7850 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7851 and put sole constants at the second argument position.
7852 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7855 maybe_canonicalize_comparison (enum tree_code code, tree type,
7856 tree arg0, tree arg1)
7860 /* In principle pointers also have undefined overflow behavior,
7861 but that causes problems elsewhere. */
7862 if ((flag_wrapv || flag_trapv)
7863 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7864 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7867 /* Try canonicalization by simplifying arg0. */
7868 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7872 /* Try canonicalization by simplifying arg1 using the swapped comparison. */
7874 code = swap_tree_comparison (code);
7875 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7878 /* Subroutine of fold_binary. This routine performs all of the
7879 transformations that are common to the equality/inequality
7880 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7881 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7882 fold_binary should call fold_binary. Fold a comparison with
7883 tree code CODE and type TYPE with operands OP0 and OP1. Return
7884 the folded comparison or NULL_TREE. */
7887 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7889 tree arg0, arg1, tem;
7894 STRIP_SIGN_NOPS (arg0);
7895 STRIP_SIGN_NOPS (arg1);
7897 tem = fold_relational_const (code, type, arg0, arg1);
7898 if (tem != NULL_TREE)
7901 /* If one arg is a real or integer constant, put it last. */
7902 if (tree_swap_operands_p (arg0, arg1, true))
7903 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7905 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7906 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7907 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7908 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7909 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7910 && !(flag_wrapv || flag_trapv))
7911 && (TREE_CODE (arg1) == INTEGER_CST
7912 && !TREE_OVERFLOW (arg1)))
7914 tree const1 = TREE_OPERAND (arg0, 1);
7916 tree variable = TREE_OPERAND (arg0, 0);
7919 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7921 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7922 TREE_TYPE (arg1), const2, const1);
7923 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7924 && (TREE_CODE (lhs) != INTEGER_CST
7925 || !TREE_OVERFLOW (lhs)))
7926 return fold_build2 (code, type, variable, lhs);
7929 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7930 same object, then we can fold this to a comparison of the two offsets in
7931 signed size type. This is possible because pointer arithmetic is
7932 restricted to remain within an object and overflow on pointer differences
7933 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
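/* Illustrative sketch: with "int a[100];", a comparison such as
   &a[i] < &a[j]  has a common base, so it is folded to a comparison of the
   two extracted offsets in the signed variant of the size type.  */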
7934 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7935 && !flag_wrapv && !flag_trapv)
7937 tree base0, offset0, base1, offset1;
7939 if (extract_array_ref (arg0, &base0, &offset0)
7940 && extract_array_ref (arg1, &base1, &offset1)
7941 && operand_equal_p (base0, base1, 0))
7943 tree signed_size_type_node;
7944 signed_size_type_node = signed_type_for (size_type_node);
7946 /* By converting to signed size type we cover middle-end pointer
7947 arithmetic which operates on unsigned pointer types of size
7948 type size and ARRAY_REF offsets which are properly sign or
7949 zero extended from their type in case it is narrower than the size type. */
7951 if (offset0 == NULL_TREE)
7952 offset0 = build_int_cst (signed_size_type_node, 0);
7954 offset0 = fold_convert (signed_size_type_node, offset0);
7955 if (offset1 == NULL_TREE)
7956 offset1 = build_int_cst (signed_size_type_node, 0);
7958 offset1 = fold_convert (signed_size_type_node, offset1);
7960 return fold_build2 (code, type, offset0, offset1);
7964 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
7965 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
7966 the resulting offset is smaller in absolute value than the original one. */
7968 if (!(flag_wrapv || flag_trapv)
7969 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7970 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7971 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7972 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
7973 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
7974 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7975 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
7977 tree const1 = TREE_OPERAND (arg0, 1);
7978 tree const2 = TREE_OPERAND (arg1, 1);
7979 tree variable1 = TREE_OPERAND (arg0, 0);
7980 tree variable2 = TREE_OPERAND (arg1, 0);
7983 /* Put the constant on the side where it doesn't overflow and is
7984 of lower absolute value than before. */
7985 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7986 ? MINUS_EXPR : PLUS_EXPR,
7988 if (!TREE_OVERFLOW (cst)
7989 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
7990 return fold_build2 (code, type,
7992 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
7995 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7996 ? MINUS_EXPR : PLUS_EXPR,
7998 if (!TREE_OVERFLOW (cst)
7999 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8000 return fold_build2 (code, type,
8001 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8006 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8010 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8012 tree targ0 = strip_float_extensions (arg0);
8013 tree targ1 = strip_float_extensions (arg1);
8014 tree newtype = TREE_TYPE (targ0);
8016 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8017 newtype = TREE_TYPE (targ1);
8019 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8020 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8021 return fold_build2 (code, type, fold_convert (newtype, targ0),
8022 fold_convert (newtype, targ1));
8024 /* (-a) CMP (-b) -> b CMP a */
8025 if (TREE_CODE (arg0) == NEGATE_EXPR
8026 && TREE_CODE (arg1) == NEGATE_EXPR)
8027 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8028 TREE_OPERAND (arg0, 0));
8030 if (TREE_CODE (arg1) == REAL_CST)
8032 REAL_VALUE_TYPE cst;
8033 cst = TREE_REAL_CST (arg1);
8035 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8036 if (TREE_CODE (arg0) == NEGATE_EXPR)
8037 return fold_build2 (swap_tree_comparison (code), type,
8038 TREE_OPERAND (arg0, 0),
8039 build_real (TREE_TYPE (arg1),
8040 REAL_VALUE_NEGATE (cst)));
8042 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8043 /* a CMP (-0) -> a CMP 0 */
8044 if (REAL_VALUE_MINUS_ZERO (cst))
8045 return fold_build2 (code, type, arg0,
8046 build_real (TREE_TYPE (arg1), dconst0));
8048 /* x != NaN is always true, other ops are always false. */
8049 if (REAL_VALUE_ISNAN (cst)
8050 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8052 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8053 return omit_one_operand (type, tem, arg0);
8056 /* Fold comparisons against infinity. */
8057 if (REAL_VALUE_ISINF (cst))
8059 tem = fold_inf_compare (code, type, arg0, arg1);
8060 if (tem != NULL_TREE)
8065 /* If this is a comparison of a real constant with a PLUS_EXPR
8066 or a MINUS_EXPR of a real constant, we can convert it into a
8067 comparison with a revised real constant as long as no overflow
8068 occurs when unsafe_math_optimizations are enabled. */
8069 if (flag_unsafe_math_optimizations
8070 && TREE_CODE (arg1) == REAL_CST
8071 && (TREE_CODE (arg0) == PLUS_EXPR
8072 || TREE_CODE (arg0) == MINUS_EXPR)
8073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8074 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8075 ? MINUS_EXPR : PLUS_EXPR,
8076 arg1, TREE_OPERAND (arg0, 1), 0))
8077 && ! TREE_CONSTANT_OVERFLOW (tem))
8078 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8080 /* Likewise, we can simplify a comparison of a real constant with
8081 a MINUS_EXPR whose first operand is also a real constant, i.e.
8082 (c1 - x) < c2 becomes x > c1-c2. */
8083 if (flag_unsafe_math_optimizations
8084 && TREE_CODE (arg1) == REAL_CST
8085 && TREE_CODE (arg0) == MINUS_EXPR
8086 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8087 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8089 && ! TREE_CONSTANT_OVERFLOW (tem))
8090 return fold_build2 (swap_tree_comparison (code), type,
8091 TREE_OPERAND (arg0, 1), tem);
8093 /* Fold comparisons against built-in math functions. */
8094 if (TREE_CODE (arg1) == REAL_CST
8095 && flag_unsafe_math_optimizations
8096 && ! flag_errno_math)
8098 enum built_in_function fcode = builtin_mathfn_code (arg0);
8100 if (fcode != END_BUILTINS)
8102 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8103 if (tem != NULL_TREE)
8109 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8110 if (TREE_CONSTANT (arg1)
8111 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8112 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8113 /* This optimization is invalid for ordered comparisons
8114 if CONST+INCR overflows or if foo+incr might overflow.
8115 This optimization is invalid for floating point due to rounding.
8116 For pointer types we assume overflow doesn't happen. */
8117 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8118 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8119 && (code == EQ_EXPR || code == NE_EXPR))))
8121 tree varop, newconst;
8123 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8125 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8126 arg1, TREE_OPERAND (arg0, 1));
8127 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8128 TREE_OPERAND (arg0, 0),
8129 TREE_OPERAND (arg0, 1));
8133 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8134 arg1, TREE_OPERAND (arg0, 1));
8135 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8136 TREE_OPERAND (arg0, 0),
8137 TREE_OPERAND (arg0, 1));
8141 /* If VAROP is a reference to a bitfield, we must mask
8142 the constant by the width of the field. */
8143 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8144 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8145 && host_integerp (DECL_SIZE (TREE_OPERAND
8146 (TREE_OPERAND (varop, 0), 1)), 1))
8148 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8149 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8150 tree folded_compare, shift;
8152 /* First check whether the comparison would come out
8153 always the same. If we don't do that we would
8154 change the meaning with the masking. */
8155 folded_compare = fold_build2 (code, type,
8156 TREE_OPERAND (varop, 0), arg1);
8157 if (TREE_CODE (folded_compare) == INTEGER_CST)
8158 return omit_one_operand (type, folded_compare, varop);
8160 shift = build_int_cst (NULL_TREE,
8161 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8162 shift = fold_convert (TREE_TYPE (varop), shift);
8163 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8165 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8169 return fold_build2 (code, type, varop, newconst);
8172 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8173 && (TREE_CODE (arg0) == NOP_EXPR
8174 || TREE_CODE (arg0) == CONVERT_EXPR))
8176 /* If we are widening one operand of an integer comparison,
8177 see if the other operand is similarly being widened. Perhaps we
8178 can do the comparison in the narrower type. */
8179 tem = fold_widened_comparison (code, type, arg0, arg1);
8183 /* Or if we are changing signedness. */
8184 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8189 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8190 constant, we can simplify it. */
8191 if (TREE_CODE (arg1) == INTEGER_CST
8192 && (TREE_CODE (arg0) == MIN_EXPR
8193 || TREE_CODE (arg0) == MAX_EXPR)
8194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8196 tem = optimize_minmax_comparison (code, type, op0, op1);
8201 /* Simplify comparison of something with itself. (For IEEE
8202 floating-point, we can only do some of these simplifications.) */
8203 if (operand_equal_p (arg0, arg1, 0))
8208 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8209 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8210 return constant_boolean_node (1, type);
8215 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8216 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8217 return constant_boolean_node (1, type);
8218 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8221 /* For NE, we can only do this simplification if the type is integer
8222 or we don't honor IEEE floating point NaNs. */
8223 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8224 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8226 /* ... fall through ... */
8229 return constant_boolean_node (0, type);
8235 /* If we are comparing an expression that just has comparisons
8236 of two integer values, arithmetic expressions of those comparisons,
8237 and constants, we can simplify it. There are only three cases
8238 to check: the two values can either be equal, the first can be
8239 greater, or the second can be greater. Fold the expression for
8240 those three values. Since each value must be 0 or 1, we have
8241 eight possibilities, each of which corresponds to the constant 0
8242 or 1 or one of the six possible comparisons.
8244 This handles common cases like (a > b) == 0 but also handles
8245 expressions like ((x > y) - (y > x)) > 0, which supposedly
8246 occur in macroized code. */
8248 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8250 tree cval1 = 0, cval2 = 0;
8253 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8254 /* Don't handle degenerate cases here; they should already
8255 have been handled anyway. */
8256 && cval1 != 0 && cval2 != 0
8257 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8258 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8259 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8260 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8261 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8262 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8263 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8265 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8266 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8268 /* We can't just pass T to eval_subst in case cval1 or cval2
8269 was the same as ARG1. */
8272 = fold_build2 (code, type,
8273 eval_subst (arg0, cval1, maxval,
8277 = fold_build2 (code, type,
8278 eval_subst (arg0, cval1, maxval,
8282 = fold_build2 (code, type,
8283 eval_subst (arg0, cval1, minval,
8287 /* All three of these results should be 0 or 1. Confirm they are.
8288 Then use those values to select the proper code to use. */
8290 if (TREE_CODE (high_result) == INTEGER_CST
8291 && TREE_CODE (equal_result) == INTEGER_CST
8292 && TREE_CODE (low_result) == INTEGER_CST)
8294 /* Make a 3-bit mask with the high-order bit being the
8295 value for `>', the next for `=', and the low for `<'.  */
8296 switch ((integer_onep (high_result) * 4)
8297 + (integer_onep (equal_result) * 2)
8298 + integer_onep (low_result))
8302 return omit_one_operand (type, integer_zero_node, arg0);
8323 return omit_one_operand (type, integer_one_node, arg0);
8327 return save_expr (build2 (code, type, cval1, cval2));
8328 return fold_build2 (code, type, cval1, cval2);
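/* For instance, in ((x > y) - (y > x)) > 0 the three evaluations give
   1 - 0 > 0, 0 - 0 > 0 and 0 - 1 > 0, i.e. true, false, false; that is
   mask 100, so the whole expression folds to x > y.  */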
8333 /* Fold a comparison of the address of COMPONENT_REFs with the same
8334 type and component to a comparison of the address of the base
8335 object. In short, &x->a OP &y->a to x OP y and
8336 &x->a OP &y.a to x OP &y */
8337 if (TREE_CODE (arg0) == ADDR_EXPR
8338 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8339 && TREE_CODE (arg1) == ADDR_EXPR
8340 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8342 tree cref0 = TREE_OPERAND (arg0, 0);
8343 tree cref1 = TREE_OPERAND (arg1, 0);
8344 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8346 tree op0 = TREE_OPERAND (cref0, 0);
8347 tree op1 = TREE_OPERAND (cref1, 0);
8348 return fold_build2 (code, type,
8349 build_fold_addr_expr (op0),
8350 build_fold_addr_expr (op1));
8354 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8355 into a single range test. */
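/* For instance, for unsigned x, x / 4 == 2 holds exactly when
   8 <= x && x <= 11, so the division disappears.  */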
8356 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8357 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8358 && TREE_CODE (arg1) == INTEGER_CST
8359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8360 && !integer_zerop (TREE_OPERAND (arg0, 1))
8361 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8362 && !TREE_OVERFLOW (arg1))
8364 tem = fold_div_compare (code, type, arg0, arg1);
8365 if (tem != NULL_TREE)
8369 /* Fold ~X op ~Y as Y op X. */
8370 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8371 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8372 return fold_build2 (code, type,
8373 TREE_OPERAND (arg1, 0),
8374 TREE_OPERAND (arg0, 0));
8376 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8377 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8378 && TREE_CODE (arg1) == INTEGER_CST)
8379 return fold_build2 (swap_tree_comparison (code), type,
8380 TREE_OPERAND (arg0, 0),
8381 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
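/* Both transformations rely on ~X being -X - 1, which reverses the
   ordering: for signed ints, ~x < ~y holds exactly when y < x, and
   ~x < 5 exactly when x > ~5, i.e. x > -6.  */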
8387 /* Subroutine of fold_binary. Optimize complex multiplications of the
8388 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8389 argument EXPR represents the expression "z" of type TYPE. */
8392 fold_mult_zconjz (tree type, tree expr)
8394 tree itype = TREE_TYPE (type);
8395 tree rpart, ipart, tem;
8397 if (TREE_CODE (expr) == COMPLEX_EXPR)
8399 rpart = TREE_OPERAND (expr, 0);
8400 ipart = TREE_OPERAND (expr, 1);
8402 else if (TREE_CODE (expr) == COMPLEX_CST)
8404 rpart = TREE_REALPART (expr);
8405 ipart = TREE_IMAGPART (expr);
8409 expr = save_expr (expr);
8410 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8411 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8414 rpart = save_expr (rpart);
8415 ipart = save_expr (ipart);
8416 tem = fold_build2 (PLUS_EXPR, itype,
8417 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8418 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8419 return fold_build2 (COMPLEX_EXPR, type, tem,
8420 fold_convert (itype, integer_zero_node));
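/* For z = a + b*i we have conj(z) = a - b*i, so z * conj(z)
   = a*a + b*b + 0*i; hence the real part built above and the
   explicitly zero imaginary part.  */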
8424 /* Fold a binary expression of code CODE and type TYPE with operands
8425 OP0 and OP1. Return the folded expression if folding is
8426 successful. Otherwise, return NULL_TREE. */
8429 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8431 enum tree_code_class kind = TREE_CODE_CLASS (code);
8432 tree arg0, arg1, tem;
8433 tree t1 = NULL_TREE;
8435 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8436 && TREE_CODE_LENGTH (code) == 2
8438 && op1 != NULL_TREE);
8443 /* Strip any conversions that don't change the mode. This is
8444 safe for every expression, except for a comparison expression
8445 because its signedness is derived from its operands. So, in
8446 the latter case, only strip conversions that don't change the
8449 Note that this is done as an internal manipulation within the
8450 constant folder, in order to find the simplest representation
8451 of the arguments so that their form can be studied. In any
8452 case, the appropriate type conversions should be put back in
8453 the tree that will get out of the constant folder. */
8455 if (kind == tcc_comparison)
8457 STRIP_SIGN_NOPS (arg0);
8458 STRIP_SIGN_NOPS (arg1);
8466 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8467 constant but we can't do arithmetic on them. */
8468 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8469 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8470 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8471 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8473 if (kind == tcc_binary)
8474 tem = const_binop (code, arg0, arg1, 0);
8475 else if (kind == tcc_comparison)
8476 tem = fold_relational_const (code, type, arg0, arg1);
8480 if (tem != NULL_TREE)
8482 if (TREE_TYPE (tem) != type)
8483 tem = fold_convert (type, tem);
8488 /* If this is a commutative operation, and ARG0 is a constant, move it
8489 to ARG1 to reduce the number of tests below. */
8490 if (commutative_tree_code (code)
8491 && tree_swap_operands_p (arg0, arg1, true))
8492 return fold_build2 (code, type, op1, op0);
8494 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8496 First check for cases where an arithmetic operation is applied to a
8497 compound, conditional, or comparison operation. Push the arithmetic
8498 operation inside the compound or conditional to see if any folding
8499 can then be done. Convert comparison to conditional for this purpose.
8500 This also optimizes non-constant cases that used to be done in
8503 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8504 one of the operands is a comparison and the other is a comparison, a
8505 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8506 code below would make the expression more complex. Change it to a
8507 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8508 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8510 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8511 || code == EQ_EXPR || code == NE_EXPR)
8512 && ((truth_value_p (TREE_CODE (arg0))
8513 && (truth_value_p (TREE_CODE (arg1))
8514 || (TREE_CODE (arg1) == BIT_AND_EXPR
8515 && integer_onep (TREE_OPERAND (arg1, 1)))))
8516 || (truth_value_p (TREE_CODE (arg1))
8517 && (truth_value_p (TREE_CODE (arg0))
8518 || (TREE_CODE (arg0) == BIT_AND_EXPR
8519 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8521 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8522 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8525 fold_convert (boolean_type_node, arg0),
8526 fold_convert (boolean_type_node, arg1));
8528 if (code == EQ_EXPR)
8529 tem = invert_truthvalue (tem);
8531 return fold_convert (type, tem);
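/* For instance, with int operands, (a < b) & (c != 0) becomes the
   TRUTH_AND_EXPR (a < b) && (c != 0), (a < b) != (c != 0) becomes a
   TRUTH_XOR_EXPR, and (a < b) == (c != 0) becomes the inverted
   TRUTH_XOR_EXPR, since both operands are known to be 0 or 1.  */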
8534 if (TREE_CODE_CLASS (code) == tcc_binary
8535 || TREE_CODE_CLASS (code) == tcc_comparison)
8537 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8538 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8539 fold_build2 (code, type,
8540 TREE_OPERAND (arg0, 1), op1));
8541 if (TREE_CODE (arg1) == COMPOUND_EXPR
8542 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8543 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8544 fold_build2 (code, type,
8545 op0, TREE_OPERAND (arg1, 1)));
8547 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8549 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8551 /*cond_first_p=*/1);
8552 if (tem != NULL_TREE)
8556 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8558 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8560 /*cond_first_p=*/0);
8561 if (tem != NULL_TREE)
8569 /* A + (-B) -> A - B */
8570 if (TREE_CODE (arg1) == NEGATE_EXPR)
8571 return fold_build2 (MINUS_EXPR, type,
8572 fold_convert (type, arg0),
8573 fold_convert (type, TREE_OPERAND (arg1, 0)));
8574 /* (-A) + B -> B - A */
8575 if (TREE_CODE (arg0) == NEGATE_EXPR
8576 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8577 return fold_build2 (MINUS_EXPR, type,
8578 fold_convert (type, arg1),
8579 fold_convert (type, TREE_OPERAND (arg0, 0)));
8580 /* Convert ~A + 1 to -A. */
8581 if (INTEGRAL_TYPE_P (type)
8582 && TREE_CODE (arg0) == BIT_NOT_EXPR
8583 && integer_onep (arg1))
8584 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
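/* The last fold is the two's complement identity -A == ~A + 1;
   e.g. ~x + 1 simplifies to -x for integral x.  */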
8586 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8588 if ((TREE_CODE (arg0) == MULT_EXPR
8589 || TREE_CODE (arg1) == MULT_EXPR)
8590 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8592 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8597 if (! FLOAT_TYPE_P (type))
8599 if (integer_zerop (arg1))
8600 return non_lvalue (fold_convert (type, arg0));
8602 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8603 with a constant, and the two constants have no bits in common,
8604 we should treat this as a BIT_IOR_EXPR since this may produce more
8606 if (TREE_CODE (arg0) == BIT_AND_EXPR
8607 && TREE_CODE (arg1) == BIT_AND_EXPR
8608 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8609 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8610 && integer_zerop (const_binop (BIT_AND_EXPR,
8611 TREE_OPERAND (arg0, 1),
8612 TREE_OPERAND (arg1, 1), 0)))
8614 code = BIT_IOR_EXPR;
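/* For instance, (x & 0xF0) + (y & 0x0F): the two masks share no bits,
   so no carries can occur and the sum equals (x & 0xF0) | (y & 0x0F).  */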
8618 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8619 (plus (plus (mult) (mult)) (foo)) so that we can
8620 take advantage of the factoring cases below. */
8621 if (((TREE_CODE (arg0) == PLUS_EXPR
8622 || TREE_CODE (arg0) == MINUS_EXPR)
8623 && TREE_CODE (arg1) == MULT_EXPR)
8624 || ((TREE_CODE (arg1) == PLUS_EXPR
8625 || TREE_CODE (arg1) == MINUS_EXPR)
8626 && TREE_CODE (arg0) == MULT_EXPR))
8628 tree parg0, parg1, parg, marg;
8629 enum tree_code pcode;
8631 if (TREE_CODE (arg1) == MULT_EXPR)
8632 parg = arg0, marg = arg1;
8634 parg = arg1, marg = arg0;
8635 pcode = TREE_CODE (parg);
8636 parg0 = TREE_OPERAND (parg, 0);
8637 parg1 = TREE_OPERAND (parg, 1);
8641 if (TREE_CODE (parg0) == MULT_EXPR
8642 && TREE_CODE (parg1) != MULT_EXPR)
8643 return fold_build2 (pcode, type,
8644 fold_build2 (PLUS_EXPR, type,
8645 fold_convert (type, parg0),
8646 fold_convert (type, marg)),
8647 fold_convert (type, parg1));
8648 if (TREE_CODE (parg0) != MULT_EXPR
8649 && TREE_CODE (parg1) == MULT_EXPR)
8650 return fold_build2 (PLUS_EXPR, type,
8651 fold_convert (type, parg0),
8652 fold_build2 (pcode, type,
8653 fold_convert (type, marg),
8658 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8659 of the array. The loop optimizer sometimes produces this type of
8661 if (TREE_CODE (arg0) == ADDR_EXPR)
8663 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8665 return fold_convert (type, tem);
8667 else if (TREE_CODE (arg1) == ADDR_EXPR)
8669 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8671 return fold_convert (type, tem);
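/* For instance, assuming a is an array of 4-byte ints, the address
   arithmetic &a[i] + 4 * j is rewritten as &a[i + j].  */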
8676 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8677 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8678 return non_lvalue (fold_convert (type, arg0));
8680 /* Likewise if the operands are reversed. */
8681 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8682 return non_lvalue (fold_convert (type, arg1));
8684 /* Convert X + -C into X - C. */
8685 if (TREE_CODE (arg1) == REAL_CST
8686 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8688 tem = fold_negate_const (arg1, type);
8689 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8690 return fold_build2 (MINUS_EXPR, type,
8691 fold_convert (type, arg0),
8692 fold_convert (type, tem));
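/* For instance, x + -3.0 becomes x - 3.0.  The zero-addition folds
   above are guarded because x + 0.0 is not x when x is -0.0 (the sum
   is +0.0 under the default rounding mode).  */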
8695 if (flag_unsafe_math_optimizations
8696 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8697 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8698 && (tem = distribute_real_division (code, type, arg0, arg1)))
8701 /* Convert x+x into x*2.0. */
8702 if (operand_equal_p (arg0, arg1, 0)
8703 && SCALAR_FLOAT_TYPE_P (type))
8704 return fold_build2 (MULT_EXPR, type, arg0,
8705 build_real (type, dconst2));
8707 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8708 if (flag_unsafe_math_optimizations
8709 && TREE_CODE (arg1) == PLUS_EXPR
8710 && TREE_CODE (arg0) != MULT_EXPR)
8712 tree tree10 = TREE_OPERAND (arg1, 0);
8713 tree tree11 = TREE_OPERAND (arg1, 1);
8714 if (TREE_CODE (tree11) == MULT_EXPR
8715 && TREE_CODE (tree10) == MULT_EXPR)
8718 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8719 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8722 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8723 if (flag_unsafe_math_optimizations
8724 && TREE_CODE (arg0) == PLUS_EXPR
8725 && TREE_CODE (arg1) != MULT_EXPR)
8727 tree tree00 = TREE_OPERAND (arg0, 0);
8728 tree tree01 = TREE_OPERAND (arg0, 1);
8729 if (TREE_CODE (tree01) == MULT_EXPR
8730 && TREE_CODE (tree00) == MULT_EXPR)
8733 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8734 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8740 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8741 is a rotate of A by C1 bits. */
8742 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8743 is a rotate of A by B bits. */
8745 enum tree_code code0, code1;
8746 code0 = TREE_CODE (arg0);
8747 code1 = TREE_CODE (arg1);
8748 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8749 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8750 && operand_equal_p (TREE_OPERAND (arg0, 0),
8751 TREE_OPERAND (arg1, 0), 0)
8752 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8754 tree tree01, tree11;
8755 enum tree_code code01, code11;
8757 tree01 = TREE_OPERAND (arg0, 1);
8758 tree11 = TREE_OPERAND (arg1, 1);
8759 STRIP_NOPS (tree01);
8760 STRIP_NOPS (tree11);
8761 code01 = TREE_CODE (tree01);
8762 code11 = TREE_CODE (tree11);
8763 if (code01 == INTEGER_CST
8764 && code11 == INTEGER_CST
8765 && TREE_INT_CST_HIGH (tree01) == 0
8766 && TREE_INT_CST_HIGH (tree11) == 0
8767 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8768 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8769 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8770 code0 == LSHIFT_EXPR ? tree01 : tree11);
8771 else if (code11 == MINUS_EXPR)
8773 tree tree110, tree111;
8774 tree110 = TREE_OPERAND (tree11, 0);
8775 tree111 = TREE_OPERAND (tree11, 1);
8776 STRIP_NOPS (tree110);
8777 STRIP_NOPS (tree111);
8778 if (TREE_CODE (tree110) == INTEGER_CST
8779 && 0 == compare_tree_int (tree110,
8781 (TREE_TYPE (TREE_OPERAND
8783 && operand_equal_p (tree01, tree111, 0))
8784 return build2 ((code0 == LSHIFT_EXPR
8787 type, TREE_OPERAND (arg0, 0), tree01);
8789 else if (code01 == MINUS_EXPR)
8791 tree tree010, tree011;
8792 tree010 = TREE_OPERAND (tree01, 0);
8793 tree011 = TREE_OPERAND (tree01, 1);
8794 STRIP_NOPS (tree010);
8795 STRIP_NOPS (tree011);
8796 if (TREE_CODE (tree010) == INTEGER_CST
8797 && 0 == compare_tree_int (tree010,
8799 (TREE_TYPE (TREE_OPERAND
8801 && operand_equal_p (tree11, tree011, 0))
8802 return build2 ((code0 != LSHIFT_EXPR
8805 type, TREE_OPERAND (arg0, 0), tree11);
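/* For instance, assuming a 32-bit unsigned int x and 0 < n < 32,
   (x << 3) + (x >> 29) is recognized as a left rotate of x by 3 bits,
   and (x << n) + (x >> (32 - n)) as a left rotate by n.  */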
8811 /* In most languages, we can't associate operations on floats through
8812 parentheses. Rather than remember where the parentheses were, we
8813 don't associate floats at all, unless the user has specified
8814 -funsafe-math-optimizations. */
8816 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8818 tree var0, con0, lit0, minus_lit0;
8819 tree var1, con1, lit1, minus_lit1;
8821 /* Split both trees into variables, constants, and literals. Then
8822 associate each group together, the constants with literals,
8823 then the result with variables. This increases the chances of
8824 literals being recombined later and of generating relocatable
8825 expressions for the sum of a constant and literal. */
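/* For instance, (x + 1) + (y + 2) splits into the variables x and y
   and the literals 1 and 2; the pieces are then regrouped as
   (x + y) + 3.  */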
8826 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8827 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8828 code == MINUS_EXPR);
8830 /* Only do something if we found more than two objects. Otherwise,
8831 nothing has changed and we risk infinite recursion. */
8832 if (2 < ((var0 != 0) + (var1 != 0)
8833 + (con0 != 0) + (con1 != 0)
8834 + (lit0 != 0) + (lit1 != 0)
8835 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8837 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8838 if (code == MINUS_EXPR)
8841 var0 = associate_trees (var0, var1, code, type);
8842 con0 = associate_trees (con0, con1, code, type);
8843 lit0 = associate_trees (lit0, lit1, code, type);
8844 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8846 /* Preserve the MINUS_EXPR if the negative part of the literal is
8847 greater than the positive part. Otherwise, the multiplicative
8848 folding code (i.e. extract_muldiv) may be fooled in case
8849 unsigned constants are subtracted, like in the following
8850 example: ((X*2 + 4) - 8U)/2. */
8851 if (minus_lit0 && lit0)
8853 if (TREE_CODE (lit0) == INTEGER_CST
8854 && TREE_CODE (minus_lit0) == INTEGER_CST
8855 && tree_int_cst_lt (lit0, minus_lit0))
8857 minus_lit0 = associate_trees (minus_lit0, lit0,
8863 lit0 = associate_trees (lit0, minus_lit0,
8871 return fold_convert (type,
8872 associate_trees (var0, minus_lit0,
8876 con0 = associate_trees (con0, minus_lit0,
8878 return fold_convert (type,
8879 associate_trees (var0, con0,
8884 con0 = associate_trees (con0, lit0, code, type);
8885 return fold_convert (type, associate_trees (var0, con0,
8893 /* A - (-B) -> A + B */
8894 if (TREE_CODE (arg1) == NEGATE_EXPR)
8895 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8896 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8897 if (TREE_CODE (arg0) == NEGATE_EXPR
8898 && (FLOAT_TYPE_P (type)
8899 || INTEGRAL_TYPE_P (type))
8900 && negate_expr_p (arg1)
8901 && reorder_operands_p (arg0, arg1))
8902 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8903 TREE_OPERAND (arg0, 0));
8904 /* Convert -A - 1 to ~A. */
8905 if (INTEGRAL_TYPE_P (type)
8906 && TREE_CODE (arg0) == NEGATE_EXPR
8907 && integer_onep (arg1))
8908 return fold_build1 (BIT_NOT_EXPR, type,
8909 fold_convert (type, TREE_OPERAND (arg0, 0)));
8911 /* Convert -1 - A to ~A. */
8912 if (INTEGRAL_TYPE_P (type)
8913 && integer_all_onesp (arg0))
8914 return fold_build1 (BIT_NOT_EXPR, type, arg1);
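/* Both of the previous folds use the two's complement identity
   ~A == -A - 1: e.g. -x - 1 and -1 - x both simplify to ~x.  */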
8916 if (! FLOAT_TYPE_P (type))
8918 if (integer_zerop (arg0))
8919 return negate_expr (fold_convert (type, arg1));
8920 if (integer_zerop (arg1))
8921 return non_lvalue (fold_convert (type, arg0));
8923 /* Fold A - (A & B) into ~B & A. */
8924 if (!TREE_SIDE_EFFECTS (arg0)
8925 && TREE_CODE (arg1) == BIT_AND_EXPR)
8927 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8928 return fold_build2 (BIT_AND_EXPR, type,
8929 fold_build1 (BIT_NOT_EXPR, type,
8930 TREE_OPERAND (arg1, 0)),
8932 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8933 return fold_build2 (BIT_AND_EXPR, type,
8934 fold_build1 (BIT_NOT_EXPR, type,
8935 TREE_OPERAND (arg1, 1)),
8939 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8940 any power of 2 minus 1. */
8941 if (TREE_CODE (arg0) == BIT_AND_EXPR
8942 && TREE_CODE (arg1) == BIT_AND_EXPR
8943 && operand_equal_p (TREE_OPERAND (arg0, 0),
8944 TREE_OPERAND (arg1, 0), 0))
8946 tree mask0 = TREE_OPERAND (arg0, 1);
8947 tree mask1 = TREE_OPERAND (arg1, 1);
8948 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8950 if (operand_equal_p (tem, mask1, 0))
8952 tem = fold_build2 (BIT_XOR_EXPR, type,
8953 TREE_OPERAND (arg0, 0), mask1);
8954 return fold_build2 (MINUS_EXPR, type, tem, mask1);
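/* For instance, with B == 15 (a power of 2 minus 1),
   (a & ~15) - (a & 15) equals (a ^ 15) - 15; and, more simply,
   a - (a & 15) equals a & ~15, since the subtrahend is exactly the
   low four bits of a.  */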
8959 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8960 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8961 return non_lvalue (fold_convert (type, arg0));
8963 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8964 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8965 (-ARG1 + ARG0) reduces to -ARG1. */
8966 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8967 return negate_expr (fold_convert (type, arg1));
8969 /* Fold &x - &x. This can happen from &x.foo - &x.
8970 This is unsafe for certain floats even in non-IEEE formats.
8971 In IEEE, it is unsafe because it gives the wrong result for NaNs.
8972 Also note that operand_equal_p is always false if an operand
8975 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8976 && operand_equal_p (arg0, arg1, 0))
8977 return fold_convert (type, integer_zero_node);
8979 /* A - B -> A + (-B) if B is easily negatable. */
8980 if (negate_expr_p (arg1)
8981 && ((FLOAT_TYPE_P (type)
8982 /* Avoid this transformation if B is a positive REAL_CST. */
8983 && (TREE_CODE (arg1) != REAL_CST
8984 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8985 || INTEGRAL_TYPE_P (type)))
8986 return fold_build2 (PLUS_EXPR, type,
8987 fold_convert (type, arg0),
8988 fold_convert (type, negate_expr (arg1)));
8990 /* Try folding difference of addresses. */
8994 if ((TREE_CODE (arg0) == ADDR_EXPR
8995 || TREE_CODE (arg1) == ADDR_EXPR)
8996 && ptr_difference_const (arg0, arg1, &diff))
8997 return build_int_cst_type (type, diff);
9000 /* Fold &a[i] - &a[j] to i-j. */
9001 if (TREE_CODE (arg0) == ADDR_EXPR
9002 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9003 && TREE_CODE (arg1) == ADDR_EXPR
9004 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9006 tree aref0 = TREE_OPERAND (arg0, 0);
9007 tree aref1 = TREE_OPERAND (arg1, 0);
9008 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9009 TREE_OPERAND (aref1, 0), 0))
9011 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9012 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9013 tree esz = array_ref_element_size (aref0);
9014 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9015 return fold_build2 (MULT_EXPR, type, diff,
9016 fold_convert (type, esz));
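/* For instance, assuming 4-byte elements, the address difference
   &a[i] - &a[j] becomes (i - j) * 4.  */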
9021 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9022 of the array. The loop optimizer sometimes produces this type of
9024 if (TREE_CODE (arg0) == ADDR_EXPR)
9026 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9028 return fold_convert (type, tem);
9031 if (flag_unsafe_math_optimizations
9032 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9033 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9034 && (tem = distribute_real_division (code, type, arg0, arg1)))
9037 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9039 if ((TREE_CODE (arg0) == MULT_EXPR
9040 || TREE_CODE (arg1) == MULT_EXPR)
9041 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9043 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9051 /* (-A) * (-B) -> A * B */
9052 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9053 return fold_build2 (MULT_EXPR, type,
9054 fold_convert (type, TREE_OPERAND (arg0, 0)),
9055 fold_convert (type, negate_expr (arg1)));
9056 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9057 return fold_build2 (MULT_EXPR, type,
9058 fold_convert (type, negate_expr (arg0)),
9059 fold_convert (type, TREE_OPERAND (arg1, 0)));
9061 if (! FLOAT_TYPE_P (type))
9063 if (integer_zerop (arg1))
9064 return omit_one_operand (type, arg1, arg0);
9065 if (integer_onep (arg1))
9066 return non_lvalue (fold_convert (type, arg0));
9067 /* Transform x * -1 into -x. */
9068 if (integer_all_onesp (arg1))
9069 return fold_convert (type, negate_expr (arg0));
9070 /* Transform x * -C into -x * C if x is easily negatable. */
9071 if (TREE_CODE (arg1) == INTEGER_CST
9072 && tree_int_cst_sgn (arg1) == -1
9073 && negate_expr_p (arg0)
9074 && (tem = negate_expr (arg1)) != arg1
9075 && !TREE_OVERFLOW (tem))
9076 return fold_build2 (MULT_EXPR, type,
9077 negate_expr (arg0), tem);
9079 /* (a * (1 << b)) is (a << b) */
9080 if (TREE_CODE (arg1) == LSHIFT_EXPR
9081 && integer_onep (TREE_OPERAND (arg1, 0)))
9082 return fold_build2 (LSHIFT_EXPR, type, arg0,
9083 TREE_OPERAND (arg1, 1));
9084 if (TREE_CODE (arg0) == LSHIFT_EXPR
9085 && integer_onep (TREE_OPERAND (arg0, 0)))
9086 return fold_build2 (LSHIFT_EXPR, type, arg1,
9087 TREE_OPERAND (arg0, 1));
9089 if (TREE_CODE (arg1) == INTEGER_CST
9090 && 0 != (tem = extract_muldiv (op0,
9091 fold_convert (type, arg1),
9093 return fold_convert (type, tem);
9095 /* Optimize z * conj(z) for integer complex numbers. */
9096 if (TREE_CODE (arg0) == CONJ_EXPR
9097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9098 return fold_mult_zconjz (type, arg1);
9099 if (TREE_CODE (arg1) == CONJ_EXPR
9100 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9101 return fold_mult_zconjz (type, arg0);
9105 /* Maybe fold x * 0 to 0. The expressions aren't the same
9106 when x is NaN, since x * 0 is also NaN. Nor are they the
9107 same in modes with signed zeros, since multiplying a
9108 negative value by 0 gives -0, not +0. */
9109 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9110 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9111 && real_zerop (arg1))
9112 return omit_one_operand (type, arg1, arg0);
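/* For instance, -3.0 * 0.0 is -0.0 and NaN * 0.0 is NaN, so the fold
   to +0.0 is only done when neither signed zeros nor NaNs are
   honored.  */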
9113 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9114 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9115 && real_onep (arg1))
9116 return non_lvalue (fold_convert (type, arg0));
9118 /* Transform x * -1.0 into -x. */
9119 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9120 && real_minus_onep (arg1))
9121 return fold_convert (type, negate_expr (arg0));
9123 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9124 if (flag_unsafe_math_optimizations
9125 && TREE_CODE (arg0) == RDIV_EXPR
9126 && TREE_CODE (arg1) == REAL_CST
9127 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9129 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9132 return fold_build2 (RDIV_EXPR, type, tem,
9133 TREE_OPERAND (arg0, 1));
9136 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9137 if (operand_equal_p (arg0, arg1, 0))
9139 tree tem = fold_strip_sign_ops (arg0);
9140 if (tem != NULL_TREE)
9142 tem = fold_convert (type, tem);
9143 return fold_build2 (MULT_EXPR, type, tem, tem);
9147 /* Optimize z * conj(z) for floating point complex numbers.
9148 Guarded by flag_unsafe_math_optimizations as non-finite
9149 imaginary components don't produce scalar results. */
9150 if (flag_unsafe_math_optimizations
9151 && TREE_CODE (arg0) == CONJ_EXPR
9152 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9153 return fold_mult_zconjz (type, arg1);
9154 if (flag_unsafe_math_optimizations
9155 && TREE_CODE (arg1) == CONJ_EXPR
9156 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9157 return fold_mult_zconjz (type, arg0);
9159 if (flag_unsafe_math_optimizations)
9161 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9162 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9164 /* Optimizations of root(...)*root(...). */
9165 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9167 tree rootfn, arg, arglist;
9168 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9169 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9171 /* Optimize sqrt(x)*sqrt(x) as x. */
9172 if (BUILTIN_SQRT_P (fcode0)
9173 && operand_equal_p (arg00, arg10, 0)
9174 && ! HONOR_SNANS (TYPE_MODE (type)))
9177 /* Optimize root(x)*root(y) as root(x*y). */
9178 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9179 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9180 arglist = build_tree_list (NULL_TREE, arg);
9181 return build_function_call_expr (rootfn, arglist);
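/* For instance, sqrt(x) * sqrt(y) becomes sqrt(x * y); this is only
   valid under -funsafe-math-optimizations, e.g. for x == y == -1.0
   the original is NaN but sqrt(1.0) is 1.0.  */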
9184 /* Optimize expN(x)*expN(y) as expN(x+y). */
9185 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9187 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9188 tree arg = fold_build2 (PLUS_EXPR, type,
9189 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9190 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9191 tree arglist = build_tree_list (NULL_TREE, arg);
9192 return build_function_call_expr (expfn, arglist);
9195 /* Optimizations of pow(...)*pow(...). */
9196 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9197 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9198 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9200 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9201 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9203 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9204 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9207 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9208 if (operand_equal_p (arg01, arg11, 0))
9210 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9211 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9212 tree arglist = tree_cons (NULL_TREE, arg,
9213 build_tree_list (NULL_TREE,
9215 return build_function_call_expr (powfn, arglist);
9218 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9219 if (operand_equal_p (arg00, arg10, 0))
9221 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9222 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9223 tree arglist = tree_cons (NULL_TREE, arg00,
9224 build_tree_list (NULL_TREE,
9226 return build_function_call_expr (powfn, arglist);
9230 /* Optimize tan(x)*cos(x) as sin(x). */
9231 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9232 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9233 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9234 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9235 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9236 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9237 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9238 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9240 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9242 if (sinfn != NULL_TREE)
9243 return build_function_call_expr (sinfn,
9244 TREE_OPERAND (arg0, 1));
9247 /* Optimize x*pow(x,c) as pow(x,c+1). */
9248 if (fcode1 == BUILT_IN_POW
9249 || fcode1 == BUILT_IN_POWF
9250 || fcode1 == BUILT_IN_POWL)
9252 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9253 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9255 if (TREE_CODE (arg11) == REAL_CST
9256 && ! TREE_CONSTANT_OVERFLOW (arg11)
9257 && operand_equal_p (arg0, arg10, 0))
9259 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9263 c = TREE_REAL_CST (arg11);
9264 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9265 arg = build_real (type, c);
9266 arglist = build_tree_list (NULL_TREE, arg);
9267 arglist = tree_cons (NULL_TREE, arg0, arglist);
9268 return build_function_call_expr (powfn, arglist);
9272 /* Optimize pow(x,c)*x as pow(x,c+1). */
9273 if (fcode0 == BUILT_IN_POW
9274 || fcode0 == BUILT_IN_POWF
9275 || fcode0 == BUILT_IN_POWL)
9277 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9278 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9280 if (TREE_CODE (arg01) == REAL_CST
9281 && ! TREE_CONSTANT_OVERFLOW (arg01)
9282 && operand_equal_p (arg1, arg00, 0))
9284 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9288 c = TREE_REAL_CST (arg01);
9289 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9290 arg = build_real (type, c);
9291 arglist = build_tree_list (NULL_TREE, arg);
9292 arglist = tree_cons (NULL_TREE, arg1, arglist);
9293 return build_function_call_expr (powfn, arglist);
9297 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9299 && operand_equal_p (arg0, arg1, 0))
9301 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9305 tree arg = build_real (type, dconst2);
9306 tree arglist = build_tree_list (NULL_TREE, arg);
9307 arglist = tree_cons (NULL_TREE, arg0, arglist);
9308 return build_function_call_expr (powfn, arglist);
9317 if (integer_all_onesp (arg1))
9318 return omit_one_operand (type, arg1, arg0);
9319 if (integer_zerop (arg1))
9320 return non_lvalue (fold_convert (type, arg0));
9321 if (operand_equal_p (arg0, arg1, 0))
9322 return non_lvalue (fold_convert (type, arg0));
9325 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9326 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9328 t1 = build_int_cst (type, -1);
9329 t1 = force_fit_type (t1, 0, false, false);
9330 return omit_one_operand (type, t1, arg1);
9334 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9337 t1 = build_int_cst (type, -1);
9338 t1 = force_fit_type (t1, 0, false, false);
9339 return omit_one_operand (type, t1, arg0);
9342 /* Canonicalize (X & C1) | C2. */
9343 if (TREE_CODE (arg0) == BIT_AND_EXPR
9344 && TREE_CODE (arg1) == INTEGER_CST
9345 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9347 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9348 int width = TYPE_PRECISION (type);
9349 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9350 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9351 hi2 = TREE_INT_CST_HIGH (arg1);
9352 lo2 = TREE_INT_CST_LOW (arg1);
9354 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9355 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9356 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9358 if (width > HOST_BITS_PER_WIDE_INT)
9360 mhi = (unsigned HOST_WIDE_INT) -1
9361 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9367 mlo = (unsigned HOST_WIDE_INT) -1
9368 >> (HOST_BITS_PER_WIDE_INT - width);
9371 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9372 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9373 return fold_build2 (BIT_IOR_EXPR, type,
9374 TREE_OPERAND (arg0, 0), arg1);
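/* For instance, (x & 0x0F) | 0x3F folds to 0x3F because C1 & C2 == C1,
   and, assuming 32-bit ints, (x & 0xFFFF0000) | 0x0000FFFF becomes
   x | 0x0000FFFF because C1 | C2 is all ones.  */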
9376 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9379 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9380 return fold_build2 (BIT_IOR_EXPR, type,
9381 fold_build2 (BIT_AND_EXPR, type,
9382 TREE_OPERAND (arg0, 0),
9383 build_int_cst_wide (type,
9389 /* (X & Y) | Y is (X, Y). */
9390 if (TREE_CODE (arg0) == BIT_AND_EXPR
9391 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9392 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9393 /* (X & Y) | X is (Y, X). */
9394 if (TREE_CODE (arg0) == BIT_AND_EXPR
9395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9396 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9397 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9398 /* X | (X & Y) is (Y, X). */
9399 if (TREE_CODE (arg1) == BIT_AND_EXPR
9400 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9401 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9402 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9403 /* X | (Y & X) is (Y, X). */
9404 if (TREE_CODE (arg1) == BIT_AND_EXPR
9405 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9406 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9407 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9409 t1 = distribute_bit_expr (code, type, arg0, arg1);
9410 if (t1 != NULL_TREE)
9413 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9415 This results in more efficient code for machines without a NAND
9416 instruction. Combine will canonicalize to the first form
9417 which will allow use of NAND instructions provided by the
9418 backend if they exist. */
9419 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9420 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9422 return fold_build1 (BIT_NOT_EXPR, type,
9423 build2 (BIT_AND_EXPR, type,
9424 TREE_OPERAND (arg0, 0),
9425 TREE_OPERAND (arg1, 0)));
9428 /* See if this can be simplified into a rotate first. If that
9429 is unsuccessful continue in the association code. */
9433 if (integer_zerop (arg1))
9434 return non_lvalue (fold_convert (type, arg0));
9435 if (integer_all_onesp (arg1))
9436 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9437 if (operand_equal_p (arg0, arg1, 0))
9438 return omit_one_operand (type, integer_zero_node, arg0);
9441 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9442 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9444 t1 = build_int_cst (type, -1);
9445 t1 = force_fit_type (t1, 0, false, false);
9446 return omit_one_operand (type, t1, arg1);
9450 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9451 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9453 t1 = build_int_cst (type, -1);
9454 t1 = force_fit_type (t1, 0, false, false);
9455 return omit_one_operand (type, t1, arg0);
9458 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9459 with a constant, and the two constants have no bits in common,
9460 we should treat this as a BIT_IOR_EXPR since this may produce more
9462 if (TREE_CODE (arg0) == BIT_AND_EXPR
9463 && TREE_CODE (arg1) == BIT_AND_EXPR
9464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9465 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9466 && integer_zerop (const_binop (BIT_AND_EXPR,
9467 TREE_OPERAND (arg0, 1),
9468 TREE_OPERAND (arg1, 1), 0)))
9470 code = BIT_IOR_EXPR;
9474 /* (X | Y) ^ X -> Y & ~X.  */
9475 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9476 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9478 tree t2 = TREE_OPERAND (arg0, 1);
9479 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9481 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9482 fold_convert (type, t1));
9486 /* (Y | X) ^ X -> Y & ~X.  */
9487 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9488 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9490 tree t2 = TREE_OPERAND (arg0, 0);
9491 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9493 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9494 fold_convert (type, t1));
9498 /* X ^ (X | Y) -> Y & ~X.  */
9499 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9500 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9502 tree t2 = TREE_OPERAND (arg1, 1);
9503 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9505 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9506 fold_convert (type, t1));
9510 /* X ^ (Y | X) -> Y & ~X.  */
9511 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9512 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9514 tree t2 = TREE_OPERAND (arg1, 0);
9515 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9517 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9518 fold_convert (type, t1));
9522 /* Convert ~X ^ ~Y to X ^ Y. */
9523 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9524 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9525 return fold_build2 (code, type,
9526 fold_convert (type, TREE_OPERAND (arg0, 0)),
9527 fold_convert (type, TREE_OPERAND (arg1, 0)));
9529 /* Convert ~X ^ C to X ^ ~C. */
9530 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9531 && TREE_CODE (arg1) == INTEGER_CST)
9532 return fold_build2 (code, type,
9533 fold_convert (type, TREE_OPERAND (arg0, 0)),
9534 fold_build1 (BIT_NOT_EXPR, type, arg1));
9536 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9537 if (TREE_CODE (arg0) == BIT_AND_EXPR
9538 && integer_onep (TREE_OPERAND (arg0, 1))
9539 && integer_onep (arg1))
9540 return fold_build2 (EQ_EXPR, type, arg0,
9541 build_int_cst (TREE_TYPE (arg0), 0));
9543 /* Fold (X & Y) ^ Y as ~X & Y. */
9544 if (TREE_CODE (arg0) == BIT_AND_EXPR
9545 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9547 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9548 return fold_build2 (BIT_AND_EXPR, type,
9549 fold_build1 (BIT_NOT_EXPR, type, tem),
9550 fold_convert (type, arg1));
9552 /* Fold (X & Y) ^ X as ~Y & X. */
9553 if (TREE_CODE (arg0) == BIT_AND_EXPR
9554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9555 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9557 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9558 return fold_build2 (BIT_AND_EXPR, type,
9559 fold_build1 (BIT_NOT_EXPR, type, tem),
9560 fold_convert (type, arg1));
9562 /* Fold X ^ (X & Y) as X & ~Y. */
9563 if (TREE_CODE (arg1) == BIT_AND_EXPR
9564 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9566 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9567 return fold_build2 (BIT_AND_EXPR, type,
9568 fold_convert (type, arg0),
9569 fold_build1 (BIT_NOT_EXPR, type, tem));
9571 /* Fold X ^ (Y & X) as ~Y & X. */
9572 if (TREE_CODE (arg1) == BIT_AND_EXPR
9573 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9574 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9576 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9577 return fold_build2 (BIT_AND_EXPR, type,
9578 fold_build1 (BIT_NOT_EXPR, type, tem),
9579 fold_convert (type, arg0));
9582 /* See if this can be simplified into a rotate first. If that
9583 is unsuccessful continue in the association code. */
9587 if (integer_all_onesp (arg1))
9588 return non_lvalue (fold_convert (type, arg0));
9589 if (integer_zerop (arg1))
9590 return omit_one_operand (type, arg1, arg0);
9591 if (operand_equal_p (arg0, arg1, 0))
9592 return non_lvalue (fold_convert (type, arg0));
9594 /* ~X & X is always zero. */
9595 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9596 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9597 return omit_one_operand (type, integer_zero_node, arg1);
9599 /* X & ~X is always zero. */
9600 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9601 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9602 return omit_one_operand (type, integer_zero_node, arg0);
9604 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9605 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9606 && TREE_CODE (arg1) == INTEGER_CST
9607 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9608 return fold_build2 (BIT_IOR_EXPR, type,
9609 fold_build2 (BIT_AND_EXPR, type,
9610 TREE_OPERAND (arg0, 0), arg1),
9611 fold_build2 (BIT_AND_EXPR, type,
9612 TREE_OPERAND (arg0, 1), arg1));
9614 /* (X | Y) & Y is (X, Y). */
9615 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9616 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9617 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9618 /* (X | Y) & X is (Y, X). */
9619 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9620 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9621 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9622 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9623 /* X & (X | Y) is (Y, X). */
9624 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9625 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9626 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9627 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9628 /* X & (Y | X) is (Y, X). */
9629 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9630 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9631 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9632 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9634 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9635 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9636 && integer_onep (TREE_OPERAND (arg0, 1))
9637 && integer_onep (arg1))
9639 tem = TREE_OPERAND (arg0, 0);
9640 return fold_build2 (EQ_EXPR, type,
9641 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9642 build_int_cst (TREE_TYPE (tem), 1)),
9643 build_int_cst (TREE_TYPE (tem), 0));
9645 /* Fold ~X & 1 as (X & 1) == 0. */
9646 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9647 && integer_onep (arg1))
9649 tem = TREE_OPERAND (arg0, 0);
9650 return fold_build2 (EQ_EXPR, type,
9651 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9652 build_int_cst (TREE_TYPE (tem), 1)),
9653 build_int_cst (TREE_TYPE (tem), 0));
9656 /* Fold (X ^ Y) & Y as ~X & Y. */
9657 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9658 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9660 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9661 return fold_build2 (BIT_AND_EXPR, type,
9662 fold_build1 (BIT_NOT_EXPR, type, tem),
9663 fold_convert (type, arg1));
9665 /* Fold (X ^ Y) & X as ~Y & X. */
9666 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9668 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9670 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9671 return fold_build2 (BIT_AND_EXPR, type,
9672 fold_build1 (BIT_NOT_EXPR, type, tem),
9673 fold_convert (type, arg1));
9675 /* Fold X & (X ^ Y) as X & ~Y. */
9676 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9677 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9679 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9680 return fold_build2 (BIT_AND_EXPR, type,
9681 fold_convert (type, arg0),
9682 fold_build1 (BIT_NOT_EXPR, type, tem));
9684 /* Fold X & (Y ^ X) as ~Y & X. */
9685 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9689 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9690 return fold_build2 (BIT_AND_EXPR, type,
9691 fold_build1 (BIT_NOT_EXPR, type, tem),
9692 fold_convert (type, arg0));
9695 t1 = distribute_bit_expr (code, type, arg0, arg1);
9696 if (t1 != NULL_TREE)
9698 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9699 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9700 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9703 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9705 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9706 && (~TREE_INT_CST_LOW (arg1)
9707 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9708 return fold_convert (type, TREE_OPERAND (arg0, 0));
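/* For instance, assuming 8-bit chars, ((int) c & 0377) for unsigned
   char c is just (int) c, since the conversion already clears every
   bit above the low eight.  */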
9711 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9713 This results in more efficient code for machines without a NOR
9714 instruction. Combine will canonicalize to the first form
9715 which will allow use of NOR instructions provided by the
9716 backend if they exist. */
9717 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9718 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9720 return fold_build1 (BIT_NOT_EXPR, type,
9721 build2 (BIT_IOR_EXPR, type,
9722 TREE_OPERAND (arg0, 0),
9723 TREE_OPERAND (arg1, 0)));
9729 /* Don't touch a floating-point divide by zero unless the mode
9730 of the constant can represent infinity. */
9731 if (TREE_CODE (arg1) == REAL_CST
9732 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9733 && real_zerop (arg1))
9736 /* Optimize A / A to 1.0 if we don't care about
9737 NaNs or Infinities. Skip the transformation
9738 for non-real operands. */
9739 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9740 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9741 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9742 && operand_equal_p (arg0, arg1, 0))
9744 tree r = build_real (TREE_TYPE (arg0), dconst1);
9746 return omit_two_operands (type, r, arg0, arg1);
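/* For instance, 0.0 / 0.0 and Inf / Inf are NaN, not 1.0, which is why
   NaNs and infinities must not be honored for this fold.  */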
9749 /* The complex version of the above A / A optimization. */
9750 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9751 && operand_equal_p (arg0, arg1, 0))
9753 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9754 if (! HONOR_NANS (TYPE_MODE (elem_type))
9755 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9757 tree r = build_real (elem_type, dconst1);
9758 /* omit_two_operands will call fold_convert for us. */
9759 return omit_two_operands (type, r, arg0, arg1);
9763 /* (-A) / (-B) -> A / B */
9764 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9765 return fold_build2 (RDIV_EXPR, type,
9766 TREE_OPERAND (arg0, 0),
9767 negate_expr (arg1));
9768 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9769 return fold_build2 (RDIV_EXPR, type,
9771 TREE_OPERAND (arg1, 0));
9773 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9774 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9775 && real_onep (arg1))
9776 return non_lvalue (fold_convert (type, arg0));
9778 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9779 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9780 && real_minus_onep (arg1))
9781 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9783 /* If ARG1 is a constant, we can convert this to a multiply by the
9784 reciprocal. This does not have the same rounding properties,
9785 so only do this if -funsafe-math-optimizations. We can actually
9786 always safely do it if ARG1 is a power of two, but it's hard to
9787 tell if it is or not in a portable manner. */
9788 if (TREE_CODE (arg1) == REAL_CST)
9790 if (flag_unsafe_math_optimizations
9791 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9793 return fold_build2 (MULT_EXPR, type, arg0, tem);
9794 /* Find the reciprocal if optimizing and the result is exact. */
9798 r = TREE_REAL_CST (arg1);
9799 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9801 tem = build_real (type, r);
9802 return fold_build2 (MULT_EXPR, type,
9803 fold_convert (type, arg0), tem);
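/* For instance, x / 4.0 is exactly x * 0.25 because the reciprocal of
   a power of two is exact, whereas x / 3.0 becomes x * (1.0/3.0) only
   under -funsafe-math-optimizations.  */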
9807 /* Convert A/B/C to A/(B*C). */
9808 if (flag_unsafe_math_optimizations
9809 && TREE_CODE (arg0) == RDIV_EXPR)
9810 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9811 fold_build2 (MULT_EXPR, type,
9812 TREE_OPERAND (arg0, 1), arg1));
9814 /* Convert A/(B/C) to (A/B)*C. */
9815 if (flag_unsafe_math_optimizations
9816 && TREE_CODE (arg1) == RDIV_EXPR)
9817 return fold_build2 (MULT_EXPR, type,
9818 fold_build2 (RDIV_EXPR, type, arg0,
9819 TREE_OPERAND (arg1, 0)),
9820 TREE_OPERAND (arg1, 1));
9822 /* Convert C1/(X*C2) into (C1/C2)/X. */
9823 if (flag_unsafe_math_optimizations
9824 && TREE_CODE (arg1) == MULT_EXPR
9825 && TREE_CODE (arg0) == REAL_CST
9826 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9828 tree tem = const_binop (RDIV_EXPR, arg0,
9829 TREE_OPERAND (arg1, 1), 0);
9831 return fold_build2 (RDIV_EXPR, type, tem,
9832 TREE_OPERAND (arg1, 0));
9835 if (flag_unsafe_math_optimizations)
9837 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9838 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9840 /* Optimize sin(x)/cos(x) as tan(x). */
9841 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9842 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9843 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9844 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9845 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9847 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9849 if (tanfn != NULL_TREE)
9850 return build_function_call_expr (tanfn,
9851 TREE_OPERAND (arg0, 1));
9854 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9855 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9856 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9857 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9858 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9859 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9861 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9863 if (tanfn != NULL_TREE)
9865 tree tmp = TREE_OPERAND (arg0, 1);
9866 tmp = build_function_call_expr (tanfn, tmp);
9867 return fold_build2 (RDIV_EXPR, type,
9868 build_real (type, dconst1), tmp);
9872 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9873 NaNs or Infinities. */
9874 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9875 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9876 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9878 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9879 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9881 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9882 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9883 && operand_equal_p (arg00, arg01, 0))
9885 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9887 if (cosfn != NULL_TREE)
9888 return build_function_call_expr (cosfn,
9889 TREE_OPERAND (arg0, 1));
9893 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9894 NaNs or Infinities. */
9895 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9896 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9897 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9899 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9900 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9902 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9903 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9904 && operand_equal_p (arg00, arg01, 0))
9906 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9908 if (cosfn != NULL_TREE)
9910 tree tmp = TREE_OPERAND (arg0, 1);
9911 tmp = build_function_call_expr (cosfn, tmp);
9912 return fold_build2 (RDIV_EXPR, type,
9913 build_real (type, dconst1),
9919 /* Optimize pow(x,c)/x as pow(x,c-1). */
9920 if (fcode0 == BUILT_IN_POW
9921 || fcode0 == BUILT_IN_POWF
9922 || fcode0 == BUILT_IN_POWL)
9924 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9925 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9926 if (TREE_CODE (arg01) == REAL_CST
9927 && ! TREE_CONSTANT_OVERFLOW (arg01)
9928 && operand_equal_p (arg1, arg00, 0))
9930 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9934 c = TREE_REAL_CST (arg01);
9935 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9936 arg = build_real (type, c);
9937 arglist = build_tree_list (NULL_TREE, arg);
9938 arglist = tree_cons (NULL_TREE, arg1, arglist);
9939 return build_function_call_expr (powfn, arglist);
9943 /* Optimize x/expN(y) into x*expN(-y). */
9944 if (BUILTIN_EXPONENT_P (fcode1))
9946 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9947 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9948 tree arglist = build_tree_list (NULL_TREE,
9949 fold_convert (type, arg));
9950 arg1 = build_function_call_expr (expfn, arglist);
9951 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9954 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9955 if (fcode1 == BUILT_IN_POW
9956 || fcode1 == BUILT_IN_POWF
9957 || fcode1 == BUILT_IN_POWL)
9959 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9960 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9961 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9962 tree neg11 = fold_convert (type, negate_expr (arg11));
9963 tree arglist = tree_cons(NULL_TREE, arg10,
9964 build_tree_list (NULL_TREE, neg11));
9965 arg1 = build_function_call_expr (powfn, arglist);
9966 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9971 case TRUNC_DIV_EXPR:
9972 case FLOOR_DIV_EXPR:
9973 /* Simplify A / (B << N) where A and B are positive and B is
9974 a power of 2, to A >> (N + log2(B)). */
9975 if (TREE_CODE (arg1) == LSHIFT_EXPR
9976 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9978 tree sval = TREE_OPERAND (arg1, 0);
9979 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9981 tree sh_cnt = TREE_OPERAND (arg1, 1);
9982 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9984 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9985 sh_cnt, build_int_cst (NULL_TREE, pow2));
9986 return fold_build2 (RSHIFT_EXPR, type,
9987 fold_convert (type, arg0), sh_cnt);
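/* For instance, for unsigned x, x / (4U << n) becomes x >> (n + 2),
   since 4 is a power of two with log2(4) == 2.  */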
9992 case ROUND_DIV_EXPR:
9994 case EXACT_DIV_EXPR:
9995 if (integer_onep (arg1))
9996 return non_lvalue (fold_convert (type, arg0));
9997 if (integer_zerop (arg1))
10000 if (!TYPE_UNSIGNED (type)
10001 && TREE_CODE (arg1) == INTEGER_CST
10002 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10003 && TREE_INT_CST_HIGH (arg1) == -1)
10004 return fold_convert (type, negate_expr (arg0));
10006 /* Convert -A / -B to A / B when the type is signed and overflow is
10008 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10009 && TREE_CODE (arg0) == NEGATE_EXPR
10010 && negate_expr_p (arg1))
10011 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10012 negate_expr (arg1));
10013 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10014 && TREE_CODE (arg1) == NEGATE_EXPR
10015 && negate_expr_p (arg0))
10016 return fold_build2 (code, type, negate_expr (arg0),
10017 TREE_OPERAND (arg1, 0));
10019 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10020 operation, EXACT_DIV_EXPR.
10022 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10023 At one time others generated faster code; it's not clear if they do
10024 after the last round of changes to the DIV code in expmed.c. */
10025 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10026 && multiple_of_p (type, arg0, arg1))
10027 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10029 if (TREE_CODE (arg1) == INTEGER_CST
10030 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10031 return fold_convert (type, tem);
10035 case CEIL_MOD_EXPR:
10036 case FLOOR_MOD_EXPR:
10037 case ROUND_MOD_EXPR:
10038 case TRUNC_MOD_EXPR:
10039 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
10041 if (integer_onep (arg1))
10042 return omit_one_operand (type, integer_zero_node, arg0);
10044 /* X % 0, return X % 0 unchanged so that we can get the
10045 proper warnings and errors. */
10046 if (integer_zerop (arg1))
10049 /* 0 % X is always zero, but be sure to preserve any side
10050 effects in X. Place this after checking for X == 0. */
10051 if (integer_zerop (arg0))
10052 return omit_one_operand (type, integer_zero_node, arg1);
10054 /* X % -1 is zero. */
10055 if (!TYPE_UNSIGNED (type)
10056 && TREE_CODE (arg1) == INTEGER_CST
10057 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10058 && TREE_INT_CST_HIGH (arg1) == -1)
10059 return omit_one_operand (type, integer_zero_node, arg0);
10061 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10062 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
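/* For example, unsigned x % 16 becomes x & 15.  */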
10063 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10064 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10067 /* Also optimize A % (C << N) where C is a power of 2,
10068 to A & ((C << N) - 1). */
10069 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10070 c = TREE_OPERAND (arg1, 0);
10072 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10074 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10075 build_int_cst (TREE_TYPE (arg1), 1));
10076 return fold_build2 (BIT_AND_EXPR, type,
10077 fold_convert (type, arg0),
10078 fold_convert (type, mask));
10082 /* X % -C is the same as X % C. */
10083 if (code == TRUNC_MOD_EXPR
10084 && !TYPE_UNSIGNED (type)
10085 && TREE_CODE (arg1) == INTEGER_CST
10086 && !TREE_CONSTANT_OVERFLOW (arg1)
10087 && TREE_INT_CST_HIGH (arg1) < 0
10089 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10090 && !sign_bit_p (arg1, arg1))
10091 return fold_build2 (code, type, fold_convert (type, arg0),
10092 fold_convert (type, negate_expr (arg1)));
10094 /* X % -Y is the same as X % Y. */
10095 if (code == TRUNC_MOD_EXPR
10096 && !TYPE_UNSIGNED (type)
10097 && TREE_CODE (arg1) == NEGATE_EXPR
10099 return fold_build2 (code, type, fold_convert (type, arg0),
10100 fold_convert (type, TREE_OPERAND (arg1, 0)));
10102 if (TREE_CODE (arg1) == INTEGER_CST
10103 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10104 return fold_convert (type, tem);
10110 if (integer_all_onesp (arg0))
10111 return omit_one_operand (type, arg0, arg1);
10115 /* Optimize -1 >> x for arithmetic right shifts. */
10116 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10117 return omit_one_operand (type, arg0, arg1);
10118 /* ... fall through ... */
10122 if (integer_zerop (arg1))
10123 return non_lvalue (fold_convert (type, arg0));
10124 if (integer_zerop (arg0))
10125 return omit_one_operand (type, arg0, arg1);
10127 /* Since negative shift count is not well-defined,
10128 don't try to compute it in the compiler. */
10129 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10132 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
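/* For example, (x << 3) << 5 becomes x << 8; for rotates the combined count is reduced modulo the type precision.  */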
10133 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10134 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10135 && host_integerp (TREE_OPERAND (arg0, 1), false)
10136 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10138 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10139 + TREE_INT_CST_LOW (arg1));
10141 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10142 being well defined. */
10143 if (low >= TYPE_PRECISION (type))
10145 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10146 low = low % TYPE_PRECISION (type);
10147 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10148 return build_int_cst (type, 0);
10150 low = TYPE_PRECISION (type) - 1;
10153 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10154 build_int_cst (type, low));
10157 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10158 into x & ((unsigned)-1 >> c) for unsigned types. */
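/* For example, with a 32-bit unsigned x, (x >> 4) << 4 becomes x & 0xfffffff0.  */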
10159 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10160 || (TYPE_UNSIGNED (type)
10161 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10162 && host_integerp (arg1, false)
10163 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10164 && host_integerp (TREE_OPERAND (arg0, 1), false)
10165 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10167 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10168 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10174 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10176 lshift = build_int_cst (type, -1);
10177 lshift = int_const_binop (code, lshift, arg1, 0);
10179 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10183 /* Rewrite an LROTATE_EXPR by a constant into an
10184 RROTATE_EXPR by a new constant. */
10185 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10187 tree tem = build_int_cst (TREE_TYPE (arg1),
10188 GET_MODE_BITSIZE (TYPE_MODE (type)));
10189 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10190 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10193 /* If we have a rotate of a bit operation with the rotate count and
10194 the second operand of the bit operation both constant,
10195 permute the two operations. */
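/* For example, in a 32-bit type, (x & 0xff00ff00) rotated right by 8 becomes (x rotated right by 8) & 0x00ff00ff.  */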
10196 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10197 && (TREE_CODE (arg0) == BIT_AND_EXPR
10198 || TREE_CODE (arg0) == BIT_IOR_EXPR
10199 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10200 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10201 return fold_build2 (TREE_CODE (arg0), type,
10202 fold_build2 (code, type,
10203 TREE_OPERAND (arg0, 0), arg1),
10204 fold_build2 (code, type,
10205 TREE_OPERAND (arg0, 1), arg1));
10207 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
10209 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10210 && TREE_CODE (arg0) == RROTATE_EXPR
10211 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10212 && TREE_INT_CST_HIGH (arg1) == 0
10213 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10214 && ((TREE_INT_CST_LOW (arg1)
10215 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10216 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10217 return TREE_OPERAND (arg0, 0);
10222 if (operand_equal_p (arg0, arg1, 0))
10223 return omit_one_operand (type, arg0, arg1);
10224 if (INTEGRAL_TYPE_P (type)
10225 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10226 return omit_one_operand (type, arg1, arg0);
10227 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10233 if (operand_equal_p (arg0, arg1, 0))
10234 return omit_one_operand (type, arg0, arg1);
10235 if (INTEGRAL_TYPE_P (type)
10236 && TYPE_MAX_VALUE (type)
10237 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10238 return omit_one_operand (type, arg1, arg0);
10239 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10244 case TRUTH_ANDIF_EXPR:
10245 /* Note that the operands of this must be ints
10246 and their values must be 0 or 1.
10247 ("true" is a fixed value perhaps depending on the language.) */
10248 /* If first arg is constant zero, return it. */
10249 if (integer_zerop (arg0))
10250 return fold_convert (type, arg0);
10251 case TRUTH_AND_EXPR:
10252 /* If either arg is constant true, drop it. */
10253 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10254 return non_lvalue (fold_convert (type, arg1));
10255 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10256 /* Preserve sequence points. */
10257 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10258 return non_lvalue (fold_convert (type, arg0));
10259 /* If second arg is constant zero, result is zero, but first arg
10260 must be evaluated. */
10261 if (integer_zerop (arg1))
10262 return omit_one_operand (type, arg1, arg0);
10263 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10264 case will be handled here. */
10265 if (integer_zerop (arg0))
10266 return omit_one_operand (type, arg0, arg1);
10268 /* !X && X is always false. */
10269 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10271 return omit_one_operand (type, integer_zero_node, arg1);
10272 /* X && !X is always false. */
10273 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10274 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10275 return omit_one_operand (type, integer_zero_node, arg0);
10277 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10278 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
10281 if (!TREE_SIDE_EFFECTS (arg0)
10282 && !TREE_SIDE_EFFECTS (arg1))
10284 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10285 if (tem && !operand_equal_p (tem, arg0, 0))
10286 return fold_build2 (code, type, tem, arg1);
10288 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10289 if (tem && !operand_equal_p (tem, arg1, 0))
10290 return fold_build2 (code, type, arg0, tem);
10294 /* We only do these simplifications if we are optimizing. */
10298 /* Check for things like (A || B) && (A || C). We can convert this
10299 to A || (B && C). Note that either operator can be any of the four
10300 truth and/or operations and the transformation will still be
10301 valid. Also note that we only care about order for the
10302 ANDIF and ORIF operators. If B contains side effects, this
10303 might change the truth-value of A. */
10304 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10305 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10306 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10307 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10308 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10309 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10311 tree a00 = TREE_OPERAND (arg0, 0);
10312 tree a01 = TREE_OPERAND (arg0, 1);
10313 tree a10 = TREE_OPERAND (arg1, 0);
10314 tree a11 = TREE_OPERAND (arg1, 1);
10315 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10316 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10317 && (code == TRUTH_AND_EXPR
10318 || code == TRUTH_OR_EXPR));
10320 if (operand_equal_p (a00, a10, 0))
10321 return fold_build2 (TREE_CODE (arg0), type, a00,
10322 fold_build2 (code, type, a01, a11));
10323 else if (commutative && operand_equal_p (a00, a11, 0))
10324 return fold_build2 (TREE_CODE (arg0), type, a00,
10325 fold_build2 (code, type, a01, a10));
10326 else if (commutative && operand_equal_p (a01, a10, 0))
10327 return fold_build2 (TREE_CODE (arg0), type, a01,
10328 fold_build2 (code, type, a00, a11));
10330 /* This case is tricky because we must either have commutative
10331 operators or else A10 must not have side-effects. */
10333 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10334 && operand_equal_p (a01, a11, 0))
10335 return fold_build2 (TREE_CODE (arg0), type,
10336 fold_build2 (code, type, a00, a10),
10340 /* See if we can build a range comparison. */
10341 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10344 /* Check for the possibility of merging component references. If our
10345 lhs is another similar operation, try to merge its rhs with our
10346 rhs. Then try to merge our lhs and rhs. */
10347 if (TREE_CODE (arg0) == code
10348 && 0 != (tem = fold_truthop (code, type,
10349 TREE_OPERAND (arg0, 1), arg1)))
10350 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10352 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10357 case TRUTH_ORIF_EXPR:
10358 /* Note that the operands of this must be ints
10359 and their values must be 0 or true.
10360 ("true" is a fixed value perhaps depending on the language.) */
10361 /* If first arg is constant true, return it. */
10362 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10363 return fold_convert (type, arg0);
10364 case TRUTH_OR_EXPR:
10365 /* If either arg is constant zero, drop it. */
10366 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10367 return non_lvalue (fold_convert (type, arg1));
10368 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10369 /* Preserve sequence points. */
10370 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10371 return non_lvalue (fold_convert (type, arg0));
10372 /* If second arg is constant true, result is true, but we must
10373 evaluate first arg. */
10374 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10375 return omit_one_operand (type, arg1, arg0);
10376 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
10378 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10379 return omit_one_operand (type, arg0, arg1);
10381 /* !X || X is always true. */
10382 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10383 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10384 return omit_one_operand (type, integer_one_node, arg1);
10385 /* X || !X is always true. */
10386 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10388 return omit_one_operand (type, integer_one_node, arg0);
10392 case TRUTH_XOR_EXPR:
10393 /* If the second arg is constant zero, drop it. */
10394 if (integer_zerop (arg1))
10395 return non_lvalue (fold_convert (type, arg0));
10396 /* If the second arg is constant true, this is a logical inversion. */
10397 if (integer_onep (arg1))
10399 /* Only call invert_truthvalue if operand is a truth value. */
10400 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10401 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10403 tem = invert_truthvalue (arg0);
10404 return non_lvalue (fold_convert (type, tem));
10406 /* Identical arguments cancel to zero. */
10407 if (operand_equal_p (arg0, arg1, 0))
10408 return omit_one_operand (type, integer_zero_node, arg0);
10410 /* !X ^ X is always true. */
10411 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10412 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10413 return omit_one_operand (type, integer_one_node, arg1);
10415 /* X ^ !X is always true. */
10416 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10417 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10418 return omit_one_operand (type, integer_one_node, arg0);
10424 tem = fold_comparison (code, type, op0, op1);
10425 if (tem != NULL_TREE)
10428 /* bool_var != 0 becomes bool_var. */
10429 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10430 && code == NE_EXPR)
10431 return non_lvalue (fold_convert (type, arg0));
10433 /* bool_var == 1 becomes bool_var. */
10434 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10435 && code == EQ_EXPR)
10436 return non_lvalue (fold_convert (type, arg0));
10438 /* bool_var != 1 becomes !bool_var. */
10439 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10440 && code == NE_EXPR)
10441 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10443 /* bool_var == 0 becomes !bool_var. */
10444 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10445 && code == EQ_EXPR)
10446 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10448 /* If this is an equality comparison of the address of a non-weak
10449 object against zero, then we know the result. */
10450 if (TREE_CODE (arg0) == ADDR_EXPR
10451 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10452 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10453 && integer_zerop (arg1))
10454 return constant_boolean_node (code != EQ_EXPR, type);
10456 /* If this is an equality comparison of the address of two non-weak,
10457 unaliased symbols neither of which are extern (since we do not
10458 have access to attributes for externs), then we know the result. */
10459 if (TREE_CODE (arg0) == ADDR_EXPR
10460 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10461 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10462 && ! lookup_attribute ("alias",
10463 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10464 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10465 && TREE_CODE (arg1) == ADDR_EXPR
10466 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10467 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10468 && ! lookup_attribute ("alias",
10469 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10470 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10472 /* We know that we're looking at the address of two
10473 non-weak, unaliased, static _DECL nodes.
10475 It is both wasteful and incorrect to call operand_equal_p
10476 to compare the two ADDR_EXPR nodes. It is wasteful in that
10477 all we need to do is test pointer equality for the arguments
10478 to the two ADDR_EXPR nodes. It is incorrect to use
10479 operand_equal_p as that function is NOT equivalent to a
10480 C equality test. It can in fact return false for two
10481 objects which would test as equal using the C equality operator.  */
10483 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10484 return constant_boolean_node (equal
10485 ? code == EQ_EXPR : code != EQ_EXPR,
10489 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10490 a MINUS_EXPR of a constant, we can convert it into a comparison with
10491 a revised constant as long as no overflow occurs. */
10492 if (TREE_CODE (arg1) == INTEGER_CST
10493 && (TREE_CODE (arg0) == PLUS_EXPR
10494 || TREE_CODE (arg0) == MINUS_EXPR)
10495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10496 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10497 ? MINUS_EXPR : PLUS_EXPR,
10498 fold_convert (TREE_TYPE (arg0), arg1),
10499 TREE_OPERAND (arg0, 1), 0))
10500 && ! TREE_CONSTANT_OVERFLOW (tem))
10501 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10503 /* Similarly for a NEGATE_EXPR. */
10504 if (TREE_CODE (arg0) == NEGATE_EXPR
10505 && TREE_CODE (arg1) == INTEGER_CST
10506 && 0 != (tem = negate_expr (arg1))
10507 && TREE_CODE (tem) == INTEGER_CST
10508 && ! TREE_CONSTANT_OVERFLOW (tem))
10509 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10511 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10512 for !=. Don't do this for ordered comparisons due to overflow. */
10513 if (TREE_CODE (arg0) == MINUS_EXPR
10514 && integer_zerop (arg1))
10515 return fold_build2 (code, type,
10516 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10518 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10519 if (TREE_CODE (arg0) == ABS_EXPR
10520 && (integer_zerop (arg1) || real_zerop (arg1)))
10521 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10523 /* If this is an EQ or NE comparison with zero and ARG0 is
10524 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10525 two operations, but the latter can be done in one less insn
10526 on machines that have only two-operand insns or on which a
10527 constant cannot be the first operand. */
10528 if (TREE_CODE (arg0) == BIT_AND_EXPR
10529 && integer_zerop (arg1))
10531 tree arg00 = TREE_OPERAND (arg0, 0);
10532 tree arg01 = TREE_OPERAND (arg0, 1);
10533 if (TREE_CODE (arg00) == LSHIFT_EXPR
10534 && integer_onep (TREE_OPERAND (arg00, 0)))
10536 fold_build2 (code, type,
10537 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10538 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10539 arg01, TREE_OPERAND (arg00, 1)),
10540 fold_convert (TREE_TYPE (arg0),
10541 integer_one_node)),
10543 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10544 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10546 fold_build2 (code, type,
10547 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10548 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10549 arg00, TREE_OPERAND (arg01, 1)),
10550 fold_convert (TREE_TYPE (arg0),
10551 integer_one_node)),
10555 /* If this is an NE or EQ comparison of zero against the result of a
10556 signed MOD operation whose second operand is a power of 2, make
10557 the MOD operation unsigned since it is simpler and equivalent. */
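/* For example, (x % 4) == 0 with signed x becomes ((unsigned) x % 4) == 0.  */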
10558 if (integer_zerop (arg1)
10559 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10560 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10561 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10562 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10563 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10564 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10566 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10567 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10568 fold_convert (newtype,
10569 TREE_OPERAND (arg0, 0)),
10570 fold_convert (newtype,
10571 TREE_OPERAND (arg0, 1)));
10573 return fold_build2 (code, type, newmod,
10574 fold_convert (newtype, arg1));
10577 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10578 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
10580 if (TREE_CODE (arg0) == BIT_AND_EXPR
10581 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10582 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10584 && integer_pow2p (TREE_OPERAND (arg0, 1))
10585 && integer_zerop (arg1))
10587 tree itype = TREE_TYPE (arg0);
10588 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10589 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10591 /* Check for a valid shift count. */
10592 if (TREE_INT_CST_HIGH (arg001) == 0
10593 && TREE_INT_CST_LOW (arg001) < prec)
10595 tree arg01 = TREE_OPERAND (arg0, 1);
10596 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10597 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10598 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10599 can be rewritten as (X & (C2 << C1)) != 0. */
10600 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10602 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10603 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10604 return fold_build2 (code, type, tem, arg1);
10606 /* Otherwise, for signed (arithmetic) shifts,
10607 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10608 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10609 else if (!TYPE_UNSIGNED (itype))
10610 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10611 arg000, build_int_cst (itype, 0));
10612 /* Otherwise, for unsigned (logical) shifts,
10613 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10614 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10616 return omit_one_operand (type,
10617 code == EQ_EXPR ? integer_one_node
10618 : integer_zero_node,
10623 /* If this is an NE comparison of zero with an AND of one, remove the
10624 comparison since the AND will give the correct value. */
10625 if (code == NE_EXPR
10626 && integer_zerop (arg1)
10627 && TREE_CODE (arg0) == BIT_AND_EXPR
10628 && integer_onep (TREE_OPERAND (arg0, 1)))
10629 return fold_convert (type, arg0);
10631 /* If we have (A & C) == C where C is a power of 2, convert this into
10632 (A & C) != 0. Similarly for NE_EXPR. */
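/* For example, (x & 4) == 4 becomes (x & 4) != 0.  */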
10633 if (TREE_CODE (arg0) == BIT_AND_EXPR
10634 && integer_pow2p (TREE_OPERAND (arg0, 1))
10635 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10636 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10637 arg0, fold_convert (TREE_TYPE (arg0),
10638 integer_zero_node));
10640 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10641 bit, then fold the expression into A < 0 or A >= 0. */
10642 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10646 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10647 Similarly for NE_EXPR. */
10648 if (TREE_CODE (arg0) == BIT_AND_EXPR
10649 && TREE_CODE (arg1) == INTEGER_CST
10650 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10652 tree notc = fold_build1 (BIT_NOT_EXPR,
10653 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10654 TREE_OPERAND (arg0, 1));
10655 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10657 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10658 if (integer_nonzerop (dandnotc))
10659 return omit_one_operand (type, rslt, arg0);
10662 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10663 Similarly for NE_EXPR. */
10664 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10665 && TREE_CODE (arg1) == INTEGER_CST
10666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10668 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10669 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10670 TREE_OPERAND (arg0, 1), notd);
10671 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10672 if (integer_nonzerop (candnotd))
10673 return omit_one_operand (type, rslt, arg0);
10676 /* If this is a comparison of a field, we may be able to simplify it. */
10677 if (((TREE_CODE (arg0) == COMPONENT_REF
10678 && lang_hooks.can_use_bit_fields_p ())
10679 || TREE_CODE (arg0) == BIT_FIELD_REF)
10680 /* Handle the constant case even without -O
10681 to make sure the warnings are given. */
10682 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10684 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10689 /* Optimize comparisons of strlen vs zero to a compare of the
10690 first character of the string vs zero. To wit,
10691 strlen(ptr) == 0 => *ptr == 0
10692 strlen(ptr) != 0 => *ptr != 0
10693 Other cases should reduce to one of these two (or a constant)
10694 due to the return value of strlen being unsigned. */
10695 if (TREE_CODE (arg0) == CALL_EXPR
10696 && integer_zerop (arg1))
10698 tree fndecl = get_callee_fndecl (arg0);
10702 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10703 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10704 && (arglist = TREE_OPERAND (arg0, 1))
10705 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10706 && ! TREE_CHAIN (arglist))
10708 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10709 return fold_build2 (code, type, iref,
10710 build_int_cst (TREE_TYPE (iref), 0));
10714 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10715 of X. Similarly fold (X >> C) == 0 into X >= 0. */
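/* For example, for a 32-bit int x, (x >> 31) != 0 becomes x < 0.  */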
10716 if (TREE_CODE (arg0) == RSHIFT_EXPR
10717 && integer_zerop (arg1)
10718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10720 tree arg00 = TREE_OPERAND (arg0, 0);
10721 tree arg01 = TREE_OPERAND (arg0, 1);
10722 tree itype = TREE_TYPE (arg00);
10723 if (TREE_INT_CST_HIGH (arg01) == 0
10724 && TREE_INT_CST_LOW (arg01)
10725 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10727 if (TYPE_UNSIGNED (itype))
10729 itype = lang_hooks.types.signed_type (itype);
10730 arg00 = fold_convert (itype, arg00);
10732 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10733 type, arg00, build_int_cst (itype, 0));
10737 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10738 if (integer_zerop (arg1)
10739 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10740 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10741 TREE_OPERAND (arg0, 1));
10743 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10744 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10745 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10746 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10747 build_int_cst (TREE_TYPE (arg1), 0));
10748 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10749 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10750 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10751 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10752 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10753 build_int_cst (TREE_TYPE (arg1), 0));
10755 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
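/* For example, (x ^ 3) == 5 becomes x == 6.  */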
10756 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10757 && TREE_CODE (arg1) == INTEGER_CST
10758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10759 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10760 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10761 TREE_OPERAND (arg0, 1), arg1));
10763 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10764 (X & C) == 0 when C is a single bit. */
10765 if (TREE_CODE (arg0) == BIT_AND_EXPR
10766 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10767 && integer_zerop (arg1)
10768 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10770 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10771 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10772 TREE_OPERAND (arg0, 1));
10773 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10777 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10778 constant C is a power of two, i.e. a single bit. */
10779 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10780 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10781 && integer_zerop (arg1)
10782 && integer_pow2p (TREE_OPERAND (arg0, 1))
10783 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10784 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10786 tree arg00 = TREE_OPERAND (arg0, 0);
10787 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10788 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10791 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10792 when C is a power of two, i.e. a single bit. */
10793 if (TREE_CODE (arg0) == BIT_AND_EXPR
10794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10795 && integer_zerop (arg1)
10796 && integer_pow2p (TREE_OPERAND (arg0, 1))
10797 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10798 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10800 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10801 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10802 arg000, TREE_OPERAND (arg0, 1));
10803 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10804 tem, build_int_cst (TREE_TYPE (tem), 0));
10807 if (integer_zerop (arg1)
10808 && tree_expr_nonzero_p (arg0))
10810 tree res = constant_boolean_node (code == NE_EXPR, type);
10811 return omit_one_operand (type, res, arg0);
10814 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10815 if (TREE_CODE (arg0) == NEGATE_EXPR
10816 && TREE_CODE (arg1) == NEGATE_EXPR)
10817 return fold_build2 (code, type,
10818 TREE_OPERAND (arg0, 0),
10819 TREE_OPERAND (arg1, 0));
10821 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
10822 if (TREE_CODE (arg0) == BIT_AND_EXPR
10823 && TREE_CODE (arg1) == BIT_AND_EXPR)
10825 tree arg00 = TREE_OPERAND (arg0, 0);
10826 tree arg01 = TREE_OPERAND (arg0, 1);
10827 tree arg10 = TREE_OPERAND (arg1, 0);
10828 tree arg11 = TREE_OPERAND (arg1, 1);
10829 tree itype = TREE_TYPE (arg0);
10831 if (operand_equal_p (arg01, arg11, 0))
10832 return fold_build2 (code, type,
10833 fold_build2 (BIT_AND_EXPR, itype,
10834 fold_build2 (BIT_XOR_EXPR, itype,
10837 build_int_cst (itype, 0));
10839 if (operand_equal_p (arg01, arg10, 0))
10840 return fold_build2 (code, type,
10841 fold_build2 (BIT_AND_EXPR, itype,
10842 fold_build2 (BIT_XOR_EXPR, itype,
10845 build_int_cst (itype, 0));
10847 if (operand_equal_p (arg00, arg11, 0))
10848 return fold_build2 (code, type,
10849 fold_build2 (BIT_AND_EXPR, itype,
10850 fold_build2 (BIT_XOR_EXPR, itype,
10853 build_int_cst (itype, 0));
10855 if (operand_equal_p (arg00, arg10, 0))
10856 return fold_build2 (code, type,
10857 fold_build2 (BIT_AND_EXPR, itype,
10858 fold_build2 (BIT_XOR_EXPR, itype,
10861 build_int_cst (itype, 0));
10870 tem = fold_comparison (code, type, op0, op1);
10871 if (tem != NULL_TREE)
10874 /* Transform comparisons of the form X +- C CMP X. */
10875 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10877 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10878 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10879 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10880 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10881 && !(flag_wrapv || flag_trapv))))
10883 tree arg01 = TREE_OPERAND (arg0, 1);
10884 enum tree_code code0 = TREE_CODE (arg0);
10887 if (TREE_CODE (arg01) == REAL_CST)
10888 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10890 is_positive = tree_int_cst_sgn (arg01);
10892 /* (X - c) > X becomes false. */
10893 if (code == GT_EXPR
10894 && ((code0 == MINUS_EXPR && is_positive >= 0)
10895 || (code0 == PLUS_EXPR && is_positive <= 0)))
10896 return constant_boolean_node (0, type);
10898 /* Likewise (X + c) < X becomes false. */
10899 if (code == LT_EXPR
10900 && ((code0 == PLUS_EXPR && is_positive >= 0)
10901 || (code0 == MINUS_EXPR && is_positive <= 0)))
10902 return constant_boolean_node (0, type);
10904 /* Convert (X - c) <= X to true. */
10905 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10907 && ((code0 == MINUS_EXPR && is_positive >= 0)
10908 || (code0 == PLUS_EXPR && is_positive <= 0)))
10909 return constant_boolean_node (1, type);
10911 /* Convert (X + c) >= X to true. */
10912 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10914 && ((code0 == PLUS_EXPR && is_positive >= 0)
10915 || (code0 == MINUS_EXPR && is_positive <= 0)))
10916 return constant_boolean_node (1, type);
10918 if (TREE_CODE (arg01) == INTEGER_CST)
10920 /* Convert X + c > X and X - c < X to true for integers. */
10921 if (code == GT_EXPR
10922 && ((code0 == PLUS_EXPR && is_positive > 0)
10923 || (code0 == MINUS_EXPR && is_positive < 0)))
10924 return constant_boolean_node (1, type);
10926 if (code == LT_EXPR
10927 && ((code0 == MINUS_EXPR && is_positive > 0)
10928 || (code0 == PLUS_EXPR && is_positive < 0)))
10929 return constant_boolean_node (1, type);
10931 /* Convert X + c <= X and X - c >= X to false for integers. */
10932 if (code == LE_EXPR
10933 && ((code0 == PLUS_EXPR && is_positive > 0)
10934 || (code0 == MINUS_EXPR && is_positive < 0)))
10935 return constant_boolean_node (0, type);
10937 if (code == GE_EXPR
10938 && ((code0 == MINUS_EXPR && is_positive > 0)
10939 || (code0 == PLUS_EXPR && is_positive < 0)))
10940 return constant_boolean_node (0, type);
10944 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10945 This transformation affects the cases which are handled in later
10946 optimizations involving comparisons with non-negative constants. */
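/* For example, x >= 5 becomes x > 4, and x < 5 becomes x <= 4.  */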
10947 if (TREE_CODE (arg1) == INTEGER_CST
10948 && TREE_CODE (arg0) != INTEGER_CST
10949 && tree_int_cst_sgn (arg1) > 0)
10951 if (code == GE_EXPR)
10953 arg1 = const_binop (MINUS_EXPR, arg1,
10954 build_int_cst (TREE_TYPE (arg1), 1), 0);
10955 return fold_build2 (GT_EXPR, type, arg0,
10956 fold_convert (TREE_TYPE (arg0), arg1));
10958 if (code == LT_EXPR)
10960 arg1 = const_binop (MINUS_EXPR, arg1,
10961 build_int_cst (TREE_TYPE (arg1), 1), 0);
10962 return fold_build2 (LE_EXPR, type, arg0,
10963 fold_convert (TREE_TYPE (arg0), arg1));
10967 /* Comparisons with the highest or lowest possible integer of
10968 the specified size will have known values. */
10970 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10972 if (TREE_CODE (arg1) == INTEGER_CST
10973 && ! TREE_CONSTANT_OVERFLOW (arg1)
10974 && width <= 2 * HOST_BITS_PER_WIDE_INT
10975 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10976 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10978 HOST_WIDE_INT signed_max_hi;
10979 unsigned HOST_WIDE_INT signed_max_lo;
10980 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10982 if (width <= HOST_BITS_PER_WIDE_INT)
10984 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10989 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10991 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10997 max_lo = signed_max_lo;
10998 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11004 width -= HOST_BITS_PER_WIDE_INT;
11005 signed_max_lo = -1;
11006 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11011 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11013 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11018 max_hi = signed_max_hi;
11019 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11023 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11024 && TREE_INT_CST_LOW (arg1) == max_lo)
11028 return omit_one_operand (type, integer_zero_node, arg0);
11031 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11034 return omit_one_operand (type, integer_one_node, arg0);
11037 return fold_build2 (NE_EXPR, type, arg0, arg1);
11039 /* The GE_EXPR and LT_EXPR cases above are not normally
11040 reached because of previous transformations. */
11045 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11047 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11051 arg1 = const_binop (PLUS_EXPR, arg1,
11052 build_int_cst (TREE_TYPE (arg1), 1), 0);
11053 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11055 arg1 = const_binop (PLUS_EXPR, arg1,
11056 build_int_cst (TREE_TYPE (arg1), 1), 0);
11057 return fold_build2 (NE_EXPR, type, arg0, arg1);
11061 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11063 && TREE_INT_CST_LOW (arg1) == min_lo)
11067 return omit_one_operand (type, integer_zero_node, arg0);
11070 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11073 return omit_one_operand (type, integer_one_node, arg0);
11076 return fold_build2 (NE_EXPR, type, op0, op1);
11081 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11083 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11087 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11088 return fold_build2 (NE_EXPR, type, arg0, arg1);
11090 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11091 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11096 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11097 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11098 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11099 /* signed_type does not work on pointer types. */
11100 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11102 /* The following case also applies to X < signed_max+1
11103 and X >= signed_max+1 because of previous transformations.  */
11104 if (code == LE_EXPR || code == GT_EXPR)
11107 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11108 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11109 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11110 type, fold_convert (st0, arg0),
11111 build_int_cst (st1, 0));
11117 /* If we are comparing an ABS_EXPR with a constant, we can
11118 convert all the cases into explicit comparisons, but they may
11119 well not be faster than doing the ABS and one comparison.
11120 But ABS (X) <= C is a range comparison, which becomes a subtraction
11121 and a comparison, and is probably faster. */
11122 if (code == LE_EXPR
11123 && TREE_CODE (arg1) == INTEGER_CST
11124 && TREE_CODE (arg0) == ABS_EXPR
11125 && ! TREE_SIDE_EFFECTS (arg0)
11126 && (0 != (tem = negate_expr (arg1)))
11127 && TREE_CODE (tem) == INTEGER_CST
11128 && ! TREE_CONSTANT_OVERFLOW (tem))
11129 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11130 build2 (GE_EXPR, type,
11131 TREE_OPERAND (arg0, 0), tem),
11132 build2 (LE_EXPR, type,
11133 TREE_OPERAND (arg0, 0), arg1));
11135 /* Convert ABS_EXPR<x> >= 0 to true. */
11136 if (code == GE_EXPR
11137 && tree_expr_nonnegative_p (arg0)
11138 && (integer_zerop (arg1)
11139 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11140 && real_zerop (arg1))))
11141 return omit_one_operand (type, integer_one_node, arg0);
11143 /* Convert ABS_EXPR<x> < 0 to false. */
11144 if (code == LT_EXPR
11145 && tree_expr_nonnegative_p (arg0)
11146 && (integer_zerop (arg1) || real_zerop (arg1)))
11147 return omit_one_operand (type, integer_zero_node, arg0);
11149 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11150 and similarly for >= into !=. */
11151 if ((code == LT_EXPR || code == GE_EXPR)
11152 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11153 && TREE_CODE (arg1) == LSHIFT_EXPR
11154 && integer_onep (TREE_OPERAND (arg1, 0)))
11155 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11156 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11157 TREE_OPERAND (arg1, 1)),
11158 build_int_cst (TREE_TYPE (arg0), 0));
11160 if ((code == LT_EXPR || code == GE_EXPR)
11161 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11162 && (TREE_CODE (arg1) == NOP_EXPR
11163 || TREE_CODE (arg1) == CONVERT_EXPR)
11164 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11165 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11167 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11168 fold_convert (TREE_TYPE (arg0),
11169 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11170 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11172 build_int_cst (TREE_TYPE (arg0), 0));
11176 case UNORDERED_EXPR:
11184 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11186 t1 = fold_relational_const (code, type, arg0, arg1);
11187 if (t1 != NULL_TREE)
11191 /* If the first operand is NaN, the result is constant. */
11192 if (TREE_CODE (arg0) == REAL_CST
11193 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11194 && (code != LTGT_EXPR || ! flag_trapping_math))
11196 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11197 ? integer_zero_node
11198 : integer_one_node;
11199 return omit_one_operand (type, t1, arg1);
11202 /* If the second operand is NaN, the result is constant. */
11203 if (TREE_CODE (arg1) == REAL_CST
11204 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11205 && (code != LTGT_EXPR || ! flag_trapping_math))
11207 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11208 ? integer_zero_node
11209 : integer_one_node;
11210 return omit_one_operand (type, t1, arg0);
11213 /* Simplify unordered comparison of something with itself. */
11214 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11215 && operand_equal_p (arg0, arg1, 0))
11216 return constant_boolean_node (1, type);
11218 if (code == LTGT_EXPR
11219 && !flag_trapping_math
11220 && operand_equal_p (arg0, arg1, 0))
11221 return constant_boolean_node (0, type);
11223 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11225 tree targ0 = strip_float_extensions (arg0);
11226 tree targ1 = strip_float_extensions (arg1);
11227 tree newtype = TREE_TYPE (targ0);
11229 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11230 newtype = TREE_TYPE (targ1);
11232 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11233 return fold_build2 (code, type, fold_convert (newtype, targ0),
11234 fold_convert (newtype, targ1));
11239 case COMPOUND_EXPR:
11240 /* When pedantic, a compound expression can be neither an lvalue
11241 nor an integer constant expression. */
11242 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11245 /* Don't let (0, 0) be a null pointer constant. */
11245 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11246 : fold_convert (type, arg1);
11247 return pedantic_non_lvalue (tem);
11250 if ((TREE_CODE (arg0) == REAL_CST
11251 && TREE_CODE (arg1) == REAL_CST)
11252 || (TREE_CODE (arg0) == INTEGER_CST
11253 && TREE_CODE (arg1) == INTEGER_CST))
11254 return build_complex (type, arg0, arg1);
11258 /* An ASSERT_EXPR should never be passed to fold_binary. */
11259 gcc_unreachable ();
11263 } /* switch (code) */
11266 /* Callback for walk_tree, looking for LABEL_EXPR.
11267 Returns the tree *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
11268 Do not check the sub-tree of GOTO_EXPR. */
11271 contains_label_1 (tree *tp,
11272 int *walk_subtrees,
11273 void *data ATTRIBUTE_UNUSED)
11275 switch (TREE_CODE (*tp))
11280 *walk_subtrees = 0;
11287 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11288 accessible from outside the sub-tree. Returns false if no
11289 such label is found. */
11292 contains_label_p (tree st)
11294 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11297 /* Fold a ternary expression of code CODE and type TYPE with operands
11298 OP0, OP1, and OP2. Return the folded expression if folding is
11299 successful. Otherwise, return NULL_TREE. */
11302 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11305 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11306 enum tree_code_class kind = TREE_CODE_CLASS (code);
11308 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11309 && TREE_CODE_LENGTH (code) == 3);
11311 /* Strip any conversions that don't change the mode. This is safe
11312 for every expression, except for a comparison expression because
11313 its signedness is derived from its operands. So, in the latter
11314 case, only strip conversions that don't change the signedness.
11316 Note that this is done as an internal manipulation within the
11317 constant folder, in order to find the simplest representation of
11318 the arguments so that their form can be studied. In any case,
11319 the appropriate type conversions should be put back in the tree
11320 that will get out of the constant folder. */
11335 case COMPONENT_REF:
11336 if (TREE_CODE (arg0) == CONSTRUCTOR
11337 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11339 unsigned HOST_WIDE_INT idx;
11341 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11348 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11349 so all simple results must be passed through pedantic_non_lvalue. */
11350 if (TREE_CODE (arg0) == INTEGER_CST)
11352 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11353 tem = integer_zerop (arg0) ? op2 : op1;
11354 /* Only optimize constant conditions when the selected branch
11355 has the same type as the COND_EXPR. This avoids optimizing
11356 away "c ? x : throw", where the throw has a void type.
11357 Avoid throwing away that operand which contains a label. */
11358 if ((!TREE_SIDE_EFFECTS (unused_op)
11359 || !contains_label_p (unused_op))
11360 && (! VOID_TYPE_P (TREE_TYPE (tem))
11361 || VOID_TYPE_P (type)))
11362 return pedantic_non_lvalue (tem);
11365 if (operand_equal_p (arg1, op2, 0))
11366 return pedantic_omit_one_operand (type, arg1, arg0);
11368 /* If we have A op B ? A : C, we may be able to convert this to a
11369 simpler expression, depending on the operation and the values
11370 of B and C. Signed zeros prevent all of these transformations,
11371 for reasons given above each one.
11373 Also try swapping the arguments and inverting the conditional. */
11374 if (COMPARISON_CLASS_P (arg0)
11375 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11376 arg1, TREE_OPERAND (arg0, 1))
11377 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11379 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11384 if (COMPARISON_CLASS_P (arg0)
11385 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11387 TREE_OPERAND (arg0, 1))
11388 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11390 tem = fold_truth_not_expr (arg0);
11391 if (tem && COMPARISON_CLASS_P (tem))
11393 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11399 /* If the second operand is simpler than the third, swap them
11400 since that produces better jump optimization results. */
11401 if (truth_value_p (TREE_CODE (arg0))
11402 && tree_swap_operands_p (op1, op2, false))
11404 /* See if this can be inverted. If it can't, possibly because
11405 it was a floating-point inequality comparison, don't do anything.  */
11407 tem = fold_truth_not_expr (arg0);
11409 return fold_build3 (code, type, tem, op2, op1);
11412 /* Convert A ? 1 : 0 to simply A. */
11413 if (integer_onep (op1)
11414 && integer_zerop (op2)
11415 /* If we try to convert OP0 to our type, the
11416 call to fold will try to move the conversion inside
11417 a COND, which will recurse. In that case, the COND_EXPR
11418 is probably the best choice, so leave it alone. */
11419 && type == TREE_TYPE (arg0))
11420 return pedantic_non_lvalue (arg0);
11422 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11423 over COND_EXPR in cases such as floating point comparisons. */
11424 if (integer_zerop (op1)
11425 && integer_onep (op2)
11426 && truth_value_p (TREE_CODE (arg0)))
11427 return pedantic_non_lvalue (fold_convert (type,
11428 invert_truthvalue (arg0)));
11430 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11431 if (TREE_CODE (arg0) == LT_EXPR
11432 && integer_zerop (TREE_OPERAND (arg0, 1))
11433 && integer_zerop (op2)
11434 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11436 /* sign_bit_p only checks ARG1 bits within A's precision.
11437 If <sign bit of A> has wider type than A, bits outside
11438 of A's precision in <sign bit of A> need to be checked.
11439 If they are all 0, this optimization needs to be done
11440 in unsigned A's type; if they are all 1, in signed A's type;
11441 otherwise this can't be done. */
11442 if (TYPE_PRECISION (TREE_TYPE (tem))
11443 < TYPE_PRECISION (TREE_TYPE (arg1))
11444 && TYPE_PRECISION (TREE_TYPE (tem))
11445 < TYPE_PRECISION (type))
11447 unsigned HOST_WIDE_INT mask_lo;
11448 HOST_WIDE_INT mask_hi;
11449 int inner_width, outer_width;
11452 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11453 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11454 if (outer_width > TYPE_PRECISION (type))
11455 outer_width = TYPE_PRECISION (type);
11457 if (outer_width > HOST_BITS_PER_WIDE_INT)
11459 mask_hi = ((unsigned HOST_WIDE_INT) -1
11460 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11466 mask_lo = ((unsigned HOST_WIDE_INT) -1
11467 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11469 if (inner_width > HOST_BITS_PER_WIDE_INT)
11471 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11472 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11476 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11477 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11479 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11480 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11482 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11483 tem = fold_convert (tem_type, tem);
11485 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11486 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11488 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11489 tem = fold_convert (tem_type, tem);
11496 return fold_convert (type,
11497 fold_build2 (BIT_AND_EXPR,
11498 TREE_TYPE (tem), tem,
11499 fold_convert (TREE_TYPE (tem),
11503 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11504 already handled above. */
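/* For example, ((a >> 3) & 1) ? 8 : 0 becomes a & 8.  */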
11505 if (TREE_CODE (arg0) == BIT_AND_EXPR
11506 && integer_onep (TREE_OPERAND (arg0, 1))
11507 && integer_zerop (op2)
11508 && integer_pow2p (arg1))
11510 tree tem = TREE_OPERAND (arg0, 0);
11512 if (TREE_CODE (tem) == RSHIFT_EXPR
11513 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11514 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11515 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11516 return fold_build2 (BIT_AND_EXPR, type,
11517 TREE_OPERAND (tem, 0), arg1);
11520 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11521 is probably obsolete because the first operand should be a
11522 truth value (that's why we have the two cases above), but let's
11523 leave it in until we can confirm this for all front-ends. */
11524 if (integer_zerop (op2)
11525 && TREE_CODE (arg0) == NE_EXPR
11526 && integer_zerop (TREE_OPERAND (arg0, 1))
11527 && integer_pow2p (arg1)
11528 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11529 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11530 arg1, OEP_ONLY_CONST))
11531 return pedantic_non_lvalue (fold_convert (type,
11532 TREE_OPERAND (arg0, 0)));
11534 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11535 if (integer_zerop (op2)
11536 && truth_value_p (TREE_CODE (arg0))
11537 && truth_value_p (TREE_CODE (arg1)))
11538 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11539 fold_convert (type, arg0),
11542 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11543 if (integer_onep (op2)
11544 && truth_value_p (TREE_CODE (arg0))
11545 && truth_value_p (TREE_CODE (arg1)))
11547 /* Only perform transformation if ARG0 is easily inverted. */
11548 tem = fold_truth_not_expr (arg0);
11550 return fold_build2 (TRUTH_ORIF_EXPR, type,
11551 fold_convert (type, tem),
11555 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11556 if (integer_zerop (arg1)
11557 && truth_value_p (TREE_CODE (arg0))
11558 && truth_value_p (TREE_CODE (op2)))
11560 /* Only perform transformation if ARG0 is easily inverted. */
11561 tem = fold_truth_not_expr (arg0);
11563 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11564 fold_convert (type, tem),
11568 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11569 if (integer_onep (arg1)
11570 && truth_value_p (TREE_CODE (arg0))
11571 && truth_value_p (TREE_CODE (op2)))
11572 return fold_build2 (TRUTH_ORIF_EXPR, type,
11573 fold_convert (type, arg0),
11579 /* Check for a built-in function. */
11580 if (TREE_CODE (op0) == ADDR_EXPR
11581 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11582 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11583 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11586 case BIT_FIELD_REF:
11587 if (TREE_CODE (arg0) == VECTOR_CST
11588 && type == TREE_TYPE (TREE_TYPE (arg0))
11589 && host_integerp (arg1, 1)
11590 && host_integerp (op2, 1))
11592 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11593 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11596 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11597 && (idx % width) == 0
11598 && (idx = idx / width)
11599 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11601 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11602 while (idx-- > 0 && elements)
11603 elements = TREE_CHAIN (elements);
11605 return TREE_VALUE (elements);
11607 return fold_convert (type, integer_zero_node);
11614 } /* switch (code) */
11617 /* Perform constant folding and related simplification of EXPR.
11618 The related simplifications include x*1 => x, x*0 => 0, etc.,
11619 and application of the associative law.
11620 NOP_EXPR conversions may be removed freely (as long as we
11621 are careful not to change the type of the overall expression).
11622 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11623 but we can constant-fold them if they have constant operands. */
11625 #ifdef ENABLE_FOLD_CHECKING
11626 # define fold(x) fold_1 (x)
11627 static tree fold_1 (tree);
11633 const tree t = expr;
11634 enum tree_code code = TREE_CODE (t);
11635 enum tree_code_class kind = TREE_CODE_CLASS (code);
11638 /* Return right away if a constant. */
11639 if (kind == tcc_constant)
11642 if (IS_EXPR_CODE_CLASS (kind))
11644 tree type = TREE_TYPE (t);
11645 tree op0, op1, op2;
11647 switch (TREE_CODE_LENGTH (code))
11650 op0 = TREE_OPERAND (t, 0);
11651 tem = fold_unary (code, type, op0);
11652 return tem ? tem : expr;
11654 op0 = TREE_OPERAND (t, 0);
11655 op1 = TREE_OPERAND (t, 1);
11656 tem = fold_binary (code, type, op0, op1);
11657 return tem ? tem : expr;
11659 op0 = TREE_OPERAND (t, 0);
11660 op1 = TREE_OPERAND (t, 1);
11661 op2 = TREE_OPERAND (t, 2);
11662 tem = fold_ternary (code, type, op0, op1, op2);
11663 return tem ? tem : expr;
11672 return fold (DECL_INITIAL (t));
11676 } /* switch (code) */
11679 #ifdef ENABLE_FOLD_CHECKING
11682 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11683 static void fold_check_failed (tree, tree);
11684 void print_fold_checksum (tree);
11686 /* When --enable-checking=fold, compute a digest of expr before
11687 and after the actual fold call to verify that fold did not accidentally
11688 change the original expr. */
11694 struct md5_ctx ctx;
11695 unsigned char checksum_before[16], checksum_after[16];
11698 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11699 md5_init_ctx (&ctx);
11700 fold_checksum_tree (expr, &ctx, ht);
11701 md5_finish_ctx (&ctx, checksum_before);
11704 ret = fold_1 (expr);
11706 md5_init_ctx (&ctx);
11707 fold_checksum_tree (expr, &ctx, ht);
11708 md5_finish_ctx (&ctx, checksum_after);
11711 if (memcmp (checksum_before, checksum_after, 16))
11712 fold_check_failed (expr, ret);
11718 print_fold_checksum (tree expr)
11720 struct md5_ctx ctx;
11721 unsigned char checksum[16], cnt;
11724 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11725 md5_init_ctx (&ctx);
11726 fold_checksum_tree (expr, &ctx, ht);
11727 md5_finish_ctx (&ctx, checksum);
11729 for (cnt = 0; cnt < 16; ++cnt)
11730 fprintf (stderr, "%02x", checksum[cnt]);
11731 putc ('\n', stderr);
11735 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11737 internal_error ("fold check: original tree changed by fold");
11741 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11744 enum tree_code code;
11745 struct tree_function_decl buf;
11750 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11751 <= sizeof (struct tree_function_decl))
11752 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11755 slot = htab_find_slot (ht, expr, INSERT);
11759 code = TREE_CODE (expr);
11760 if (TREE_CODE_CLASS (code) == tcc_declaration
11761 && DECL_ASSEMBLER_NAME_SET_P (expr))
11763 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11764 memcpy ((char *) &buf, expr, tree_size (expr));
11765 expr = (tree) &buf;
11766 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11768 else if (TREE_CODE_CLASS (code) == tcc_type
11769 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11770 || TYPE_CACHED_VALUES_P (expr)
11771 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11773 /* Allow these fields to be modified. */
11774 memcpy ((char *) &buf, expr, tree_size (expr));
11775 expr = (tree) &buf;
11776 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11777 TYPE_POINTER_TO (expr) = NULL;
11778 TYPE_REFERENCE_TO (expr) = NULL;
11779 if (TYPE_CACHED_VALUES_P (expr))
11781 TYPE_CACHED_VALUES_P (expr) = 0;
11782 TYPE_CACHED_VALUES (expr) = NULL;
11785 md5_process_bytes (expr, tree_size (expr), ctx);
11786 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11787 if (TREE_CODE_CLASS (code) != tcc_type
11788 && TREE_CODE_CLASS (code) != tcc_declaration
11789 && code != TREE_LIST)
11790 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11791 switch (TREE_CODE_CLASS (code))
11797 md5_process_bytes (TREE_STRING_POINTER (expr),
11798 TREE_STRING_LENGTH (expr), ctx);
11801 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11802 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11805 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11811 case tcc_exceptional:
11815 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11816 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11817 expr = TREE_CHAIN (expr);
11818 goto recursive_label;
11821 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11822 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11828 case tcc_expression:
11829 case tcc_reference:
11830 case tcc_comparison:
11833 case tcc_statement:
11834 len = TREE_CODE_LENGTH (code);
11835 for (i = 0; i < len; ++i)
11836 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11838 case tcc_declaration:
11839 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11840 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11841 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11843 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11844 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11845 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11846 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11847 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11849 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11850 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11852 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11854 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11855 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11856 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11860 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11861 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11862 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11863 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11864 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11865 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11866 if (INTEGRAL_TYPE_P (expr)
11867 || SCALAR_FLOAT_TYPE_P (expr))
11869 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11870 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11872 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11873 if (TREE_CODE (expr) == RECORD_TYPE
11874 || TREE_CODE (expr) == UNION_TYPE
11875 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11876 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11877 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11886 /* Fold a unary tree expression with code CODE of type TYPE with an
11887 operand OP0. Return a folded expression if successful. Otherwise,
11888 return a tree expression with code CODE of type TYPE with an operand OP0. */
11892 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11895 #ifdef ENABLE_FOLD_CHECKING
11896 unsigned char checksum_before[16], checksum_after[16];
11897 struct md5_ctx ctx;
11900 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11901 md5_init_ctx (&ctx);
11902 fold_checksum_tree (op0, &ctx, ht);
11903 md5_finish_ctx (&ctx, checksum_before);
11907 tem = fold_unary (code, type, op0);
11909 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11911 #ifdef ENABLE_FOLD_CHECKING
11912 md5_init_ctx (&ctx);
11913 fold_checksum_tree (op0, &ctx, ht);
11914 md5_finish_ctx (&ctx, checksum_after);
11917 if (memcmp (checksum_before, checksum_after, 16))
11918 fold_check_failed (op0, tem);
11923 /* Fold a binary tree expression with code CODE of type TYPE with
11924 operands OP0 and OP1. Return a folded expression if successful.
11925 Otherwise, return a tree expression with code CODE of type TYPE
11926 with operands OP0 and OP1. */
11929 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11933 #ifdef ENABLE_FOLD_CHECKING
11934 unsigned char checksum_before_op0[16],
11935 checksum_before_op1[16],
11936 checksum_after_op0[16],
11937 checksum_after_op1[16];
11938 struct md5_ctx ctx;
11941 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11942 md5_init_ctx (&ctx);
11943 fold_checksum_tree (op0, &ctx, ht);
11944 md5_finish_ctx (&ctx, checksum_before_op0);
11947 md5_init_ctx (&ctx);
11948 fold_checksum_tree (op1, &ctx, ht);
11949 md5_finish_ctx (&ctx, checksum_before_op1);
11953 tem = fold_binary (code, type, op0, op1);
11955 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11957 #ifdef ENABLE_FOLD_CHECKING
11958 md5_init_ctx (&ctx);
11959 fold_checksum_tree (op0, &ctx, ht);
11960 md5_finish_ctx (&ctx, checksum_after_op0);
11963 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11964 fold_check_failed (op0, tem);
11966 md5_init_ctx (&ctx);
11967 fold_checksum_tree (op1, &ctx, ht);
11968 md5_finish_ctx (&ctx, checksum_after_op1);
11971 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11972 fold_check_failed (op1, tem);
11977 /* Fold a ternary tree expression with code CODE of type TYPE with
11978 operands OP0, OP1, and OP2. Return a folded expression if
11979 successful. Otherwise, return a tree expression with code CODE of
11980 type TYPE with operands OP0, OP1, and OP2. */
11983 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11987 #ifdef ENABLE_FOLD_CHECKING
11988 unsigned char checksum_before_op0[16],
11989 checksum_before_op1[16],
11990 checksum_before_op2[16],
11991 checksum_after_op0[16],
11992 checksum_after_op1[16],
11993 checksum_after_op2[16];
11994 struct md5_ctx ctx;
11997 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11998 md5_init_ctx (&ctx);
11999 fold_checksum_tree (op0, &ctx, ht);
12000 md5_finish_ctx (&ctx, checksum_before_op0);
12003 md5_init_ctx (&ctx);
12004 fold_checksum_tree (op1, &ctx, ht);
12005 md5_finish_ctx (&ctx, checksum_before_op1);
12008 md5_init_ctx (&ctx);
12009 fold_checksum_tree (op2, &ctx, ht);
12010 md5_finish_ctx (&ctx, checksum_before_op2);
12014 tem = fold_ternary (code, type, op0, op1, op2);
12016 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12018 #ifdef ENABLE_FOLD_CHECKING
12019 md5_init_ctx (&ctx);
12020 fold_checksum_tree (op0, &ctx, ht);
12021 md5_finish_ctx (&ctx, checksum_after_op0);
12024 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12025 fold_check_failed (op0, tem);
12027 md5_init_ctx (&ctx);
12028 fold_checksum_tree (op1, &ctx, ht);
12029 md5_finish_ctx (&ctx, checksum_after_op1);
12032 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12033 fold_check_failed (op1, tem);
12035 md5_init_ctx (&ctx);
12036 fold_checksum_tree (op2, &ctx, ht);
12037 md5_finish_ctx (&ctx, checksum_after_op2);
12040 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12041 fold_check_failed (op2, tem);
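/* Illustrative sketch only, not from the original sources: fold_build2
   either simplifies on the spot (here "x * 1" comes back as X itself, one
   of the simplifications listed at the top of the file) or falls back to
   building the expression unchanged.  X is a hypothetical operand tree.  */
static tree
example_fold_build_mult_by_one (tree x)
{
  return fold_build2 (MULT_EXPR, TREE_TYPE (x), x,
                      build_int_cst (TREE_TYPE (x), 1));
}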
12046 /* Perform constant folding and related simplification of initializer
12047 expression EXPR. These behave identically to "fold_buildN" but ignore
12048 potential run-time traps and exceptions that fold must preserve. */
12050 #define START_FOLD_INIT \
12051 int saved_signaling_nans = flag_signaling_nans;\
12052 int saved_trapping_math = flag_trapping_math;\
12053 int saved_rounding_math = flag_rounding_math;\
12054 int saved_trapv = flag_trapv;\
12055 int saved_folding_initializer = folding_initializer;\
12056 flag_signaling_nans = 0;\
12057 flag_trapping_math = 0;\
12058 flag_rounding_math = 0;\
12060 folding_initializer = 1;
12062 #define END_FOLD_INIT \
12063 flag_signaling_nans = saved_signaling_nans;\
12064 flag_trapping_math = saved_trapping_math;\
12065 flag_rounding_math = saved_rounding_math;\
12066 flag_trapv = saved_trapv;\
12067 folding_initializer = saved_folding_initializer;
12070 fold_build1_initializer (enum tree_code code, tree type, tree op)
12075 result = fold_build1 (code, type, op);
12082 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12087 result = fold_build2 (code, type, op0, op1);
12094 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12100 result = fold_build3 (code, type, op0, op1, op2);
12106 #undef START_FOLD_INIT
12107 #undef END_FOLD_INIT
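/* Illustrative sketch only, not from the original sources: the
   *_initializer entry points are called exactly like fold_buildN;
   START_FOLD_INIT above temporarily clears the trapping/rounding flags
   around the call so initializer folding is not blocked by them.  */
static tree
example_fold_initializer_sum (void)
{
  return fold_build2_initializer (PLUS_EXPR, integer_type_node,
                                  integer_one_node,
                                  build_int_cst (integer_type_node, 4));
}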
12109 /* Determine if the first argument is a multiple of the second argument. Return 0 if
12110 it is not, or if we cannot easily determine that it is.
12112 An example of the sort of thing we care about (at this point; this routine
12113 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12114 fold cases do now) is discovering that
12116 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12122 is a multiple of SAVE_EXPR (J * 8) when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12124 This code also handles discovering that
12126 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12128 is a multiple of 8 so we don't have to worry about dealing with a
12129 possible remainder.
12131 Note that we *look* inside a SAVE_EXPR only to determine how it was
12132 calculated; it is not safe for fold to do much of anything else with the
12133 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12134 at run time. For example, the latter example above *cannot* be implemented
12135 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12136 evaluation time of the original SAVE_EXPR is not necessarily the same at
12137 the time the new expression is evaluated. The only optimization of this
12138 sort that would be valid is changing
12140 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12144 SAVE_EXPR (I) * SAVE_EXPR (J)
12146 (where the same SAVE_EXPR (J) is used in the original and the
12147 transformed version). */
12150 multiple_of_p (tree type, tree top, tree bottom)
12152 if (operand_equal_p (top, bottom, 0))
12155 if (TREE_CODE (type) != INTEGER_TYPE)
12158 switch (TREE_CODE (top))
12161 /* Bitwise and provides a power of two multiple. If the mask is
12162 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12163 if (!integer_pow2p (bottom))
12168 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12169 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12173 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12174 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12177 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12181 op1 = TREE_OPERAND (top, 1);
12182 /* const_binop may not detect overflow correctly,
12183 so check for it explicitly here. */
12184 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12185 > TREE_INT_CST_LOW (op1)
12186 && TREE_INT_CST_HIGH (op1) == 0
12187 && 0 != (t1 = fold_convert (type,
12188 const_binop (LSHIFT_EXPR,
12191 && ! TREE_OVERFLOW (t1))
12192 return multiple_of_p (type, t1, bottom);
12197 /* Can't handle conversions from non-integral or wider integral type. */
12198 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12199 || (TYPE_PRECISION (type)
12200 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12203 /* .. fall through ... */
12206 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12209 if (TREE_CODE (bottom) != INTEGER_CST
12210 || (TYPE_UNSIGNED (type)
12211 && (tree_int_cst_sgn (top) < 0
12212 || tree_int_cst_sgn (bottom) < 0)))
12214 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
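/* Illustrative sketch only, not from the original sources: with TYPE being
   sizetype, the MULT_EXPR case above lets multiple_of_p report that
   "n * 8" is a multiple of 8, the kind of fact the layout code wants.
   N is a hypothetical size expression.  */
static int
example_multiple_of_eight (tree n)
{
  tree top = size_binop (MULT_EXPR, fold_convert (sizetype, n), size_int (8));
  return multiple_of_p (sizetype, top, size_int (8));
}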
12222 /* Return true if `t' is known to be non-negative. */
12225 tree_expr_nonnegative_p (tree t)
12227 if (t == error_mark_node)
12230 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12233 switch (TREE_CODE (t))
12236 /* Query VRP to see if it has recorded any information about
12237 the range of this object. */
12238 return ssa_name_nonnegative_p (t);
12241 /* We can't return 1 if flag_wrapv is set because
12242 ABS_EXPR<INT_MIN> = INT_MIN. */
12243 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12248 return tree_int_cst_sgn (t) >= 0;
12251 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12254 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12255 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12256 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12258 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12259 both unsigned and at least 2 bits shorter than the result. */
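      /* For instance, two 8-bit unsigned values zero-extended to a 32-bit
	 sum yield at most 0xFF + 0xFF = 0x1FE, which fits in 9 bits, so
	 the result cannot reach the sign bit.  */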
12260 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12261 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12262 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12264 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12265 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12266 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12267 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12269 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12270 TYPE_PRECISION (inner2)) + 1;
12271 return prec < TYPE_PRECISION (TREE_TYPE (t));
12277 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12279 /* x * x for floating point x is always non-negative. */
12280 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12282 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12283 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12286 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12287 both unsigned and the sum of their precisions is less than the precision of the result. */
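      /* For instance, an 8-bit by 8-bit unsigned product zero-extended to
	 32 bits is at most 0xFF * 0xFF = 0xFE01, which fits in 16 bits,
	 so it cannot reach the sign bit of the result.  */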
12288 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12289 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12290 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12292 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12293 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12294 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12295 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12296 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12297 < TYPE_PRECISION (TREE_TYPE (t));
12303 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12304 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12310 case TRUNC_DIV_EXPR:
12311 case CEIL_DIV_EXPR:
12312 case FLOOR_DIV_EXPR:
12313 case ROUND_DIV_EXPR:
12314 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12315 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12317 case TRUNC_MOD_EXPR:
12318 case CEIL_MOD_EXPR:
12319 case FLOOR_MOD_EXPR:
12320 case ROUND_MOD_EXPR:
12322 case NON_LVALUE_EXPR:
12324 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12326 case COMPOUND_EXPR:
12328 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12331 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12334 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12335 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12339 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12340 tree outer_type = TREE_TYPE (t);
12342 if (TREE_CODE (outer_type) == REAL_TYPE)
12344 if (TREE_CODE (inner_type) == REAL_TYPE)
12345 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12346 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12348 if (TYPE_UNSIGNED (inner_type))
12350 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12353 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12355 if (TREE_CODE (inner_type) == REAL_TYPE)
12356 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12357 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12358 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12359 && TYPE_UNSIGNED (inner_type);
12366 tree temp = TARGET_EXPR_SLOT (t);
12367 t = TARGET_EXPR_INITIAL (t);
12369 /* If the initializer is non-void, then it's a normal expression
12370 that will be assigned to the slot. */
12371 if (!VOID_TYPE_P (t))
12372 return tree_expr_nonnegative_p (t);
12374 /* Otherwise, the initializer sets the slot in some way. One common
12375 way is an assignment statement at the end of the initializer. */
12378 if (TREE_CODE (t) == BIND_EXPR)
12379 t = expr_last (BIND_EXPR_BODY (t));
12380 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12381 || TREE_CODE (t) == TRY_CATCH_EXPR)
12382 t = expr_last (TREE_OPERAND (t, 0));
12383 else if (TREE_CODE (t) == STATEMENT_LIST)
12388 if (TREE_CODE (t) == MODIFY_EXPR
12389 && TREE_OPERAND (t, 0) == temp)
12390 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12397 tree fndecl = get_callee_fndecl (t);
12398 tree arglist = TREE_OPERAND (t, 1);
12399 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12400 switch (DECL_FUNCTION_CODE (fndecl))
12402 CASE_FLT_FN (BUILT_IN_ACOS):
12403 CASE_FLT_FN (BUILT_IN_ACOSH):
12404 CASE_FLT_FN (BUILT_IN_CABS):
12405 CASE_FLT_FN (BUILT_IN_COSH):
12406 CASE_FLT_FN (BUILT_IN_ERFC):
12407 CASE_FLT_FN (BUILT_IN_EXP):
12408 CASE_FLT_FN (BUILT_IN_EXP10):
12409 CASE_FLT_FN (BUILT_IN_EXP2):
12410 CASE_FLT_FN (BUILT_IN_FABS):
12411 CASE_FLT_FN (BUILT_IN_FDIM):
12412 CASE_FLT_FN (BUILT_IN_HYPOT):
12413 CASE_FLT_FN (BUILT_IN_POW10):
12414 CASE_INT_FN (BUILT_IN_FFS):
12415 CASE_INT_FN (BUILT_IN_PARITY):
12416 CASE_INT_FN (BUILT_IN_POPCOUNT):
12417 case BUILT_IN_BSWAP32:
12418 case BUILT_IN_BSWAP64:
12422 CASE_FLT_FN (BUILT_IN_SQRT):
12423 /* sqrt(-0.0) is -0.0. */
12424 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12426 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12428 CASE_FLT_FN (BUILT_IN_ASINH):
12429 CASE_FLT_FN (BUILT_IN_ATAN):
12430 CASE_FLT_FN (BUILT_IN_ATANH):
12431 CASE_FLT_FN (BUILT_IN_CBRT):
12432 CASE_FLT_FN (BUILT_IN_CEIL):
12433 CASE_FLT_FN (BUILT_IN_ERF):
12434 CASE_FLT_FN (BUILT_IN_EXPM1):
12435 CASE_FLT_FN (BUILT_IN_FLOOR):
12436 CASE_FLT_FN (BUILT_IN_FMOD):
12437 CASE_FLT_FN (BUILT_IN_FREXP):
12438 CASE_FLT_FN (BUILT_IN_LCEIL):
12439 CASE_FLT_FN (BUILT_IN_LDEXP):
12440 CASE_FLT_FN (BUILT_IN_LFLOOR):
12441 CASE_FLT_FN (BUILT_IN_LLCEIL):
12442 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12443 CASE_FLT_FN (BUILT_IN_LLRINT):
12444 CASE_FLT_FN (BUILT_IN_LLROUND):
12445 CASE_FLT_FN (BUILT_IN_LRINT):
12446 CASE_FLT_FN (BUILT_IN_LROUND):
12447 CASE_FLT_FN (BUILT_IN_MODF):
12448 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12449 CASE_FLT_FN (BUILT_IN_RINT):
12450 CASE_FLT_FN (BUILT_IN_ROUND):
12451 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12452 CASE_FLT_FN (BUILT_IN_SINH):
12453 CASE_FLT_FN (BUILT_IN_TANH):
12454 CASE_FLT_FN (BUILT_IN_TRUNC):
12455 /* True if the 1st argument is nonnegative. */
12456 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12458 CASE_FLT_FN (BUILT_IN_FMAX):
12459 /* True if the 1st OR 2nd arguments are nonnegative. */
12460 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12461 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12463 CASE_FLT_FN (BUILT_IN_FMIN):
12464 /* True if the 1st AND 2nd arguments are nonnegative. */
12465 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12466 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12468 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12469 /* True if the 2nd argument is nonnegative. */
12470 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12472 CASE_FLT_FN (BUILT_IN_POWI):
12473 /* True if the 1st argument is nonnegative or the second
12474 argument is an even integer. */
12475 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12477 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12478 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12481 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12483 CASE_FLT_FN (BUILT_IN_POW):
12484 /* True if the 1st argument is nonnegative or the second
12485 argument is an even integer valued real. */
12486 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12491 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12492 n = real_to_integer (&c);
12495 REAL_VALUE_TYPE cint;
12496 real_from_integer (&cint, VOIDmode, n,
12497 n < 0 ? -1 : 0, 0);
12498 if (real_identical (&c, &cint))
12502 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12509 /* ... fall through ... */
12512 if (truth_value_p (TREE_CODE (t)))
12513 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12517 /* We don't know the sign of `t', so be conservative and return false. */
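/* Illustrative sketch only, not from the original sources: widening an
   unsigned value through a NOP_EXPR to a wider signed type is one of the
   cases handled above; the result is known non-negative.  X is a
   hypothetical tree of a narrower unsigned type such as unsigned char.  */
static int
example_widened_unsigned_nonnegative (tree x)
{
  tree widened = fold_build1 (NOP_EXPR, integer_type_node, x);
  return tree_expr_nonnegative_p (widened);
}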
12521 /* Return true when T is an address and is known to be nonzero.
12522 For floating point we further ensure that T is not denormal.
12523 Similar logic is present in nonzero_address in rtlanal.h. */
12526 tree_expr_nonzero_p (tree t)
12528 tree type = TREE_TYPE (t);
12530 /* Doing something useful for floating point would need more work. */
12531 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12534 switch (TREE_CODE (t))
12537 /* Query VRP to see if it has recorded any information about
12538 the range of this object. */
12539 return ssa_name_nonzero_p (t);
12542 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12545 /* We used to test for !integer_zerop here. This does not work correctly
12546 if TREE_CONSTANT_OVERFLOW (t). */
12547 return (TREE_INT_CST_LOW (t) != 0
12548 || TREE_INT_CST_HIGH (t) != 0);
12551 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12553 /* In the presence of negative values it is hard
12554 to say anything definite. */
12555 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12556 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12558 /* One of the operands must be positive and the other non-negative. */
12559 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12560 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12565 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12567 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12568 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12574 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12575 tree outer_type = TREE_TYPE (t);
12577 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12578 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12584 tree base = get_base_address (TREE_OPERAND (t, 0));
12589 /* Weak declarations may link to NULL. */
12590 if (VAR_OR_FUNCTION_DECL_P (base))
12591 return !DECL_WEAK (base);
12593 /* Constants are never weak. */
12594 if (CONSTANT_CLASS_P (base))
12601 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12602 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12605 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12606 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12609 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12611 /* When both operands are nonzero, then MAX must be too. */
12612 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12615 /* MAX where operand 0 is positive is positive. */
12616 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12618 /* MAX where operand 1 is positive is positive. */
12619 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12620 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12624 case COMPOUND_EXPR:
12627 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12630 case NON_LVALUE_EXPR:
12631 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12634 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12635 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12638 return alloca_call_p (t);
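/* Illustrative sketch only, not from the original sources: the ADDR_EXPR
   case above makes the address of a non-weak declaration known nonzero,
   which is what lets "&var != 0" fold to true.  DECL is a hypothetical
   VAR_DECL that is not declared weak.  */
static int
example_address_nonzero (tree decl)
{
  return tree_expr_nonzero_p (build_fold_addr_expr (decl));
}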
12646 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12647 attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1.
12650 If the expression could be simplified to a constant, then return
12651 the constant. If the expression could not be simplified to a
12652 constant, then return NULL_TREE. */
12655 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12657 tree tem = fold_binary (code, type, op0, op1);
12658 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12661 /* Given the components of a unary expression CODE, TYPE and OP0,
12662 attempt to fold the expression to a constant without modifying TYPE or OP0.
12665 If the expression could be simplified to a constant, then return
12666 the constant. If the expression could not be simplified to a
12667 constant, then return NULL_TREE. */
12670 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12672 tree tem = fold_unary (code, type, op0);
12673 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
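/* Illustrative sketch only, not from the original sources: constant
   folding "-5" down to an INTEGER_CST; a non-constant operand would make
   the helper return NULL_TREE instead.  */
static tree
example_fold_constant_negate (void)
{
  return fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                 build_int_cst (integer_type_node, 5));
}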
12676 /* If EXP represents referencing an element in a constant string
12677 (either via pointer arithmetic or array indexing), return the
12678 tree representing the value accessed, otherwise return NULL. */
12681 fold_read_from_constant_string (tree exp)
12683 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12685 tree exp1 = TREE_OPERAND (exp, 0);
12689 if (TREE_CODE (exp) == INDIRECT_REF)
12690 string = string_constant (exp1, &index);
12693 tree low_bound = array_ref_low_bound (exp);
12694 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12696 /* Optimize the special-case of a zero lower bound.
12698 We convert the low_bound to sizetype to avoid some problems
12699 with constant folding. (E.g. suppose the lower bound is 1,
12700 and its mode is QI. Without the conversion, (ARRAY
12701 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12702 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12703 if (! integer_zerop (low_bound))
12704 index = size_diffop (index, fold_convert (sizetype, low_bound));
12710 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12711 && TREE_CODE (string) == STRING_CST
12712 && TREE_CODE (index) == INTEGER_CST
12713 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12714 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12716 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12717 return fold_convert (TREE_TYPE (exp),
12718 build_int_cst (NULL_TREE,
12719 (TREE_STRING_POINTER (string)
12720 [TREE_INT_CST_LOW (index)])));
12725 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12726 an integer constant or real constant.
12728 TYPE is the type of the result. */
12731 fold_negate_const (tree arg0, tree type)
12733 tree t = NULL_TREE;
12735 switch (TREE_CODE (arg0))
12739 unsigned HOST_WIDE_INT low;
12740 HOST_WIDE_INT high;
12741 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12742 TREE_INT_CST_HIGH (arg0),
12744 t = build_int_cst_wide (type, low, high);
12745 t = force_fit_type (t, 1,
12746 (overflow | TREE_OVERFLOW (arg0))
12747 && !TYPE_UNSIGNED (type),
12748 TREE_CONSTANT_OVERFLOW (arg0));
12753 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12757 gcc_unreachable ();
12763 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12764 an integer constant or real constant.
12766 TYPE is the type of the result. */
12769 fold_abs_const (tree arg0, tree type)
12771 tree t = NULL_TREE;
12773 switch (TREE_CODE (arg0))
12776 /* If the value is unsigned, then the absolute value is
12777 the same as the ordinary value. */
12778 if (TYPE_UNSIGNED (type))
12780 /* Similarly, if the value is non-negative. */
12781 else if (INT_CST_LT (integer_minus_one_node, arg0))
12783 /* If the value is negative, then the absolute value is its negation. */
12787 unsigned HOST_WIDE_INT low;
12788 HOST_WIDE_INT high;
12789 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12790 TREE_INT_CST_HIGH (arg0),
12792 t = build_int_cst_wide (type, low, high);
12793 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12794 TREE_CONSTANT_OVERFLOW (arg0));
12799 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12800 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12806 gcc_unreachable ();
12812 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12813 constant. TYPE is the type of the result. */
12816 fold_not_const (tree arg0, tree type)
12818 tree t = NULL_TREE;
12820 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12822 t = build_int_cst_wide (type,
12823 ~ TREE_INT_CST_LOW (arg0),
12824 ~ TREE_INT_CST_HIGH (arg0));
12825 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12826 TREE_CONSTANT_OVERFLOW (arg0));
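/* Illustrative sketch only, not from the original sources: ~5 computed at
   compile time; the force_fit_type call above truncates the complement to
   the width of TYPE and propagates any overflow flags.  */
static tree
example_fold_not_five (void)
{
  return fold_not_const (build_int_cst (integer_type_node, 5),
                         integer_type_node);
}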
12831 /* Given CODE, a relational operator, the target type, TYPE and two
12832 constant operands OP0 and OP1, return the result of the
12833 relational operation. If the result is not a compile time
12834 constant, then return NULL_TREE. */
12837 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12839 int result, invert;
12841 /* From here on, the only cases we handle are when the result is
12842 known to be a constant. */
12844 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12846 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12847 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12849 /* Handle the cases where either operand is a NaN. */
12850 if (real_isnan (c0) || real_isnan (c1))
12860 case UNORDERED_EXPR:
12874 if (flag_trapping_math)
12880 gcc_unreachable ();
12883 return constant_boolean_node (result, type);
12886 return constant_boolean_node (real_compare (code, c0, c1), type);
12889 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12891 To compute GT, swap the arguments and do LT.
12892 To compute GE, do LT and invert the result.
12893 To compute LE, swap the arguments, do LT and invert the result.
12894 To compute NE, do EQ and invert the result.
12896 Therefore, the code below must handle only EQ and LT. */
12898 if (code == LE_EXPR || code == GT_EXPR)
12903 code = swap_tree_comparison (code);
12906 /* Note that it is safe to invert for real values here because we
12907 have already handled the one case where it matters. */
12910 if (code == NE_EXPR || code == GE_EXPR)
12913 code = invert_tree_comparison (code, false);
12916 /* Compute a result for LT or EQ if args permit;
12917 Otherwise return T. */
12918 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12920 if (code == EQ_EXPR)
12921 result = tree_int_cst_equal (op0, op1);
12922 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12923 result = INT_CST_LT_UNSIGNED (op0, op1);
12925 result = INT_CST_LT (op0, op1);
12932 return constant_boolean_node (result, type);
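/* Illustrative sketch only, not from the original sources: "2 < 7"
   reduced to a constant truth value by the LT path above.  */
static tree
example_fold_two_less_than_seven (void)
{
  return fold_relational_const (LT_EXPR, boolean_type_node,
                                build_int_cst (integer_type_node, 2),
                                build_int_cst (integer_type_node, 7));
}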
12935 /* Build an expression for a cleanup point containing EXPR with type TYPE.
12936 Don't build a cleanup point expression for EXPR which doesn't have side effects. */
12940 fold_build_cleanup_point_expr (tree type, tree expr)
12942 /* If the expression does not have side effects then we don't have to wrap
12943 it with a cleanup point expression. */
12944 if (!TREE_SIDE_EFFECTS (expr))
12947 /* If the expression is a return, check to see if the expression inside the
12948 return has no side effects, or if the right hand side of the modify expression
12949 inside the return has none. If either has no side effects, we don't need to
12950 wrap the expression in a cleanup point expression. Note we don't check the
12951 left hand side of the modify because it should always be a return decl. */
12952 if (TREE_CODE (expr) == RETURN_EXPR)
12954 tree op = TREE_OPERAND (expr, 0);
12955 if (!op || !TREE_SIDE_EFFECTS (op))
12957 op = TREE_OPERAND (op, 1);
12958 if (!TREE_SIDE_EFFECTS (op))
12962 return build1 (CLEANUP_POINT_EXPR, type, expr);
12965 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12966 avoid confusing the gimplify process. */
12969 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12971 /* The size of the object is not relevant when talking about its address. */
12972 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12973 t = TREE_OPERAND (t, 0);
12975 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12976 if (TREE_CODE (t) == INDIRECT_REF
12977 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12979 t = TREE_OPERAND (t, 0);
12980 if (TREE_TYPE (t) != ptrtype)
12981 t = build1 (NOP_EXPR, ptrtype, t);
12987 while (handled_component_p (base))
12988 base = TREE_OPERAND (base, 0);
12990 TREE_ADDRESSABLE (base) = 1;
12992 t = build1 (ADDR_EXPR, ptrtype, t);
12999 build_fold_addr_expr (tree t)
13001 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
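/* Illustrative sketch only, not from the original sources: taking the
   address of an indirection folds back to the pointer itself, so "&*p"
   simplifies to P (possibly wrapped in a NOP_EXPR to fix up the type).
   P is a hypothetical pointer-valued tree.  */
static tree
example_addr_of_indirect (tree p)
{
  return build_fold_addr_expr (build_fold_indirect_ref (p));
}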
13004 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13005 of an indirection through OP0, or NULL_TREE if no simplification is possible. */
13009 fold_indirect_ref_1 (tree type, tree op0)
13015 subtype = TREE_TYPE (sub);
13016 if (!POINTER_TYPE_P (subtype))
13019 if (TREE_CODE (sub) == ADDR_EXPR)
13021 tree op = TREE_OPERAND (sub, 0);
13022 tree optype = TREE_TYPE (op);
13023 /* *&p => p; make sure to handle *&"str"[cst] here. */
13024 if (type == optype)
13026 tree fop = fold_read_from_constant_string (op);
13032 /* *(foo *)&fooarray => fooarray[0] */
13033 else if (TREE_CODE (optype) == ARRAY_TYPE
13034 && type == TREE_TYPE (optype))
13036 tree type_domain = TYPE_DOMAIN (optype);
13037 tree min_val = size_zero_node;
13038 if (type_domain && TYPE_MIN_VALUE (type_domain))
13039 min_val = TYPE_MIN_VALUE (type_domain);
13040 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13042 /* *(foo *)&complexfoo => __real__ complexfoo */
13043 else if (TREE_CODE (optype) == COMPLEX_TYPE
13044 && type == TREE_TYPE (optype))
13045 return fold_build1 (REALPART_EXPR, type, op);
13048 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13049 if (TREE_CODE (sub) == PLUS_EXPR
13050 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13052 tree op00 = TREE_OPERAND (sub, 0);
13053 tree op01 = TREE_OPERAND (sub, 1);
13057 op00type = TREE_TYPE (op00);
13058 if (TREE_CODE (op00) == ADDR_EXPR
13059 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13060 && type == TREE_TYPE (TREE_TYPE (op00type)))
13062 tree size = TYPE_SIZE_UNIT (type);
13063 if (tree_int_cst_equal (size, op01))
13064 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13068 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13069 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13070 && type == TREE_TYPE (TREE_TYPE (subtype)))
13073 tree min_val = size_zero_node;
13074 sub = build_fold_indirect_ref (sub);
13075 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13076 if (type_domain && TYPE_MIN_VALUE (type_domain))
13077 min_val = TYPE_MIN_VALUE (type_domain);
13078 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13084 /* Builds an expression for an indirection through T, simplifying some cases. */
13088 build_fold_indirect_ref (tree t)
13090 tree type = TREE_TYPE (TREE_TYPE (t));
13091 tree sub = fold_indirect_ref_1 (type, t);
13096 return build1 (INDIRECT_REF, type, t);
13099 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13102 fold_indirect_ref (tree t)
13104 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13112 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13113 whose result is ignored. The type of the returned tree need not be
13114 the same as the original expression. */
13117 fold_ignored_result (tree t)
13119 if (!TREE_SIDE_EFFECTS (t))
13120 return integer_zero_node;
13123 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13126 t = TREE_OPERAND (t, 0);
13130 case tcc_comparison:
13131 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13132 t = TREE_OPERAND (t, 0);
13133 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13134 t = TREE_OPERAND (t, 1);
13139 case tcc_expression:
13140 switch (TREE_CODE (t))
13142 case COMPOUND_EXPR:
13143 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13145 t = TREE_OPERAND (t, 0);
13149 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13150 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13152 t = TREE_OPERAND (t, 0);
13165 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13166 This can only be applied to objects of a sizetype. */
13169 round_up (tree value, int divisor)
13171 tree div = NULL_TREE;
13173 gcc_assert (divisor > 0);
13177 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13178 have to do anything. Only do this when we are not given a const,
13179 because in that case, this check is more expensive than just doing it. */
13181 if (TREE_CODE (value) != INTEGER_CST)
13183 div = build_int_cst (TREE_TYPE (value), divisor);
13185 if (multiple_of_p (TREE_TYPE (value), value, div))
13189 /* If divisor is a power of two, simplify this to bit manipulation. */
13190 if (divisor == (divisor & -divisor))
13194 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13195 value = size_binop (PLUS_EXPR, value, t);
13196 t = build_int_cst (TREE_TYPE (value), -divisor);
13197 value = size_binop (BIT_AND_EXPR, value, t);
13202 div = build_int_cst (TREE_TYPE (value), divisor);
13203 value = size_binop (CEIL_DIV_EXPR, value, div);
13204 value = size_binop (MULT_EXPR, value, div);
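/* Illustrative sketch only, not from the original sources: rounding a size
   up to an 8-byte boundary takes the bit-manipulation path above, i.e.
   (value + 7) & -8, because 8 is a power of two.  SIZE is a hypothetical
   expression already measured in bytes.  */
static tree
example_round_size_up_to_eight (tree size)
{
  return round_up (fold_convert (sizetype, size), 8);
}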
13210 /* Likewise, but round down. */
13213 round_down (tree value, int divisor)
13215 tree div = NULL_TREE;
13217 gcc_assert (divisor > 0);
13221 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13222 have to do anything. Only do this when we are not given a const,
13223 because in that case, this check is more expensive than just doing it. */
13225 if (TREE_CODE (value) != INTEGER_CST)
13227 div = build_int_cst (TREE_TYPE (value), divisor);
13229 if (multiple_of_p (TREE_TYPE (value), value, div))
13233 /* If divisor is a power of two, simplify this to bit manipulation. */
13234 if (divisor == (divisor & -divisor))
13238 t = build_int_cst (TREE_TYPE (value), -divisor);
13239 value = size_binop (BIT_AND_EXPR, value, t);
13244 div = build_int_cst (TREE_TYPE (value), divisor);
13245 value = size_binop (FLOOR_DIV_EXPR, value, div);
13246 value = size_binop (MULT_EXPR, value, div);
13252 /* Returns the pointer to the base of the object addressed by EXP and
13253 extracts the information about the offset of the access, storing it
13254 to PBITPOS and POFFSET. */
13257 split_address_to_core_and_offset (tree exp,
13258 HOST_WIDE_INT *pbitpos, tree *poffset)
13261 enum machine_mode mode;
13262 int unsignedp, volatilep;
13263 HOST_WIDE_INT bitsize;
13265 if (TREE_CODE (exp) == ADDR_EXPR)
13267 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13268 poffset, &mode, &unsignedp, &volatilep,
13270 core = build_fold_addr_expr (core);
13276 *poffset = NULL_TREE;
13282 /* Returns true if addresses of E1 and E2 differ by a constant, false
13283 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13286 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13289 HOST_WIDE_INT bitpos1, bitpos2;
13290 tree toffset1, toffset2, tdiff, type;
13292 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13293 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13295 if (bitpos1 % BITS_PER_UNIT != 0
13296 || bitpos2 % BITS_PER_UNIT != 0
13297 || !operand_equal_p (core1, core2, 0))
13300 if (toffset1 && toffset2)
13302 type = TREE_TYPE (toffset1);
13303 if (type != TREE_TYPE (toffset2))
13304 toffset2 = fold_convert (type, toffset2);
13306 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13307 if (!cst_and_fits_in_hwi (tdiff))
13310 *diff = int_cst_value (tdiff);
13312 else if (toffset1 || toffset2)
13314 /* If only one of the offsets is non-constant, the difference cannot be a constant. */
13321 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
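/* Illustrative sketch only, not from the original sources: the loop
   optimizers use this to learn, e.g., that &a[3] and &a[1] differ by a
   compile-time constant number of bytes; the byte difference comes back
   through the last argument.  ADDR1 and ADDR2 are hypothetical ADDR_EXPR
   trees.  */
static bool
example_addresses_differ_by_four (tree addr1, tree addr2)
{
  HOST_WIDE_INT diff;
  return ptr_difference_const (addr1, addr2, &diff) && diff == 4;
}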
13325 /* Simplify the floating point expression EXP when the sign of the
13326 result is not significant. Return NULL_TREE if no simplification is possible. */
13330 fold_strip_sign_ops (tree exp)
13334 switch (TREE_CODE (exp))
13338 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13339 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13343 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13345 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13346 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13347 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13348 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13349 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13350 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13354 /* Strip sign ops from the argument of "odd" math functions. */
13355 if (negate_mathfn_p (builtin_mathfn_code (exp)))
13357 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13359 return build_function_call_expr (get_callee_fndecl (exp),
13360 build_tree_list (NULL_TREE, arg0));