/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
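
/* A worked example (illustrative, assuming a 32-bit HOST_WIDE_INT): with
   a = 0x7fffffff and b = 1, sum wraps to 0x80000000.  Then a ^ b has a
   clear sign bit (a and b have the same sign), so ~(a ^ b) has it set,
   and a ^ sum also has it set (a and sum differ in sign); the AND is
   negative and the overflow is flagged.  With a = 1, b = -1, sum = 0,
   a ^ b has the sign bit set, so ~(a ^ b) clears it and no overflow is
   reported.  */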
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
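
/* For example (illustrative only), assuming HOST_BITS_PER_WIDE_INT == 32,
   BASE is 0x10000, and for x = 0x12345678 we get LOWPART (x) == 0x5678
   and HIGHPART (x) == 0x1234, so that
   x == LOWPART (x) + HIGHPART (x) * BASE.  */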
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
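
/* The two functions are inverses: decode undoes encode.  Illustratively,
   with a 32-bit HOST_WIDE_INT, encode (w, 0x89abcdef, 0x01234567) fills
   w with { 0xcdef, 0x89ab, 0x4567, 0x0123 }, and decode (w, &l, &h)
   then recovers l == 0x89abcdef and h == 0x01234567.  */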
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
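
/* As an illustration (not a call site from this file): fitting the
   value 0x1ff into an 8-bit unsigned type clears the bits beyond the
   precision and yields 0xff, while fitting 0xff into an 8-bit signed
   type sign extends the top bit and yields -1; in both cases a new
   node is returned because the stored value changed.  */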
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
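
/* The expression (l < l1) is the carry out of the low word: unsigned
   addition wraps, so the low sum is smaller than an operand exactly
   when a carry occurred.  E.g. with 32-bit pieces, l1 = 0xffffffff
   and l2 = 2 give l = 1 < l1, and the carry of 1 is added into the
   high word.  */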
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
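
/* The signed check works because when an operand is negative its signed
   value is its unsigned value minus 2^N, so the unsigned product's top
   half is too large by exactly the other operand; the neg_double plus
   add_double pairs above subtract that excess.  The product then fits
   iff the corrected top half is the sign extension of *HV: all zeros
   when *HV >= 0, all ones when *HV < 0, which is what the final return
   expression tests.  */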
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
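
/* Note the double shift `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1'
   above: a single shift by HOST_BITS_PER_WIDE_INT - count would be
   undefined in C when COUNT is 0 (a shift by the full word width),
   while splitting it into two well-defined shifts yields 0, the
   desired contribution of the low word for a zero shift count.  */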
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
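
/* Both rotates rely on the identity rot(x, n) == (x << n) | (x >> (prec - n))
   on the PREC low bits.  For instance, rotating the 8-bit value 0xb4
   left by 4 gives (0x40 | 0x0b) == 0x4b once truncated to 8 bits.  */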
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return 0;
}
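
/* A worked example of the rounding modes, dividing -8 by 3 (signed):
   TRUNC_DIV_EXPR yields quotient -2, remainder -2 (round toward zero);
   FLOOR_DIV_EXPR yields -3, remainder 1 (toward negative infinity);
   CEIL_DIV_EXPR yields -2, remainder -2 (toward positive infinity);
   ROUND_DIV_EXPR yields -3, remainder 1, because twice the absolute
   remainder (4) exceeds the absolute divisor (3), so the quotient is
   bumped away from zero.  */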
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_ERF):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
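
/* For a 32-bit signed type this returns false only for the most
   negative value: with prec == 32, val == 0x80000000 equals
   (unsigned HOST_WIDE_INT) 1 << 31, and indeed negating INT_MIN does
   not fit.  Every other value negates without overflow.  */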
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
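
/* For instance, splitting IN = x + 3 with CODE == PLUS_EXPR stores 3
   in *LITP and returns x; splitting IN = x - 3 the same way stores 3
   in *MINUS_LITP instead, recording that the literal was subtracted.  */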
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
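
/* A hypothetical use, not taken from this file: folding the constant
   sum 2 + 3 in the C `int' type.

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three, 0);

   The result is an INTEGER_CST of value 5 with no overflow flags,
   since force_fit_type finds the value within range.  */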
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
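
/* The complex cases implement the textbook formulas
   (a + bi) * (c + di) = (ac - bd) + (ad + bc)i  and
   (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   with MAGSQUARED holding c*c + d*d and T1, T2 the two numerators.  */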
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;

  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
2244 swapped. This is safe for floating-point. */
2247 swap_tree_comparison (enum tree_code code)
2254 case UNORDERED_EXPR:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case GE_EXPR:
      return COMPCODE_GE;
    case NE_EXPR:
      return COMPCODE_NE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
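
/* The encoding assigns one bit per primitive outcome: LT = 1, EQ = 2,
   GT = 4 and UNORD = 8, so compound codes are simply bitwise ORs, e.g.
   COMPCODE_LE == 3 (LT|EQ) and COMPCODE_NE == 13 (LT|GT|UNORD).  This
   is what lets combine_comparisons below merge two comparisons with a
   plain & or | of their compcodes.  */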
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}
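/* For example, (x < y) && (x == y) yields COMPCODE_LT & COMPCODE_EQ
   == COMPCODE_FALSE and folds to constant false, while (x < y) ||
   (x == y) yields COMPCODE_LE and, NaNs permitting, is rebuilt as
   x <= y.  */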
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   have no side effects, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly cannot be equal.  */
          if (! operand_equal_p (TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg1, 0), flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
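/* For example, a + b and b + a are considered equal because PLUS_EXPR
   is commutative, while the REAL_CST case above keeps -0.0 and 0.0
   distinct even though they compare equal with ==.  */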
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
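/* For example, when fold simplifies f () * 0 the call cannot simply be
   dropped: omit_one_operand produces (f (), 0) so that the side effect
   of the call is preserved while the multiplication disappears.  */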
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code,
                                     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2 (code, type,
                     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      /* Fall through.  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}
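/* For example, (X | 3) & (X | 5) becomes X | (3 & 5), which the inner
   fold_build2 further simplifies to X | 1.  */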
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}
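/* For example, a/10.0 + a/5.0 becomes a * (1/10.0 + 1/5.0).  This is
   unsafe because constants such as 1/10.0 are not exactly representable
   in binary floating point, so the result can differ in the low bits.  */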
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));
  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
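/* For example, given a declaration like struct { int f : 3; } s, a test
   such as s.f == 2 can be rewritten as a mask-and-compare on the word
   containing the field, with 2 shifted into the field's position; the
   exact mask and shift depend on the target's endianness and alignment.  */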
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
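/* In this notation the example above reads: X >= 2 && X <= 5 is the
   range "+ [2, 5]", its negation is "- [2, 5]", and X > 10 is
   "- [-, 10]", i.e. outside the range from the lowest value to 10.  */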
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  build_int_cst (arg0_type, 0),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low
                 bound minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = build_int_cst (arg0_type, 0);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               build_int_cst (exp_type, 0),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                build_int_cst (exp_type, 0),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        build_int_cst (exp_type, 1));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
             move a constant to the other side.  */
          if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             fold_convert (arg0_type,
                                                           integer_one_node));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
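/* For example, for unsigned X the tree (X - 2) <= 3 is first recorded
   as the range "+ [-, 3]" on X - 2, intersected with "+ [0, -]" because
   the comparison is unsigned, and then the MINUS_EXPR case moves the
   constant into the bounds, yielding "+ [2, 5]" on X itself.  */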
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
                        fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
                        fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
                        fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
         if the latter is a subtype.  It must, however, be within the base
         type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
        etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                              TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetic upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_UNSIGNED (etype) && !flag_wrapv)
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold_build2 (MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
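/* For example, a check for "+ [2, 5]" on an int EXP ends up here:
   EXP - 2 is range-checked against [0, 3], and the recursive call's
   integer_zerop path then builds (unsigned) (EXP - 2) <= 3.  */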
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = (low != 0);
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = (high != 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = (low != 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
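/* For example, merging "+ [2, 5]" and "+ [4, 9]" for an AND (both in_p
   set) yields "+ [4, 5]": the ranges overlap and neither is a subset,
   so the result runs from the start of the second range to the end of
   the first.  */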
4428 /* Subroutine of fold, looking inside expressions of the form
4429 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4430 of the COND_EXPR. This function is being used also to optimize
4431 A op B ? C : A, by reversing the comparison first.
4433 Return a folded expression whose code is not a COND_EXPR
4434 anymore, or NULL_TREE if no folding opportunity is found. */
4437 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4439 enum tree_code comp_code = TREE_CODE (arg0);
4440 tree arg00 = TREE_OPERAND (arg0, 0);
4441 tree arg01 = TREE_OPERAND (arg0, 1);
4442 tree arg1_type = TREE_TYPE (arg1);
4448 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4451 A == 0? A : -A same as -A
4452 A != 0? A : -A same as A
4453 A >= 0? A : -A same as abs (A)
4454 A > 0? A : -A same as abs (A)
4455 A <= 0? A : -A same as -abs (A)
4456 A < 0? A : -A same as -abs (A)
4458 None of these transformations work for modes with signed
4459 zeros. If A is +/-0, the first two transformations will
4460 change the sign of the result (from +0 to -0, or vice
4461 versa). The last four will fix the sign of the result,
4462 even though the original expressions could be positive or
4463 negative, depending on the sign of A.
4465 Note that all these transformations are correct if A is
4466 NaN, since the two alternatives (A and -A) are also NaNs. */
4467 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4468 ? real_zerop (arg01)
4469 : integer_zerop (arg01))
4470 && ((TREE_CODE (arg2) == NEGATE_EXPR
4471 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4472 /* In the case that A is of the form X-Y, '-A' (arg2) may
4473 have already been folded to Y-X; check for that. */
4474 || (TREE_CODE (arg1) == MINUS_EXPR
4475 && TREE_CODE (arg2) == MINUS_EXPR
4476 && operand_equal_p (TREE_OPERAND (arg1, 0),
4477 TREE_OPERAND (arg2, 1), 0)
4478 && operand_equal_p (TREE_OPERAND (arg1, 1),
4479 TREE_OPERAND (arg2, 0), 0))))
4484 tem = fold_convert (arg1_type, arg1);
4485 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4488 return pedantic_non_lvalue (fold_convert (type, arg1));
4491 if (flag_trapping_math)
4496 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4497 arg1 = fold_convert (lang_hooks.types.signed_type
4498 (TREE_TYPE (arg1)), arg1);
4499 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4500 return pedantic_non_lvalue (fold_convert (type, tem));
4503 if (flag_trapping_math)
4507 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4508 arg1 = fold_convert (lang_hooks.types.signed_type
4509 (TREE_TYPE (arg1)), arg1);
4510 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4511 return negate_expr (fold_convert (type, tem));
4513 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4517 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4518 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4519 both transformations are correct when A is NaN: A != 0
4520 is then true, and A == 0 is false. */
4522 if (integer_zerop (arg01) && integer_zerop (arg2))
4524 if (comp_code == NE_EXPR)
4525 return pedantic_non_lvalue (fold_convert (type, arg1));
4526 else if (comp_code == EQ_EXPR)
4527 return build_int_cst (type, 0);
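/* [Editor's sketch, not part of the original source.]  The A op 0 ? A : -A
   rules above, checked on ints, where no signed zeros exist.  */
#if 0
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  int a;
  for (a = -100; a <= 100; a++)
    {
      assert ((a >= 0 ? a : -a) == abs (a));
      assert ((a > 0 ? a : -a) == abs (a));
      assert ((a <= 0 ? a : -a) == -abs (a));
      assert ((a != 0 ? a : -a) == a);
      assert ((a != 0 ? a : 0) == a);
    }
  return 0;
}
#endif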
4530 /* Try some transformations of A op B ? A : B.
4532 A == B? A : B same as B
4533 A != B? A : B same as A
4534 A >= B? A : B same as max (A, B)
4535 A > B? A : B same as max (B, A)
4536 A <= B? A : B same as min (A, B)
4537 A < B? A : B same as min (B, A)
4539 As above, these transformations don't work in the presence
4540 of signed zeros. For example, if A and B are zeros of
4541 opposite sign, the first two transformations will change
4542 the sign of the result. In the last four, the original
4543 expressions give different results for (A=+0, B=-0) and
4544 (A=-0, B=+0), but the transformed expressions do not.
4546 The first two transformations are correct if either A or B
4547 is a NaN. In the first transformation, the condition will
4548 be false, and B will indeed be chosen. In the case of the
4549 second transformation, the condition A != B will be true,
4550 and A will be chosen.
4552 The conversions to max() and min() are not correct if B is
4553 a number and A is not. The conditions in the original
4554 expressions will be false, so all four give B. The min()
4555 and max() versions would give a NaN instead. */
4556 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4557 /* Avoid these transformations if the COND_EXPR may be used
4558 as an lvalue in the C++ front-end. PR c++/19199. */
4560 || (strcmp (lang_hooks.name, "GNU C++") != 0
4561 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4562 || ! maybe_lvalue_p (arg1)
4563 || ! maybe_lvalue_p (arg2)))
4565 tree comp_op0 = arg00;
4566 tree comp_op1 = arg01;
4567 tree comp_type = TREE_TYPE (comp_op0);
4569 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4570 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4580 return pedantic_non_lvalue (fold_convert (type, arg2));
4582 return pedantic_non_lvalue (fold_convert (type, arg1));
4587 /* In C++ a ?: expression can be an lvalue, so put the
4588 operand which will be used if they are equal first
4589 so that we can convert this back to the
4590 corresponding COND_EXPR. */
4591 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4593 comp_op0 = fold_convert (comp_type, comp_op0);
4594 comp_op1 = fold_convert (comp_type, comp_op1);
4595 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4596 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4597 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4598 return pedantic_non_lvalue (fold_convert (type, tem));
4605 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4607 comp_op0 = fold_convert (comp_type, comp_op0);
4608 comp_op1 = fold_convert (comp_type, comp_op1);
4609 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4610 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4611 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4612 return pedantic_non_lvalue (fold_convert (type, tem));
4616 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4617 return pedantic_non_lvalue (fold_convert (type, arg2));
4620 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4621 return pedantic_non_lvalue (fold_convert (type, arg1));
4624 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
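/* [Editor's sketch, not part of the original source.]  The A op B ? A : B
   table above, checked on ints (no NaNs, no signed zeros); min_i/max_i
   are illustrative helpers.  */
#if 0
#include <assert.h>

static int min_i (int a, int b) { return a < b ? a : b; }
static int max_i (int a, int b) { return a > b ? a : b; }

int
main (void)
{
  int a, b;
  for (a = -5; a <= 5; a++)
    for (b = -5; b <= 5; b++)
      {
	assert ((a == b ? a : b) == b);
	assert ((a != b ? a : b) == a);
	assert ((a >= b ? a : b) == max_i (a, b));
	assert ((a <= b ? a : b) == min_i (a, b));
      }
  return 0;
}
#endif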
4629 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4630 we might still be able to simplify this. For example,
4631 if C1 is one less or one more than C2, this might have started
4632 out as a MIN or MAX and been transformed by this function.
4633 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4635 if (INTEGRAL_TYPE_P (type)
4636 && TREE_CODE (arg01) == INTEGER_CST
4637 && TREE_CODE (arg2) == INTEGER_CST)
4641 /* We can replace A with C1 in this case. */
4642 arg1 = fold_convert (type, arg01);
4643 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4646 /* If C1 is C2 + 1, this is min(A, C2). */
4647 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4649 && operand_equal_p (arg01,
4650 const_binop (PLUS_EXPR, arg2,
4651 integer_one_node, 0),
4653 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4658 /* If C1 is C2 - 1, this is min(A, C2). */
4659 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4661 && operand_equal_p (arg01,
4662 const_binop (MINUS_EXPR, arg2,
4663 integer_one_node, 0),
4665 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4670 /* If C1 is C2 - 1, this is max(A, C2). */
4671 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4673 && operand_equal_p (arg01,
4674 const_binop (MINUS_EXPR, arg2,
4675 integer_one_node, 0),
4677 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4682 /* If C1 is C2 + 1, this is max(A, C2). */
4683 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4685 && operand_equal_p (arg01,
4686 const_binop (PLUS_EXPR, arg2,
4687 integer_one_node, 0),
4689 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
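/* [Editor's sketch, not part of the original source.]  The
   A op C1 ? A : C2 cases above with C1 == C2 +- 1, e.g. C1 = 6 and
   C2 = 5, recover MIN/MAX exactly:  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((x < 6 ? x : 5) == (x < 5 ? x : 5));  /* min (x, 5) */
      assert ((x > 4 ? x : 5) == (x > 5 ? x : 5));  /* max (x, 5) */
    }
  return 0;
}
#endif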
4703 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4704 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4707 /* EXP is some logical combination of boolean tests. See if we can
4708 merge it into some range test. Return the new tree if so. */
4711 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4713 int or_op = (code == TRUTH_ORIF_EXPR
4714 || code == TRUTH_OR_EXPR);
4715 int in0_p, in1_p, in_p;
4716 tree low0, low1, low, high0, high1, high;
4717 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4718 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4721 /* If this is an OR operation, invert both sides; we will invert
4722 again at the end. */
4724 in0_p = ! in0_p, in1_p = ! in1_p;
4726 /* If both expressions are the same, if we can merge the ranges, and we
4727 can build the range test, return it or it inverted. If one of the
4728 ranges is always true or always false, consider it to be the same
4729 expression as the other. */
4730 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4731 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4733 && 0 != (tem = (build_range_check (type,
4735 : rhs != 0 ? rhs : integer_zero_node,
4737 return or_op ? invert_truthvalue (tem) : tem;
4739 /* On machines where the branch cost is expensive, if this is a
4740 short-circuited branch and the underlying object on both sides
4741 is the same, make a non-short-circuit operation. */
4742 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4743 && lhs != 0 && rhs != 0
4744 && (code == TRUTH_ANDIF_EXPR
4745 || code == TRUTH_ORIF_EXPR)
4746 && operand_equal_p (lhs, rhs, 0))
4748 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4749 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4750 which cases we can't do this. */
4751 if (simple_operand_p (lhs))
4752 return build2 (code == TRUTH_ANDIF_EXPR
4753 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4756 else if (lang_hooks.decls.global_bindings_p () == 0
4757 && ! CONTAINS_PLACEHOLDER_P (lhs))
4759 tree common = save_expr (lhs);
4761 if (0 != (lhs = build_range_check (type, common,
4762 or_op ? ! in0_p : in0_p,
4764 && (0 != (rhs = build_range_check (type, common,
4765 or_op ? ! in1_p : in1_p,
4767 return build2 (code == TRUTH_ANDIF_EXPR
4768 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
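/* [Editor's sketch, not part of the original source.]  The classic payoff
   of build_range_check: a two-sided test collapses to one unsigned
   comparison.  */
#if 0
#include <assert.h>

int
main (void)
{
  int ch;
  for (ch = -300; ch <= 300; ch++)
    assert ((ch >= '0' && ch <= '9') == ((unsigned) (ch - '0') <= 9u));
  return 0;
}
#endif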
4776 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4777 bit value. Arrange things so the extra bits will be set to zero if and
4778 only if C is sign-extended to its full width. If MASK is nonzero,
4779 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4782 unextend (tree c, int p, int unsignedp, tree mask)
4784 tree type = TREE_TYPE (c);
4785 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4788 if (p == modesize || unsignedp)
4791 /* We work by getting just the sign bit into the low-order bit, then
4792 into the high-order bit, then sign-extend. We then XOR that value with C. */
4794 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4795 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4797 /* We must use a signed type in order to get an arithmetic right shift.
4798 However, we must also avoid introducing accidental overflows, so that
4799 a subsequent call to integer_zerop will work. Hence we must
4800 do the type conversion here. At this point, the constant is either
4801 zero or one, and the conversion to a signed type can never overflow.
4802 We could get an overflow if this conversion is done anywhere else. */
4803 if (TYPE_UNSIGNED (type))
4804 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4806 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4807 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4809 temp = const_binop (BIT_AND_EXPR, temp,
4810 fold_convert (TREE_TYPE (c), mask), 0);
4811 /* If necessary, convert the type back to match the type of C. */
4812 if (TYPE_UNSIGNED (type))
4813 temp = fold_convert (type, temp);
4815 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
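/* [Editor's sketch, not part of the original source.]  The underlying
   identity: a P-bit quantity is sign-extended by moving its sign bit.
   One standard formulation, with m = 1 << (P - 1): (x ^ m) - m
   sign-extends the low P bits of x.  This is only loosely analogous to
   unextend above, which works on tree constants.  */
#if 0
#include <assert.h>

int
main (void)
{
  const int p = 5;                 /* field width */
  const int m = 1 << (p - 1);      /* sign bit of the field */
  int v;
  for (v = -(1 << (p - 1)); v < (1 << (p - 1)); v++)
    {
      unsigned bits = (unsigned) v & ((1u << p) - 1);  /* truncate to P bits */
      assert ((((int) bits ^ m) - m) == v);            /* extension recovers v */
    }
  return 0;
}
#endif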
4818 /* Find ways of folding logical expressions of LHS and RHS:
4819 Try to merge two comparisons to the same innermost item.
4820 Look for range tests like "ch >= '0' && ch <= '9'".
4821 Look for combinations of simple terms on machines with expensive branches
4822 and evaluate the RHS unconditionally.
4824 For example, if we have p->a == 2 && p->b == 4 and we can make an
4825 object large enough to span both A and B, we can do this with a comparison
4826 against the object ANDed with a mask.
4828 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4829 operations to do this with one comparison.
4831 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4832 function and the one above.
4834 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4835 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4837 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4840 We return the simplified tree or 0 if no optimization is possible. */
4843 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4845 /* If this is the "or" of two comparisons, we can do something if
4846 the comparisons are NE_EXPR. If this is the "and", we can do something
4847 if the comparisons are EQ_EXPR. I.e.,
4848 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4850 WANTED_CODE is this operation code. For single bit fields, we can
4851 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4852 comparison for one-bit fields. */
4854 enum tree_code wanted_code;
4855 enum tree_code lcode, rcode;
4856 tree ll_arg, lr_arg, rl_arg, rr_arg;
4857 tree ll_inner, lr_inner, rl_inner, rr_inner;
4858 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4859 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4860 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4861 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4862 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4863 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4864 enum machine_mode lnmode, rnmode;
4865 tree ll_mask, lr_mask, rl_mask, rr_mask;
4866 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4867 tree l_const, r_const;
4868 tree lntype, rntype, result;
4869 int first_bit, end_bit;
4871 tree orig_lhs = lhs, orig_rhs = rhs;
4872 enum tree_code orig_code = code;
4874 /* Start by getting the comparison codes. Fail if anything is volatile.
4875 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4876 it were surrounded with a NE_EXPR. */
4878 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4881 lcode = TREE_CODE (lhs);
4882 rcode = TREE_CODE (rhs);
4884 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4886 lhs = build2 (NE_EXPR, truth_type, lhs,
4887 build_int_cst (TREE_TYPE (lhs), 0));
4891 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4893 rhs = build2 (NE_EXPR, truth_type, rhs,
4894 build_int_cst (TREE_TYPE (rhs), 0));
4898 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4899 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4902 ll_arg = TREE_OPERAND (lhs, 0);
4903 lr_arg = TREE_OPERAND (lhs, 1);
4904 rl_arg = TREE_OPERAND (rhs, 0);
4905 rr_arg = TREE_OPERAND (rhs, 1);
4907 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4908 if (simple_operand_p (ll_arg)
4909 && simple_operand_p (lr_arg))
4912 if (operand_equal_p (ll_arg, rl_arg, 0)
4913 && operand_equal_p (lr_arg, rr_arg, 0))
4915 result = combine_comparisons (code, lcode, rcode,
4916 truth_type, ll_arg, lr_arg);
4920 else if (operand_equal_p (ll_arg, rr_arg, 0)
4921 && operand_equal_p (lr_arg, rl_arg, 0))
4923 result = combine_comparisons (code, lcode,
4924 swap_tree_comparison (rcode),
4925 truth_type, ll_arg, lr_arg);
4931 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4932 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4934 /* If the RHS can be evaluated unconditionally and its operands are
4935 simple, it wins to evaluate the RHS unconditionally on machines
4936 with expensive branches. In this case, this isn't a comparison
4937 that can be merged. Avoid doing this if the RHS is a floating-point
4938 comparison since those can trap. */
4940 if (BRANCH_COST >= 2
4941 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4942 && simple_operand_p (rl_arg)
4943 && simple_operand_p (rr_arg))
4945 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4946 if (code == TRUTH_OR_EXPR
4947 && lcode == NE_EXPR && integer_zerop (lr_arg)
4948 && rcode == NE_EXPR && integer_zerop (rr_arg)
4949 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4950 return build2 (NE_EXPR, truth_type,
4951 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4953 build_int_cst (TREE_TYPE (ll_arg), 0));
4955 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4956 if (code == TRUTH_AND_EXPR
4957 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4958 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4959 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4960 return build2 (EQ_EXPR, truth_type,
4961 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4963 build_int_cst (TREE_TYPE (ll_arg), 0));
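/* [Editor's sketch, not part of the original source.]  The two
   conversions above, verified exhaustively on small operands.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a, b;
  for (a = 0; a < 16; a++)
    for (b = 0; b < 16; b++)
      {
	assert (((a != 0) || (b != 0)) == ((a | b) != 0));
	assert (((a == 0) && (b == 0)) == ((a | b) == 0));
      }
  return 0;
}
#endif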
4965 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4967 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4968 return build2 (code, truth_type, lhs, rhs);
4973 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4976 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4977 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4981 ll_inner = decode_field_reference (ll_arg,
4982 &ll_bitsize, &ll_bitpos, &ll_mode,
4983 &ll_unsignedp, &volatilep, &ll_mask,
4985 lr_inner = decode_field_reference (lr_arg,
4986 &lr_bitsize, &lr_bitpos, &lr_mode,
4987 &lr_unsignedp, &volatilep, &lr_mask,
4989 rl_inner = decode_field_reference (rl_arg,
4990 &rl_bitsize, &rl_bitpos, &rl_mode,
4991 &rl_unsignedp, &volatilep, &rl_mask,
4993 rr_inner = decode_field_reference (rr_arg,
4994 &rr_bitsize, &rr_bitpos, &rr_mode,
4995 &rr_unsignedp, &volatilep, &rr_mask,
4998 /* It must be true that the inner operation on the lhs of each
4999 comparison must be the same if we are to be able to do anything.
5000 Then see if we have constants. If not, the same must be true for the rhs's. */
5002 if (volatilep || ll_inner == 0 || rl_inner == 0
5003 || ! operand_equal_p (ll_inner, rl_inner, 0))
5006 if (TREE_CODE (lr_arg) == INTEGER_CST
5007 && TREE_CODE (rr_arg) == INTEGER_CST)
5008 l_const = lr_arg, r_const = rr_arg;
5009 else if (lr_inner == 0 || rr_inner == 0
5010 || ! operand_equal_p (lr_inner, rr_inner, 0))
5013 l_const = r_const = 0;
5015 /* If either comparison code is not correct for our logical operation,
5016 fail. However, we can convert a one-bit comparison against zero into
5017 the opposite comparison against that bit being set in the field. */
5019 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5020 if (lcode != wanted_code)
5022 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5024 /* Make the left operand unsigned, since we are only interested
5025 in the value of one bit. Otherwise we are doing the wrong thing below. */
5034 /* This is analogous to the code for l_const above. */
5035 if (rcode != wanted_code)
5037 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5046 /* After this point all optimizations will generate bit-field
5047 references, which we might not want. */
5048 if (! lang_hooks.can_use_bit_fields_p ())
5051 /* See if we can find a mode that contains both fields being compared on
5052 the left. If we can't, fail. Otherwise, update all constants and masks
5053 to be relative to a field of that size. */
5054 first_bit = MIN (ll_bitpos, rl_bitpos);
5055 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5056 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5057 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5059 if (lnmode == VOIDmode)
5062 lnbitsize = GET_MODE_BITSIZE (lnmode);
5063 lnbitpos = first_bit & ~ (lnbitsize - 1);
5064 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5065 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5067 if (BYTES_BIG_ENDIAN)
5069 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5070 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5073 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5074 size_int (xll_bitpos), 0);
5075 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5076 size_int (xrl_bitpos), 0);
5080 l_const = fold_convert (lntype, l_const);
5081 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5082 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5083 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5084 fold_build1 (BIT_NOT_EXPR,
5088 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5090 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5095 r_const = fold_convert (lntype, r_const);
5096 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5097 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5098 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5099 fold_build1 (BIT_NOT_EXPR,
5103 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5105 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5109 /* If the right sides are not constant, do the same for them. Also,
5110 disallow this optimization if a size or signedness mismatch occurs
5111 between the left and right sides. */
5114 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5115 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5116 /* Make sure the two fields on the right
5117 correspond to the left without being swapped. */
5118 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5121 first_bit = MIN (lr_bitpos, rr_bitpos);
5122 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5123 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5124 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5126 if (rnmode == VOIDmode)
5129 rnbitsize = GET_MODE_BITSIZE (rnmode);
5130 rnbitpos = first_bit & ~ (rnbitsize - 1);
5131 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5132 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5134 if (BYTES_BIG_ENDIAN)
5136 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5137 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5140 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5141 size_int (xlr_bitpos), 0);
5142 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5143 size_int (xrr_bitpos), 0);
5145 /* Make a mask that corresponds to both fields being compared.
5146 Do this for both items being compared. If the operands are the
5147 same size and the bits being compared are in the same position
5148 then we can do this by masking both and comparing the masked results. */
5150 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5151 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5152 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5154 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5155 ll_unsignedp || rl_unsignedp);
5156 if (! all_ones_mask_p (ll_mask, lnbitsize))
5157 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5159 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5160 lr_unsignedp || rr_unsignedp);
5161 if (! all_ones_mask_p (lr_mask, rnbitsize))
5162 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5164 return build2 (wanted_code, truth_type, lhs, rhs);
5167 /* There is still another way we can do something: If both pairs of
5168 fields being compared are adjacent, we may be able to make a wider
5169 field containing them both.
5171 Note that we still must mask the lhs/rhs expressions. Furthermore,
5172 the mask must be shifted to account for the shift done by
5173 make_bit_field_ref. */
5174 if ((ll_bitsize + ll_bitpos == rl_bitpos
5175 && lr_bitsize + lr_bitpos == rr_bitpos)
5176 || (ll_bitpos == rl_bitpos + rl_bitsize
5177 && lr_bitpos == rr_bitpos + rr_bitsize))
5181 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5182 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5183 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5184 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5186 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5187 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5188 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5189 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5191 /* Convert to the smaller type before masking out unwanted bits. */
5193 if (lntype != rntype)
5195 if (lnbitsize > rnbitsize)
5197 lhs = fold_convert (rntype, lhs);
5198 ll_mask = fold_convert (rntype, ll_mask);
5201 else if (lnbitsize < rnbitsize)
5203 rhs = fold_convert (lntype, rhs);
5204 lr_mask = fold_convert (lntype, lr_mask);
5209 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5210 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5212 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5213 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5215 return build2 (wanted_code, truth_type, lhs, rhs);
5221 /* Handle the case of comparisons with constants. If there is something in
5222 common between the masks, those bits of the constants must be the same.
5223 If not, the condition is always false. Test for this to avoid generating
5224 incorrect code below. */
5225 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5226 if (! integer_zerop (result)
5227 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5228 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5230 if (wanted_code == NE_EXPR)
5232 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5233 return constant_boolean_node (true, truth_type);
5237 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5238 return constant_boolean_node (false, truth_type);
5242 /* Construct the expression we will return. First get the component
5243 reference we will make. Unless the mask is all ones the width of
5244 that field, perform the mask operation. Then compare with the merged constant. */
5246 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5247 ll_unsignedp || rl_unsignedp);
5249 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5250 if (! all_ones_mask_p (ll_mask, lnbitsize))
5251 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5253 return build2 (wanted_code, truth_type, result,
5254 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
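/* [Editor's sketch, not part of the original source.]  The effect of the
   merge above, done by hand: two adjacent byte comparisons become one
   comparison of a 16-bit word against a merged constant.  The word is
   built explicitly here, so endianness does not matter.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned a, b;
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      {
	uint16_t word = (uint16_t) ((b << 8) | a);
	/* p->a == 2 && p->b == 4, with a in the low byte.  */
	assert (((a == 2) && (b == 4)) == (word == ((4 << 8) | 2)));
      }
  return 0;
}
#endif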
5257 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5261 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5264 enum tree_code op_code;
5265 tree comp_const = op1;
5267 int consts_equal, consts_lt;
5270 STRIP_SIGN_NOPS (arg0);
5272 op_code = TREE_CODE (arg0);
5273 minmax_const = TREE_OPERAND (arg0, 1);
5274 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5275 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5276 inner = TREE_OPERAND (arg0, 0);
5278 /* If something does not permit us to optimize, return the original tree. */
5279 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5280 || TREE_CODE (comp_const) != INTEGER_CST
5281 || TREE_CONSTANT_OVERFLOW (comp_const)
5282 || TREE_CODE (minmax_const) != INTEGER_CST
5283 || TREE_CONSTANT_OVERFLOW (minmax_const))
5286 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5287 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5291 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5293 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5296 return invert_truthvalue (tem);
5302 fold_build2 (TRUTH_ORIF_EXPR, type,
5303 optimize_minmax_comparison
5304 (EQ_EXPR, type, arg0, comp_const),
5305 optimize_minmax_comparison
5306 (GT_EXPR, type, arg0, comp_const));
5309 if (op_code == MAX_EXPR && consts_equal)
5310 /* MAX (X, 0) == 0 -> X <= 0 */
5311 return fold_build2 (LE_EXPR, type, inner, comp_const);
5313 else if (op_code == MAX_EXPR && consts_lt)
5314 /* MAX (X, 0) == 5 -> X == 5 */
5315 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5317 else if (op_code == MAX_EXPR)
5318 /* MAX (X, 0) == -1 -> false */
5319 return omit_one_operand (type, integer_zero_node, inner);
5321 else if (consts_equal)
5322 /* MIN (X, 0) == 0 -> X >= 0 */
5323 return fold_build2 (GE_EXPR, type, inner, comp_const);
5326 /* MIN (X, 0) == 5 -> false */
5327 return omit_one_operand (type, integer_zero_node, inner);
5330 /* MIN (X, 0) == -1 -> X == -1 */
5331 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5334 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5335 /* MAX (X, 0) > 0 -> X > 0
5336 MAX (X, 0) > 5 -> X > 5 */
5337 return fold_build2 (GT_EXPR, type, inner, comp_const);
5339 else if (op_code == MAX_EXPR)
5340 /* MAX (X, 0) > -1 -> true */
5341 return omit_one_operand (type, integer_one_node, inner);
5343 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5344 /* MIN (X, 0) > 0 -> false
5345 MIN (X, 0) > 5 -> false */
5346 return omit_one_operand (type, integer_zero_node, inner);
5349 /* MIN (X, 0) > -1 -> X > -1 */
5350 return fold_build2 (GT_EXPR, type, inner, comp_const);
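/* [Editor's sketch, not part of the original source.]  Spot checks of the
   EQ_EXPR and GT_EXPR rules above, with the inner constant 0; min_i and
   max_i are illustrative helpers.  */
#if 0
#include <assert.h>

static int min_i (int a, int b) { return a < b ? a : b; }
static int max_i (int a, int b) { return a > b ? a : b; }

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((max_i (x, 0) == 0) == (x <= 0));   /* MAX (X, 0) == 0  */
      assert ((max_i (x, 0) == 5) == (x == 5));   /* MAX (X, 0) == 5  */
      assert ((max_i (x, 0) > -1) == 1);          /* MAX (X, 0) > -1  */
      assert ((min_i (x, 0) > 0) == 0);           /* MIN (X, 0) > 0   */
      assert ((min_i (x, 0) == -1) == (x == -1)); /* MIN (X, 0) == -1 */
    }
  return 0;
}
#endif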
5357 /* T is an integer expression that is being multiplied, divided, or taken a
5358 modulus (CODE says which and what kind of divide or modulus) by a
5359 constant C. See if we can eliminate that operation by folding it with
5360 other operations already in T. WIDE_TYPE, if non-null, is a type that
5361 should be used for the computation if wider than our type.
5363 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5364 (X * 2) + (Y * 4). We must, however, be assured that either the original
5365 expression would not overflow or that overflow is undefined for the type
5366 in the language in question.
5368 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5369 the machine has a multiply-accumulate insn or that this is part of an
5370 addressing calculation.
5372 If we return a non-null expression, it is an equivalent form of the
5373 original computation, but need not be in the original type. */
5376 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5378 /* To avoid exponential search depth, refuse to allow recursion past
5379 three levels. Beyond that (1) it's highly unlikely that we'll find
5380 something interesting and (2) we've probably processed it before
5381 when we built the inner expression. */
5390 ret = extract_muldiv_1 (t, c, code, wide_type);
5397 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5399 tree type = TREE_TYPE (t);
5400 enum tree_code tcode = TREE_CODE (t);
5401 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5402 > GET_MODE_SIZE (TYPE_MODE (type)))
5403 ? wide_type : type);
5405 int same_p = tcode == code;
5406 tree op0 = NULL_TREE, op1 = NULL_TREE;
5408 /* Don't deal with constants of zero here; they confuse the code below. */
5409 if (integer_zerop (c))
5412 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5413 op0 = TREE_OPERAND (t, 0);
5415 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5416 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5418 /* Note that we need not handle conditional operations here since fold
5419 already handles those cases. So just do arithmetic here. */
5423 /* For a constant, we can always simplify if we are a multiply
5424 or (for divide and modulus) if it is a multiple of our constant. */
5425 if (code == MULT_EXPR
5426 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5427 return const_binop (code, fold_convert (ctype, t),
5428 fold_convert (ctype, c), 0);
5431 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5432 /* If op0 is an expression ... */
5433 if ((COMPARISON_CLASS_P (op0)
5434 || UNARY_CLASS_P (op0)
5435 || BINARY_CLASS_P (op0)
5436 || EXPRESSION_CLASS_P (op0))
5437 /* ... and is unsigned, and its type is smaller than ctype,
5438 then we cannot pass through as widening. */
5439 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5440 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5441 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5442 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5443 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5444 /* ... or this is a truncation (t is narrower than op0),
5445 then we cannot pass through this narrowing. */
5446 || (GET_MODE_SIZE (TYPE_MODE (type))
5447 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5448 /* ... or signedness changes for division or modulus,
5449 then we cannot pass through this conversion. */
5450 || (code != MULT_EXPR
5451 && (TYPE_UNSIGNED (ctype)
5452 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5455 /* Pass the constant down and see if we can make a simplification. If
5456 we can, replace this expression with the inner simplification for
5457 possible later conversion to our or some other type. */
5458 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5459 && TREE_CODE (t2) == INTEGER_CST
5460 && ! TREE_CONSTANT_OVERFLOW (t2)
5461 && (0 != (t1 = extract_muldiv (op0, t2, code,
5463 ? ctype : NULL_TREE))))
5468 /* If widening the type changes it from signed to unsigned, then we
5469 must avoid building ABS_EXPR itself as unsigned. */
5470 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5472 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5473 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5475 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5476 return fold_convert (ctype, t1);
5482 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5483 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5486 case MIN_EXPR: case MAX_EXPR:
5487 /* If widening the type changes the signedness, then we can't perform
5488 this optimization as that changes the result. */
5489 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5492 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5493 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5494 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5496 if (tree_int_cst_sgn (c) < 0)
5497 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5499 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5500 fold_convert (ctype, t2));
5504 case LSHIFT_EXPR: case RSHIFT_EXPR:
5505 /* If the second operand is constant, this is a multiplication
5506 or floor division, by a power of two, so we can treat it that
5507 way unless the multiplier or divisor overflows. Signed
5508 left-shift overflow is implementation-defined rather than
5509 undefined in C90, so do not convert signed left shift into multiplication. */
5511 if (TREE_CODE (op1) == INTEGER_CST
5512 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5513 /* const_binop may not detect overflow correctly,
5514 so check for it explicitly here. */
5515 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5516 && TREE_INT_CST_HIGH (op1) == 0
5517 && 0 != (t1 = fold_convert (ctype,
5518 const_binop (LSHIFT_EXPR,
5521 && ! TREE_OVERFLOW (t1))
5522 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5523 ? MULT_EXPR : FLOOR_DIV_EXPR,
5524 ctype, fold_convert (ctype, op0), t1),
5525 c, code, wide_type);
5528 case PLUS_EXPR: case MINUS_EXPR:
5529 /* See if we can eliminate the operation on both sides. If we can, we
5530 can return a new PLUS or MINUS. If we can't, the only remaining
5531 cases where we can do anything are if the second operand is a constant. */
5533 t1 = extract_muldiv (op0, c, code, wide_type);
5534 t2 = extract_muldiv (op1, c, code, wide_type);
5535 if (t1 != 0 && t2 != 0
5536 && (code == MULT_EXPR
5537 /* If not multiplication, we can only do this if both operands
5538 are divisible by c. */
5539 || (multiple_of_p (ctype, op0, c)
5540 && multiple_of_p (ctype, op1, c))))
5541 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5542 fold_convert (ctype, t2));
5544 /* If this was a subtraction, negate OP1 and set it to be an addition.
5545 This simplifies the logic below. */
5546 if (tcode == MINUS_EXPR)
5547 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5549 if (TREE_CODE (op1) != INTEGER_CST)
5552 /* If either OP1 or C is negative, this optimization is not safe for
5553 some of the division and remainder types while for others we need
5554 to change the code. */
5555 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5557 if (code == CEIL_DIV_EXPR)
5558 code = FLOOR_DIV_EXPR;
5559 else if (code == FLOOR_DIV_EXPR)
5560 code = CEIL_DIV_EXPR;
5561 else if (code != MULT_EXPR
5562 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5566 /* If it's a multiply or a division/modulus operation of a multiple
5567 of our constant, do the operation and verify it doesn't overflow. */
5568 if (code == MULT_EXPR
5569 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5571 op1 = const_binop (code, fold_convert (ctype, op1),
5572 fold_convert (ctype, c), 0);
5573 /* We allow the constant to overflow with wrapping semantics. */
5575 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5581 /* If we have an unsigned type that is not a sizetype, we cannot widen
5582 the operation since it will change the result if the original
5583 computation overflowed. */
5584 if (TYPE_UNSIGNED (ctype)
5585 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5589 /* If we were able to eliminate our operation from the first side,
5590 apply our operation to the second side and reform the PLUS. */
5591 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5592 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5594 /* The last case is if we are a multiply. In that case, we can
5595 apply the distributive law to commute the multiply and addition
5596 if the multiplication of the constants doesn't overflow. */
5597 if (code == MULT_EXPR)
5598 return fold_build2 (tcode, ctype,
5599 fold_build2 (code, ctype,
5600 fold_convert (ctype, op0),
5601 fold_convert (ctype, c)),
5607 /* We have a special case here if we are doing something like
5608 (C * 8) % 4 since we know that's zero. */
5609 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5610 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5611 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5612 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5613 return omit_one_operand (type, integer_zero_node, op0);
5615 /* ... fall through ... */
5617 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5618 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5619 /* If we can extract our operation from the LHS, do so and return a
5620 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5621 do something only if the second operand is a constant. */
5623 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5624 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5625 fold_convert (ctype, op1));
5626 else if (tcode == MULT_EXPR && code == MULT_EXPR
5627 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5628 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5629 fold_convert (ctype, t1));
5630 else if (TREE_CODE (op1) != INTEGER_CST)
5633 /* If these are the same operation types, we can associate them
5634 assuming no overflow. */
5636 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5637 fold_convert (ctype, c), 0))
5638 && ! TREE_OVERFLOW (t1))
5639 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5641 /* If these operations "cancel" each other, we have the main
5642 optimizations of this pass, which occur when either constant is a
5643 multiple of the other, in which case we replace this with an
5644 operation of either CODE or TCODE.
5646 If we have an unsigned type that is not a sizetype, we cannot do
5647 this since it will change the result if the original computation overflowed. */
5649 if ((! TYPE_UNSIGNED (ctype)
5650 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5652 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5653 || (tcode == MULT_EXPR
5654 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5655 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5657 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5658 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5659 fold_convert (ctype,
5660 const_binop (TRUNC_DIV_EXPR,
5662 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5663 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5664 fold_convert (ctype,
5665 const_binop (TRUNC_DIV_EXPR,
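/* [Editor's sketch, not part of the original source.]  The headline
   example for extract_muldiv: where the arithmetic cannot overflow,
   ((X * 8) + (Y * 16)) / 4 is (X * 2) + (Y * 4), and the division is
   exact, so truncation does not matter.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x, y;
  for (x = -50; x <= 50; x++)
    for (y = -50; y <= 50; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
#endif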
5677 /* Return a node which has the indicated constant VALUE (either 0 or
5678 1), and is of the indicated TYPE. */
5681 constant_boolean_node (int value, tree type)
5683 if (type == integer_type_node)
5684 return value ? integer_one_node : integer_zero_node;
5685 else if (type == boolean_type_node)
5686 return value ? boolean_true_node : boolean_false_node;
5688 return build_int_cst (type, value);
5692 /* Return true if expr looks like an ARRAY_REF and set base and
5693 offset to the appropriate trees. If there is no offset,
5694 offset is set to NULL_TREE. Base will be canonicalized to
5695 something you can get the element type from using
5696 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5697 in bytes to the base. */
5700 extract_array_ref (tree expr, tree *base, tree *offset)
5702 /* One canonical form is a PLUS_EXPR with the first
5703 argument being an ADDR_EXPR with a possible NOP_EXPR attached. */
5705 if (TREE_CODE (expr) == PLUS_EXPR)
5707 tree op0 = TREE_OPERAND (expr, 0);
5708 tree inner_base, dummy1;
5709 /* Strip NOP_EXPRs here because the C frontends and/or
5710 folders may present us with (int *)&x.a + 4B. */
5712 if (extract_array_ref (op0, &inner_base, &dummy1))
5715 if (dummy1 == NULL_TREE)
5716 *offset = TREE_OPERAND (expr, 1);
5718 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5719 dummy1, TREE_OPERAND (expr, 1));
5723 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5724 which we transform into an ADDR_EXPR with appropriate
5725 offset. For other arguments to the ADDR_EXPR we assume
5726 zero offset and as such do not care about the ADDR_EXPR
5727 type and strip possible nops from it. */
5728 else if (TREE_CODE (expr) == ADDR_EXPR)
5730 tree op0 = TREE_OPERAND (expr, 0);
5731 if (TREE_CODE (op0) == ARRAY_REF)
5733 tree idx = TREE_OPERAND (op0, 1);
5734 *base = TREE_OPERAND (op0, 0);
5735 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5736 array_ref_element_size (op0));
5740 /* Handle array-to-pointer decay as &a. */
5741 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5742 *base = TREE_OPERAND (expr, 0);
5745 *offset = NULL_TREE;
5749 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5750 else if (SSA_VAR_P (expr)
5751 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5754 *offset = NULL_TREE;
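/* [Editor's sketch, not part of the original source.]  The decomposition
   above in plain C: &a[i] is the base address plus i * sizeof (element)
   bytes.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  double a[16];
  size_t i;
  for (i = 0; i < 16; i++)
    assert ((char *) &a[i] - (char *) a == (ptrdiff_t) (i * sizeof (double)));
  return 0;
}
#endif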
5762 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5763 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5764 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5765 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5766 COND is the first argument to CODE; otherwise (as in the example
5767 given here), it is the second argument. TYPE is the type of the
5768 original expression. Return NULL_TREE if no simplification is possible. */
5772 fold_binary_op_with_conditional_arg (enum tree_code code,
5773 tree type, tree op0, tree op1,
5774 tree cond, tree arg, int cond_first_p)
5776 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5777 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5778 tree test, true_value, false_value;
5779 tree lhs = NULL_TREE;
5780 tree rhs = NULL_TREE;
5782 /* This transformation is only worthwhile if we don't have to wrap
5783 arg in a SAVE_EXPR, and the operation can be simplified on at least
5784 one of the branches once it's pushed inside the COND_EXPR. */
5785 if (!TREE_CONSTANT (arg))
5788 if (TREE_CODE (cond) == COND_EXPR)
5790 test = TREE_OPERAND (cond, 0);
5791 true_value = TREE_OPERAND (cond, 1);
5792 false_value = TREE_OPERAND (cond, 2);
5793 /* If this operand throws an expression, then it does not make
5794 sense to try to perform a logical or arithmetic operation involving it. */
5796 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5798 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5803 tree testtype = TREE_TYPE (cond);
5805 true_value = constant_boolean_node (true, testtype);
5806 false_value = constant_boolean_node (false, testtype);
5809 arg = fold_convert (arg_type, arg);
5812 true_value = fold_convert (cond_type, true_value);
5814 lhs = fold_build2 (code, type, true_value, arg);
5816 lhs = fold_build2 (code, type, arg, true_value);
5820 false_value = fold_convert (cond_type, false_value);
5822 rhs = fold_build2 (code, type, false_value, arg);
5824 rhs = fold_build2 (code, type, arg, false_value);
5827 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5828 return fold_convert (type, test);
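/* [Editor's sketch, not part of the original source.]  The two
   distributions described above, on ints.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 7, x = 3, y = -2, b;
  for (b = 0; b <= 1; b++)
    assert (a + (b ? x : y) == (b ? a + x : a + y));
  assert (a + (x < y) == ((x < y) ? a + 1 : a + 0));
  return 0;
}
#endif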
5832 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5834 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5835 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5836 ADDEND is the same as X.
5838 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5839 and finite. The problematic cases are when X is zero, and its mode
5840 has signed zeros. In the case of rounding towards -infinity,
5841 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5842 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5845 fold_real_zero_addition_p (tree type, tree addend, int negate)
5847 if (!real_zerop (addend))
5850 /* Don't allow the fold with -fsignaling-nans. */
5851 if (HONOR_SNANS (TYPE_MODE (type)))
5854 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5855 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5858 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5859 if (TREE_CODE (addend) == REAL_CST
5860 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5863 /* The mode has signed zeros, and we have to honor their sign.
5864 In this situation, there is only one case we can return true for.
5865 X - 0 is the same as X unless rounding towards -infinity is supported. */
5867 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
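/* [Editor's sketch, not part of the original source.]  The signed-zero
   pitfall documented above: under the default rounding mode, -0.0 + 0.0
   is +0.0, so folding X + 0.0 to X would flip the sign when X is -0.0,
   while X - 0.0 preserves it.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double neg_zero = -0.0;
  assert (signbit (neg_zero));
  assert (!signbit (neg_zero + 0.0));  /* x + 0.0 -> x would be wrong */
  assert (signbit (neg_zero - 0.0));   /* x - 0.0 keeps the sign */
  return 0;
}
#endif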
5870 /* Subroutine of fold() that checks comparisons of built-in math
5871 functions against real constants.
5873 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5874 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5875 is the type of the result and ARG0 and ARG1 are the operands of the
5876 comparison. ARG1 must be a TREE_REAL_CST.
5878 The function returns the constant folded tree if a simplification
5879 can be made, and NULL_TREE otherwise. */
5882 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5883 tree type, tree arg0, tree arg1)
5887 if (BUILTIN_SQRT_P (fcode))
5889 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5890 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5892 c = TREE_REAL_CST (arg1);
5893 if (REAL_VALUE_NEGATIVE (c))
5895 /* sqrt(x) < y is always false, if y is negative. */
5896 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5897 return omit_one_operand (type, integer_zero_node, arg);
5899 /* sqrt(x) > y is always true, if y is negative and we
5900 don't care about NaNs, i.e. negative values of x. */
5901 if (code == NE_EXPR || !HONOR_NANS (mode))
5902 return omit_one_operand (type, integer_one_node, arg);
5904 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5905 return fold_build2 (GE_EXPR, type, arg,
5906 build_real (TREE_TYPE (arg), dconst0));
5908 else if (code == GT_EXPR || code == GE_EXPR)
5912 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5913 real_convert (&c2, mode, &c2);
5915 if (REAL_VALUE_ISINF (c2))
5917 /* sqrt(x) > y is x == +Inf, when y is very large. */
5918 if (HONOR_INFINITIES (mode))
5919 return fold_build2 (EQ_EXPR, type, arg,
5920 build_real (TREE_TYPE (arg), c2));
5922 /* sqrt(x) > y is always false, when y is very large
5923 and we don't care about infinities. */
5924 return omit_one_operand (type, integer_zero_node, arg);
5927 /* sqrt(x) > c is the same as x > c*c. */
5928 return fold_build2 (code, type, arg,
5929 build_real (TREE_TYPE (arg), c2));
5931 else if (code == LT_EXPR || code == LE_EXPR)
5935 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5936 real_convert (&c2, mode, &c2);
5938 if (REAL_VALUE_ISINF (c2))
5940 /* sqrt(x) < y is always true, when y is a very large
5941 value and we don't care about NaNs or Infinities. */
5942 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5943 return omit_one_operand (type, integer_one_node, arg);
5945 /* sqrt(x) < y is x != +Inf when y is very large and we
5946 don't care about NaNs. */
5947 if (! HONOR_NANS (mode))
5948 return fold_build2 (NE_EXPR, type, arg,
5949 build_real (TREE_TYPE (arg), c2));
5951 /* sqrt(x) < y is x >= 0 when y is very large and we
5952 don't care about Infinities. */
5953 if (! HONOR_INFINITIES (mode))
5954 return fold_build2 (GE_EXPR, type, arg,
5955 build_real (TREE_TYPE (arg), dconst0));
5957 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5958 if (lang_hooks.decls.global_bindings_p () != 0
5959 || CONTAINS_PLACEHOLDER_P (arg))
5962 arg = save_expr (arg);
5963 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5964 fold_build2 (GE_EXPR, type, arg,
5965 build_real (TREE_TYPE (arg),
5967 fold_build2 (NE_EXPR, type, arg,
5968 build_real (TREE_TYPE (arg),
5972 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5973 if (! HONOR_NANS (mode))
5974 return fold_build2 (code, type, arg,
5975 build_real (TREE_TYPE (arg), c2));
5977 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5978 if (lang_hooks.decls.global_bindings_p () == 0
5979 && ! CONTAINS_PLACEHOLDER_P (arg))
5981 arg = save_expr (arg);
5982 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5983 fold_build2 (GE_EXPR, type, arg,
5984 build_real (TREE_TYPE (arg),
5986 fold_build2 (code, type, arg,
5987 build_real (TREE_TYPE (arg),
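/* [Editor's sketch, not part of the original source.]  The core sqrt
   rewrite above, for a nonnegative constant and nonnegative arguments,
   so no NaNs are involved: sqrt(x) > c is x > c*c.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 3.0;
  double x;
  for (x = 0.0; x <= 100.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}
#endif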
5996 /* Subroutine of fold() that optimizes comparisons against Infinities,
5997 either +Inf or -Inf.
5999 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6000 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6001 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6003 The function returns the constant folded tree if a simplification
6004 can be made, and NULL_TREE otherwise. */
6007 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6009 enum machine_mode mode;
6010 REAL_VALUE_TYPE max;
6014 mode = TYPE_MODE (TREE_TYPE (arg0));
6016 /* For negative infinity swap the sense of the comparison. */
6017 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6019 code = swap_tree_comparison (code);
6024 /* x > +Inf is always false, if we ignore sNaNs. */
6025 if (HONOR_SNANS (mode))
6027 return omit_one_operand (type, integer_zero_node, arg0);
6030 /* x <= +Inf is always true, if we don't care about NaNs. */
6031 if (! HONOR_NANS (mode))
6032 return omit_one_operand (type, integer_one_node, arg0);
6034 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6035 if (lang_hooks.decls.global_bindings_p () == 0
6036 && ! CONTAINS_PLACEHOLDER_P (arg0))
6038 arg0 = save_expr (arg0);
6039 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6045 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6046 real_maxval (&max, neg, mode);
6047 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6048 arg0, build_real (TREE_TYPE (arg0), max));
6051 /* x < +Inf is always equal to x <= DBL_MAX. */
6052 real_maxval (&max, neg, mode);
6053 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6054 arg0, build_real (TREE_TYPE (arg0), max));
6057 /* x != +Inf is always equal to !(x > DBL_MAX). */
6058 real_maxval (&max, neg, mode);
6059 if (! HONOR_NANS (mode))
6060 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6061 arg0, build_real (TREE_TYPE (arg0), max));
6063 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6064 arg0, build_real (TREE_TYPE (arg0), max));
6065 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
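/* [Editor's sketch, not part of the original source.]  The DBL_MAX
   rewrites above, on a few representative doubles.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  const double vals[] = { -INFINITY, -1.0, 0.0, 1.0, DBL_MAX, INFINITY };
  int i;
  for (i = 0; i < 6; i++)
    {
      double x = vals[i];
      assert ((x >= INFINITY) == (x > DBL_MAX));
      assert ((x < INFINITY) == (x <= DBL_MAX));
    }
  return 0;
}
#endif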
6074 /* Subroutine of fold() that optimizes comparisons of a division by
6075 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6078 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6079 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6080 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6082 The function returns the constant folded tree if a simplification
6083 can be made, and NULL_TREE otherwise. */
6086 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6088 tree prod, tmp, hi, lo;
6089 tree arg00 = TREE_OPERAND (arg0, 0);
6090 tree arg01 = TREE_OPERAND (arg0, 1);
6091 unsigned HOST_WIDE_INT lpart;
6092 HOST_WIDE_INT hpart;
6093 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6097 /* We have to do this the hard way to detect unsigned overflow.
6098 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6099 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6100 TREE_INT_CST_HIGH (arg01),
6101 TREE_INT_CST_LOW (arg1),
6102 TREE_INT_CST_HIGH (arg1),
6103 &lpart, &hpart, unsigned_p);
6104 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6105 prod = force_fit_type (prod, -1, overflow, false);
6106 neg_overflow = false;
6110 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6113 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6114 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6115 TREE_INT_CST_HIGH (prod),
6116 TREE_INT_CST_LOW (tmp),
6117 TREE_INT_CST_HIGH (tmp),
6118 &lpart, &hpart, unsigned_p);
6119 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6120 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6121 TREE_CONSTANT_OVERFLOW (prod));
6123 else if (tree_int_cst_sgn (arg01) >= 0)
6125 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6126 switch (tree_int_cst_sgn (arg1))
6129 neg_overflow = true;
6130 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6135 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6140 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6150 /* A negative divisor reverses the relational operators. */
6151 code = swap_tree_comparison (code);
6153 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6154 switch (tree_int_cst_sgn (arg1))
6157 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6162 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6167 neg_overflow = true;
6168 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6180 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6181 return omit_one_operand (type, integer_zero_node, arg00);
6182 if (TREE_OVERFLOW (hi))
6183 return fold_build2 (GE_EXPR, type, arg00, lo);
6184 if (TREE_OVERFLOW (lo))
6185 return fold_build2 (LE_EXPR, type, arg00, hi);
6186 return build_range_check (type, arg00, 1, lo, hi);
6189 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6190 return omit_one_operand (type, integer_one_node, arg00);
6191 if (TREE_OVERFLOW (hi))
6192 return fold_build2 (LT_EXPR, type, arg00, lo);
6193 if (TREE_OVERFLOW (lo))
6194 return fold_build2 (GT_EXPR, type, arg00, hi);
6195 return build_range_check (type, arg00, 0, lo, hi);
6198 if (TREE_OVERFLOW (lo))
6200 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6201 return omit_one_operand (type, tmp, arg00);
6203 return fold_build2 (LT_EXPR, type, arg00, lo);
6206 if (TREE_OVERFLOW (hi))
6208 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6209 return omit_one_operand (type, tmp, arg00);
6211 return fold_build2 (LE_EXPR, type, arg00, hi);
6214 if (TREE_OVERFLOW (hi))
6216 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6217 return omit_one_operand (type, tmp, arg00);
6219 return fold_build2 (GT_EXPR, type, arg00, hi);
6222 if (TREE_OVERFLOW (lo))
6224 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6225 return omit_one_operand (type, tmp, arg00);
6227 return fold_build2 (GE_EXPR, type, arg00, lo);
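/* [Editor's sketch, not part of the original source.]  The range check
   produced above for X / C1 op C2; C division truncates toward zero, so
   x / 4 == 3 is exactly 12 <= x && x <= 15.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      assert ((x / 4 == 3) == (x >= 12 && x <= 15));
      assert ((x / 4 > 3) == (x >= 16));
    }
  return 0;
}
#endif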
6237 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6238 equality/inequality test, then return a simplified form of the test
6239 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6243 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6246 /* If this is testing a single bit, we can optimize the test. */
6247 if ((code == NE_EXPR || code == EQ_EXPR)
6248 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6249 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6251 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6252 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6253 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6255 if (arg00 != NULL_TREE
6256 /* This is only a win if casting to a signed type is cheap,
6257 i.e. when arg00's type is not a partial mode. */
6258 && TYPE_PRECISION (TREE_TYPE (arg00))
6259 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6261 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6262 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6263 result_type, fold_convert (stype, arg00),
6264 build_int_cst (stype, 0));
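/* [Editor's sketch, not part of the original source.]  The sign test
   above, assuming two's complement ints: when C is the sign bit,
   (A & C) != 0 is A < 0 and (A & C) == 0 is A >= 0.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    {
      assert (((x & INT_MIN) != 0) == (x < 0));
      assert (((x & INT_MIN) == 0) == (x >= 0));
    }
  return 0;
}
#endif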
6271 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6272 equality/inequality test, then return a simplified form of
6273 the test using shifts and logical operations. Otherwise return
6274 NULL. TYPE is the desired result type. */
6277 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6280 /* If this is testing a single bit, we can optimize the test. */
6281 if ((code == NE_EXPR || code == EQ_EXPR)
6282 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6283 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6285 tree inner = TREE_OPERAND (arg0, 0);
6286 tree type = TREE_TYPE (arg0);
6287 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6288 enum machine_mode operand_mode = TYPE_MODE (type);
6290 tree signed_type, unsigned_type, intermediate_type;
6293      /* First, see if we can fold the single bit test into a sign-bit test.  */
6295 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6300 /* Otherwise we have (A & C) != 0 where C is a single bit,
6301      convert that into ((A >> C2) & 1), where C2 = log2(C).
6302 Similarly for (A & C) == 0. */
6304      /* If INNER is a right shift by a constant and it plus BITNUM does
6305 not overflow, adjust BITNUM and INNER. */
6306 if (TREE_CODE (inner) == RSHIFT_EXPR
6307 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6308 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6309 && bitnum < TYPE_PRECISION (type)
6310 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6311 bitnum - TYPE_PRECISION (type)))
6313 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6314 inner = TREE_OPERAND (inner, 0);
6317 /* If we are going to be able to omit the AND below, we must do our
6318 operations as unsigned. If we must use the AND, we have a choice.
6319 Normally unsigned is faster, but for some machines signed is. */
6320 #ifdef LOAD_EXTEND_OP
6321 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6322 && !flag_syntax_only) ? 0 : 1;
6327 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6328 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6329 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6330 inner = fold_convert (intermediate_type, inner);
6333 inner = build2 (RSHIFT_EXPR, intermediate_type,
6334 inner, size_int (bitnum));
6336 if (code == EQ_EXPR)
6337 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6338 inner, integer_one_node);
6340 /* Put the AND last so it can combine with more things. */
6341 inner = build2 (BIT_AND_EXPR, intermediate_type,
6342 inner, integer_one_node);
6344 /* Make sure to return the proper type. */
6345 inner = fold_convert (result_type, inner);
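      /* A sketch of the result, assuming unsigned operations are
	 chosen: (x & 8) != 0 becomes ((unsigned) x >> 3) & 1, while
	 (x & 8) == 0 becomes (((unsigned) x >> 3) ^ 1) & 1.  */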
6352 /* Check whether we are allowed to reorder operands arg0 and arg1,
6353 such that the evaluation of arg1 occurs before arg0. */
6356 reorder_operands_p (tree arg0, tree arg1)
6358 if (! flag_evaluation_order)
6360 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6362 return ! TREE_SIDE_EFFECTS (arg0)
6363 && ! TREE_SIDE_EFFECTS (arg1);
6366 /* Test whether it is preferable to swap two operands, ARG0 and
6367 ARG1, for example because ARG0 is an integer constant and ARG1
6368 isn't. If REORDER is true, only recommend swapping if we can
6369 evaluate the operands in reverse order. */
6372 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6374 STRIP_SIGN_NOPS (arg0);
6375 STRIP_SIGN_NOPS (arg1);
6377 if (TREE_CODE (arg1) == INTEGER_CST)
6379 if (TREE_CODE (arg0) == INTEGER_CST)
6382 if (TREE_CODE (arg1) == REAL_CST)
6384 if (TREE_CODE (arg0) == REAL_CST)
6387 if (TREE_CODE (arg1) == COMPLEX_CST)
6389 if (TREE_CODE (arg0) == COMPLEX_CST)
6392 if (TREE_CONSTANT (arg1))
6394 if (TREE_CONSTANT (arg0))
6400 if (reorder && flag_evaluation_order
6401 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6409   /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6410 for commutative and comparison operators. Ensuring a canonical
6411 form allows the optimizers to find additional redundancies without
6412 having to explicitly check for both orderings. */
6413 if (TREE_CODE (arg0) == SSA_NAME
6414 && TREE_CODE (arg1) == SSA_NAME
6415 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6421 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6422 ARG0 is extended to a wider type. */
6425 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6427 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6429 tree shorter_type, outer_type;
6433 if (arg0_unw == arg0)
6435 shorter_type = TREE_TYPE (arg0_unw);
6437 #ifdef HAVE_canonicalize_funcptr_for_compare
6438 /* Disable this optimization if we're casting a function pointer
6439 type on targets that require function pointer canonicalization. */
6440 if (HAVE_canonicalize_funcptr_for_compare
6441 && TREE_CODE (shorter_type) == POINTER_TYPE
6442 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6446 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6449 arg1_unw = get_unwidened (arg1, shorter_type);
6451 /* If possible, express the comparison in the shorter mode. */
6452 if ((code == EQ_EXPR || code == NE_EXPR
6453 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6454 && (TREE_TYPE (arg1_unw) == shorter_type
6455 || (TREE_CODE (arg1_unw) == INTEGER_CST
6456 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6457 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6458 && int_fits_type_p (arg1_unw, shorter_type))))
6459 return fold_build2 (code, type, arg0_unw,
6460 fold_convert (shorter_type, arg1_unw));
6462 if (TREE_CODE (arg1_unw) != INTEGER_CST
6463 || TREE_CODE (shorter_type) != INTEGER_TYPE
6464 || !int_fits_type_p (arg1_unw, shorter_type))
6467   /* If we are comparing with an integer that does not fit into the range
6468 of the shorter type, the result is known. */
6469 outer_type = TREE_TYPE (arg1_unw);
6470 min = lower_bound_in_type (outer_type, shorter_type);
6471 max = upper_bound_in_type (outer_type, shorter_type);
6473 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6475 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6482 return omit_one_operand (type, integer_zero_node, arg0);
6487 return omit_one_operand (type, integer_one_node, arg0);
6493 return omit_one_operand (type, integer_one_node, arg0);
6495 return omit_one_operand (type, integer_zero_node, arg0);
6500 return omit_one_operand (type, integer_zero_node, arg0);
6502 return omit_one_operand (type, integer_one_node, arg0);
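  /* For example, if c has type unsigned char, (int) c < 256 can never
     be false, so it folds to the constant 1, with c still evaluated
     for its side effects via omit_one_operand.  */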
6511 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6512 ARG0 just the signedness is changed. */
6515 fold_sign_changed_comparison (enum tree_code code, tree type,
6516 tree arg0, tree arg1)
6518 tree arg0_inner, tmp;
6519 tree inner_type, outer_type;
6521 if (TREE_CODE (arg0) != NOP_EXPR
6522 && TREE_CODE (arg0) != CONVERT_EXPR)
6525 outer_type = TREE_TYPE (arg0);
6526 arg0_inner = TREE_OPERAND (arg0, 0);
6527 inner_type = TREE_TYPE (arg0_inner);
6529 #ifdef HAVE_canonicalize_funcptr_for_compare
6530 /* Disable this optimization if we're casting a function pointer
6531 type on targets that require function pointer canonicalization. */
6532 if (HAVE_canonicalize_funcptr_for_compare
6533 && TREE_CODE (inner_type) == POINTER_TYPE
6534 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6538 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6541 if (TREE_CODE (arg1) != INTEGER_CST
6542 && !((TREE_CODE (arg1) == NOP_EXPR
6543 || TREE_CODE (arg1) == CONVERT_EXPR)
6544 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6547 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6552 if (TREE_CODE (arg1) == INTEGER_CST)
6554 tmp = build_int_cst_wide (inner_type,
6555 TREE_INT_CST_LOW (arg1),
6556 TREE_INT_CST_HIGH (arg1));
6557 arg1 = force_fit_type (tmp, 0,
6558 TREE_OVERFLOW (arg1),
6559 TREE_CONSTANT_OVERFLOW (arg1));
6562 arg1 = fold_convert (inner_type, arg1);
6564 return fold_build2 (code, type, arg0_inner, arg1);
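  /* For example, with int and unsigned int of equal precision,
     (int) u == 3 for an unsigned u folds to u == 3u; an equality
     comparison is unaffected by the sign change of its operand.  */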
6567 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6568    the step of the array.  Reconstructs s and delta in the case of s * delta
6569    being an integer constant (and thus already folded).
6570    ADDR is the address.  OP1 is the multiplicative expression.
6571 If the function succeeds, the new address expression is returned. Otherwise
6572 NULL_TREE is returned. */
6575 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6577 tree s, delta, step;
6578 tree ref = TREE_OPERAND (addr, 0), pref;
6582 /* Canonicalize op1 into a possibly non-constant delta
6583 and an INTEGER_CST s. */
6584 if (TREE_CODE (op1) == MULT_EXPR)
6586 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6591 if (TREE_CODE (arg0) == INTEGER_CST)
6596 else if (TREE_CODE (arg1) == INTEGER_CST)
6604 else if (TREE_CODE (op1) == INTEGER_CST)
6611       /* Treat op1 as delta * 1.  */
6613 s = integer_one_node;
6616 for (;; ref = TREE_OPERAND (ref, 0))
6618 if (TREE_CODE (ref) == ARRAY_REF)
6620 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6624 step = array_ref_element_size (ref);
6625 if (TREE_CODE (step) != INTEGER_CST)
6630 if (! tree_int_cst_equal (step, s))
6635 	  /* Check whether delta is a multiple of step.  */
6636 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6645 if (!handled_component_p (ref))
6649   /* We found a suitable array reference.  Copy everything up to it,
6650      and replace the index.  */
6652 pref = TREE_OPERAND (addr, 0);
6653 ret = copy_node (pref);
6658 pref = TREE_OPERAND (pref, 0);
6659 TREE_OPERAND (pos, 0) = copy_node (pref);
6660 pos = TREE_OPERAND (pos, 0);
6663 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6664 fold_convert (itype,
6665 TREE_OPERAND (pos, 1)),
6666 fold_convert (itype, delta));
6668 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
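  /* A sketch of the transformation, assuming an array of 4-byte ints:
     in &a[i] + j * 4 the multiplier 4 matches the array step, so the
     result is &a[i + j].  */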
6672 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6673 means A >= Y && A != MAX, but in this case we know that
6674 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6677 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6679 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6681 if (TREE_CODE (bound) == LT_EXPR)
6682 a = TREE_OPERAND (bound, 0);
6683 else if (TREE_CODE (bound) == GT_EXPR)
6684 a = TREE_OPERAND (bound, 1);
6688 typea = TREE_TYPE (a);
6689 if (!INTEGRAL_TYPE_P (typea)
6690 && !POINTER_TYPE_P (typea))
6693 if (TREE_CODE (ineq) == LT_EXPR)
6695 a1 = TREE_OPERAND (ineq, 1);
6696 y = TREE_OPERAND (ineq, 0);
6698 else if (TREE_CODE (ineq) == GT_EXPR)
6700 a1 = TREE_OPERAND (ineq, 0);
6701 y = TREE_OPERAND (ineq, 1);
6706 if (TREE_TYPE (a1) != typea)
6709 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6710 if (!integer_onep (diff))
6713 return fold_build2 (GE_EXPR, type, a, y);
6716 /* Fold a sum or difference of at least one multiplication.
6717 Returns the folded tree or NULL if no simplification could be made. */
6720 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6722 tree arg00, arg01, arg10, arg11;
6723 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6725 /* (A * C) +- (B * C) -> (A+-B) * C.
6726 (A * C) +- A -> A * (C+-1).
6727 We are most concerned about the case where C is a constant,
6728 but other combinations show up during loop reduction. Since
6729 it is not difficult, try all four possibilities. */
6731 if (TREE_CODE (arg0) == MULT_EXPR)
6733 arg00 = TREE_OPERAND (arg0, 0);
6734 arg01 = TREE_OPERAND (arg0, 1);
6739 arg01 = build_one_cst (type);
6741 if (TREE_CODE (arg1) == MULT_EXPR)
6743 arg10 = TREE_OPERAND (arg1, 0);
6744 arg11 = TREE_OPERAND (arg1, 1);
6749 arg11 = build_one_cst (type);
6753 if (operand_equal_p (arg01, arg11, 0))
6754 same = arg01, alt0 = arg00, alt1 = arg10;
6755 else if (operand_equal_p (arg00, arg10, 0))
6756 same = arg00, alt0 = arg01, alt1 = arg11;
6757 else if (operand_equal_p (arg00, arg11, 0))
6758 same = arg00, alt0 = arg01, alt1 = arg10;
6759 else if (operand_equal_p (arg01, arg10, 0))
6760 same = arg01, alt0 = arg00, alt1 = arg11;
6762 /* No identical multiplicands; see if we can find a common
6763 power-of-two factor in non-power-of-two multiplies. This
6764 can help in multi-dimensional array access. */
6765 else if (host_integerp (arg01, 0)
6766 && host_integerp (arg11, 0))
6768 HOST_WIDE_INT int01, int11, tmp;
6771 int01 = TREE_INT_CST_LOW (arg01);
6772 int11 = TREE_INT_CST_LOW (arg11);
6774 /* Move min of absolute values to int11. */
6775 if ((int01 >= 0 ? int01 : -int01)
6776 < (int11 >= 0 ? int11 : -int11))
6778 tmp = int01, int01 = int11, int11 = tmp;
6779 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6786 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6788 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6789 build_int_cst (TREE_TYPE (arg00),
6794 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6799 return fold_build2 (MULT_EXPR, type,
6800 fold_build2 (code, type,
6801 fold_convert (type, alt0),
6802 fold_convert (type, alt1)),
6803 fold_convert (type, same));
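  /* Two instances of the cases above: i*4 + j*4 folds to (i + j) * 4
     via the identical multiplicands, while a*12 + b*4 folds to
     (a*3 + b) * 4 via the common power-of-two factor 4.  */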
6808 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6809 specified by EXPR into the buffer PTR of length LEN bytes.
6810    Return the number of bytes placed in the buffer, or zero upon failure.  */
6814 native_encode_int (tree expr, unsigned char *ptr, int len)
6816 tree type = TREE_TYPE (expr);
6817 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6818 int byte, offset, word, words;
6819 unsigned char value;
6821 if (total_bytes > len)
6823 words = total_bytes / UNITS_PER_WORD;
6825 for (byte = 0; byte < total_bytes; byte++)
6827 int bitpos = byte * BITS_PER_UNIT;
6828 if (bitpos < HOST_BITS_PER_WIDE_INT)
6829 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6831 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6832 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6834 if (total_bytes > UNITS_PER_WORD)
6836 word = byte / UNITS_PER_WORD;
6837 if (WORDS_BIG_ENDIAN)
6838 word = (words - 1) - word;
6839 offset = word * UNITS_PER_WORD;
6840 if (BYTES_BIG_ENDIAN)
6841 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6843 offset += byte % UNITS_PER_WORD;
6846 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6847 ptr[offset] = value;
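      /* For instance, encoding the 32-bit INTEGER_CST 0x01020304 on a
	 little-endian target stores the bytes 04 03 02 01 into PTR.  */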
6853 /* Subroutine of native_encode_expr. Encode the REAL_CST
6854 specified by EXPR into the buffer PTR of length LEN bytes.
6855    Return the number of bytes placed in the buffer, or zero upon failure.  */
6859 native_encode_real (tree expr, unsigned char *ptr, int len)
6861 tree type = TREE_TYPE (expr);
6862 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6863 int byte, offset, word, words;
6864 unsigned char value;
6866 /* There are always 32 bits in each long, no matter the size of
6867      the host's long.  We handle floating point representations with up to 192 bits.  */
6871 if (total_bytes > len)
6873 words = total_bytes / UNITS_PER_WORD;
6875 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6877 for (byte = 0; byte < total_bytes; byte++)
6879 int bitpos = byte * BITS_PER_UNIT;
6880 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6882 if (total_bytes > UNITS_PER_WORD)
6884 word = byte / UNITS_PER_WORD;
6885 if (FLOAT_WORDS_BIG_ENDIAN)
6886 word = (words - 1) - word;
6887 offset = word * UNITS_PER_WORD;
6888 if (BYTES_BIG_ENDIAN)
6889 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6891 offset += byte % UNITS_PER_WORD;
6894 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6895 ptr[offset] = value;
6900 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6901 specified by EXPR into the buffer PTR of length LEN bytes.
6902    Return the number of bytes placed in the buffer, or zero upon failure.  */
6906 native_encode_complex (tree expr, unsigned char *ptr, int len)
6911 part = TREE_REALPART (expr);
6912 rsize = native_encode_expr (part, ptr, len);
6915 part = TREE_IMAGPART (expr);
6916 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6919 return rsize + isize;
6923 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6924 specified by EXPR into the buffer PTR of length LEN bytes.
6925    Return the number of bytes placed in the buffer, or zero upon failure.  */
6929 native_encode_vector (tree expr, unsigned char *ptr, int len)
6931 int i, size, offset, count;
6932 tree itype, elem, elements;
6935 elements = TREE_VECTOR_CST_ELTS (expr);
6936 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6937 itype = TREE_TYPE (TREE_TYPE (expr));
6938 size = GET_MODE_SIZE (TYPE_MODE (itype));
6939 for (i = 0; i < count; i++)
6943 elem = TREE_VALUE (elements);
6944 elements = TREE_CHAIN (elements);
6951 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6956 if (offset + size > len)
6958 memset (ptr+offset, 0, size);
6966 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6967 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6968 buffer PTR of length LEN bytes. Return the number of bytes
6969 placed in the buffer, or zero upon failure. */
6972 native_encode_expr (tree expr, unsigned char *ptr, int len)
6974 switch (TREE_CODE (expr))
6977 return native_encode_int (expr, ptr, len);
6980 return native_encode_real (expr, ptr, len);
6983 return native_encode_complex (expr, ptr, len);
6986 return native_encode_vector (expr, ptr, len);
6994 /* Subroutine of native_interpret_expr. Interpret the contents of
6995 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6996 If the buffer cannot be interpreted, return NULL_TREE. */
6999 native_interpret_int (tree type, unsigned char *ptr, int len)
7001 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7002 int byte, offset, word, words;
7003 unsigned char value;
7004   unsigned HOST_WIDE_INT lo = 0;
7005 HOST_WIDE_INT hi = 0;
7007 if (total_bytes > len)
7009 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7011 words = total_bytes / UNITS_PER_WORD;
7013 for (byte = 0; byte < total_bytes; byte++)
7015 int bitpos = byte * BITS_PER_UNIT;
7016 if (total_bytes > UNITS_PER_WORD)
7018 word = byte / UNITS_PER_WORD;
7019 if (WORDS_BIG_ENDIAN)
7020 word = (words - 1) - word;
7021 offset = word * UNITS_PER_WORD;
7022 if (BYTES_BIG_ENDIAN)
7023 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7025 offset += byte % UNITS_PER_WORD;
7028 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7029 value = ptr[offset];
7031 if (bitpos < HOST_BITS_PER_WIDE_INT)
7032 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7034 hi |= (unsigned HOST_WIDE_INT) value
7035 << (bitpos - HOST_BITS_PER_WIDE_INT);
7038 return force_fit_type (build_int_cst_wide (type, lo, hi),
7043 /* Subroutine of native_interpret_expr. Interpret the contents of
7044 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7045 If the buffer cannot be interpreted, return NULL_TREE. */
7048 native_interpret_real (tree type, unsigned char *ptr, int len)
7050 enum machine_mode mode = TYPE_MODE (type);
7051 int total_bytes = GET_MODE_SIZE (mode);
7052 int byte, offset, word, words;
7053 unsigned char value;
7054 /* There are always 32 bits in each long, no matter the size of
7055      the host's long.  We handle floating point representations with up to 192 bits.  */
7060 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7061 if (total_bytes > len || total_bytes > 24)
7063 words = total_bytes / UNITS_PER_WORD;
7065 memset (tmp, 0, sizeof (tmp));
7066 for (byte = 0; byte < total_bytes; byte++)
7068 int bitpos = byte * BITS_PER_UNIT;
7069 if (total_bytes > UNITS_PER_WORD)
7071 word = byte / UNITS_PER_WORD;
7072 if (FLOAT_WORDS_BIG_ENDIAN)
7073 word = (words - 1) - word;
7074 offset = word * UNITS_PER_WORD;
7075 if (BYTES_BIG_ENDIAN)
7076 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7078 offset += byte % UNITS_PER_WORD;
7081 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7082 value = ptr[offset];
7084 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7087 real_from_target (&r, tmp, mode);
7088 return build_real (type, r);
7092 /* Subroutine of native_interpret_expr. Interpret the contents of
7093 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7094 If the buffer cannot be interpreted, return NULL_TREE. */
7097 native_interpret_complex (tree type, unsigned char *ptr, int len)
7099 tree etype, rpart, ipart;
7102 etype = TREE_TYPE (type);
7103 size = GET_MODE_SIZE (TYPE_MODE (etype));
7106 rpart = native_interpret_expr (etype, ptr, size);
7109 ipart = native_interpret_expr (etype, ptr+size, size);
7112 return build_complex (type, rpart, ipart);
7116 /* Subroutine of native_interpret_expr. Interpret the contents of
7117 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7118 If the buffer cannot be interpreted, return NULL_TREE. */
7121 native_interpret_vector (tree type, unsigned char *ptr, int len)
7123 tree etype, elem, elements;
7126 etype = TREE_TYPE (type);
7127 size = GET_MODE_SIZE (TYPE_MODE (etype));
7128 count = TYPE_VECTOR_SUBPARTS (type);
7129 if (size * count > len)
7132 elements = NULL_TREE;
7133 for (i = count - 1; i >= 0; i--)
7135 elem = native_interpret_expr (etype, ptr+(i*size), size);
7138 elements = tree_cons (NULL_TREE, elem, elements);
7140 return build_vector (type, elements);
7144 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7145 the buffer PTR of length LEN as a constant of type TYPE. For
7146 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7147 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7148 return NULL_TREE. */
7151 native_interpret_expr (tree type, unsigned char *ptr, int len)
7153 switch (TREE_CODE (type))
7158 return native_interpret_int (type, ptr, len);
7161 return native_interpret_real (type, ptr, len);
7164 return native_interpret_complex (type, ptr, len);
7167 return native_interpret_vector (type, ptr, len);
7175 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7176 TYPE at compile-time. If we're unable to perform the conversion
7177 return NULL_TREE. */
7180 fold_view_convert_expr (tree type, tree expr)
7182 /* We support up to 512-bit values (for V8DFmode). */
7183 unsigned char buffer[64];
7186 /* Check that the host and target are sane. */
7187 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7190 len = native_encode_expr (expr, buffer, sizeof (buffer));
7194 return native_interpret_expr (type, buffer, len);
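  /* For instance, assuming the target uses IEEE single precision
     floats, a VIEW_CONVERT_EXPR<float> of the 32-bit integer constant
     0x3f800000 round-trips through the buffer and yields the REAL_CST
     1.0f.  */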
7198 /* Fold a unary expression of code CODE and type TYPE with operand
7199 OP0. Return the folded expression if folding is successful.
7200 Otherwise, return NULL_TREE. */
7203 fold_unary (enum tree_code code, tree type, tree op0)
7207 enum tree_code_class kind = TREE_CODE_CLASS (code);
7209 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7210 && TREE_CODE_LENGTH (code) == 1);
7215 if (code == NOP_EXPR || code == CONVERT_EXPR
7216 || code == FLOAT_EXPR || code == ABS_EXPR)
7218       /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
7220 STRIP_SIGN_NOPS (arg0);
7224 /* Strip any conversions that don't change the mode. This
7225 is safe for every expression, except for a comparison
7226 	 expression because its signedness is derived from its operands.
7229 Note that this is done as an internal manipulation within
7230 the constant folder, in order to find the simplest
7231 representation of the arguments so that their form can be
7232 	 studied.  In any case, the appropriate type conversions
7233 	 should be put back in the tree that will get out of the constant folder.  */
7239 if (TREE_CODE_CLASS (code) == tcc_unary)
7241 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7242 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7243 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7244 else if (TREE_CODE (arg0) == COND_EXPR)
7246 tree arg01 = TREE_OPERAND (arg0, 1);
7247 tree arg02 = TREE_OPERAND (arg0, 2);
7248 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7249 arg01 = fold_build1 (code, type, arg01);
7250 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7251 arg02 = fold_build1 (code, type, arg02);
7252 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7255 	  /* If this was a conversion, and all we did was to move it
7256 	     inside the COND_EXPR, bring it back out.  But leave it if
7257 it is a conversion from integer to integer and the
7258 result precision is no wider than a word since such a
7259 conversion is cheap and may be optimized away by combine,
7260 while it couldn't if it were outside the COND_EXPR. Then return
7261 so we don't get into an infinite recursion loop taking the
7262 conversion out and then back in. */
7264 if ((code == NOP_EXPR || code == CONVERT_EXPR
7265 || code == NON_LVALUE_EXPR)
7266 && TREE_CODE (tem) == COND_EXPR
7267 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7268 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7269 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7270 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7271 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7272 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7273 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7275 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7276 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7277 || flag_syntax_only))
7278 tem = build1 (code, type,
7280 TREE_TYPE (TREE_OPERAND
7281 (TREE_OPERAND (tem, 1), 0)),
7282 TREE_OPERAND (tem, 0),
7283 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7284 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7287 else if (COMPARISON_CLASS_P (arg0))
7289 if (TREE_CODE (type) == BOOLEAN_TYPE)
7291 arg0 = copy_node (arg0);
7292 TREE_TYPE (arg0) = type;
7295 else if (TREE_CODE (type) != INTEGER_TYPE)
7296 return fold_build3 (COND_EXPR, type, arg0,
7297 fold_build1 (code, type,
7299 fold_build1 (code, type,
7300 integer_zero_node));
7309 case FIX_TRUNC_EXPR:
7311 case FIX_FLOOR_EXPR:
7312 case FIX_ROUND_EXPR:
7313 if (TREE_TYPE (op0) == type)
7316 /* If we have (type) (a CMP b) and type is an integral type, return
7317      a new expression involving the new type.  */
7318 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7319 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7320 TREE_OPERAND (op0, 1));
7322 /* Handle cases of two conversions in a row. */
7323 if (TREE_CODE (op0) == NOP_EXPR
7324 || TREE_CODE (op0) == CONVERT_EXPR)
7326 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7327 tree inter_type = TREE_TYPE (op0);
7328 int inside_int = INTEGRAL_TYPE_P (inside_type);
7329 int inside_ptr = POINTER_TYPE_P (inside_type);
7330 int inside_float = FLOAT_TYPE_P (inside_type);
7331 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7332 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7333 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7334 int inter_int = INTEGRAL_TYPE_P (inter_type);
7335 int inter_ptr = POINTER_TYPE_P (inter_type);
7336 int inter_float = FLOAT_TYPE_P (inter_type);
7337 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7338 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7339 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7340 int final_int = INTEGRAL_TYPE_P (type);
7341 int final_ptr = POINTER_TYPE_P (type);
7342 int final_float = FLOAT_TYPE_P (type);
7343 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7344 unsigned int final_prec = TYPE_PRECISION (type);
7345 int final_unsignedp = TYPE_UNSIGNED (type);
7347 /* In addition to the cases of two conversions in a row
7348 handled below, if we are converting something to its own
7349 type via an object of identical or wider precision, neither
7350 conversion is needed. */
7351 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7352 && (((inter_int || inter_ptr) && final_int)
7353 || (inter_float && final_float))
7354 && inter_prec >= final_prec)
7355 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
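	  /* E.g. (int) (long) i for an int i, assuming long is at
	     least as wide as int: the value survives the round trip
	     unchanged, so this folds to a direct (int) i.  */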
7357 /* Likewise, if the intermediate and final types are either both
7358 float or both integer, we don't need the middle conversion if
7359 it is wider than the final type and doesn't change the signedness
7360 (for integers). Avoid this if the final type is a pointer
7361 since then we sometimes need the inner conversion. Likewise if
7362 the outer has a precision not equal to the size of its mode. */
7363 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7364 || (inter_float && inside_float)
7365 || (inter_vec && inside_vec))
7366 && inter_prec >= inside_prec
7367 && (inter_float || inter_vec
7368 || inter_unsignedp == inside_unsignedp)
7369 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7370 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7372 && (! final_vec || inter_prec == inside_prec))
7373 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7375 /* If we have a sign-extension of a zero-extended value, we can
7376 replace that by a single zero-extension. */
7377 if (inside_int && inter_int && final_int
7378 && inside_prec < inter_prec && inter_prec < final_prec
7379 && inside_unsignedp && !inter_unsignedp)
7380 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
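	  /* E.g. (int) (short) (unsigned char) c, assuming 8-bit char,
	     16-bit short and 32-bit int: the zero extension to short
	     leaves the sign bit of the short clear, so the pair folds
	     to the single zero extension (int) (unsigned char) c.  */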
7382 /* Two conversions in a row are not needed unless:
7383 - some conversion is floating-point (overstrict for now), or
7384 - some conversion is a vector (overstrict for now), or
7385 	     - the intermediate type is narrower than both the initial and final types, or
7387 - the intermediate type and innermost type differ in signedness,
7388 and the outermost type is wider than the intermediate, or
7389 - the initial type is a pointer type and the precisions of the
7390 intermediate and final types differ, or
7391 - the final type is a pointer type and the precisions of the
7392 	       initial and intermediate types differ, or
7393 	     - the final type is a pointer type and the initial type is not, or
7394 	     - the initial type is a pointer to an array and the final type is not.  */
7396 if (! inside_float && ! inter_float && ! final_float
7397 && ! inside_vec && ! inter_vec && ! final_vec
7398 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7399 && ! (inside_int && inter_int
7400 && inter_unsignedp != inside_unsignedp
7401 && inter_prec < final_prec)
7402 && ((inter_unsignedp && inter_prec > inside_prec)
7403 == (final_unsignedp && final_prec > inter_prec))
7404 && ! (inside_ptr && inter_prec != final_prec)
7405 && ! (final_ptr && inside_prec != inter_prec)
7406 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7407 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7408 && final_ptr == inside_ptr
7410 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7411 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7412 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
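	  /* E.g. (int) (unsigned int) i for an int i satisfies all of
	     the conditions above (no widening follows the sign change),
	     so the intermediate conversion is dropped.  */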
7415 /* Handle (T *)&A.B.C for A being of type T and B and C
7416 living at offset zero. This occurs frequently in
7417 C++ upcasting and then accessing the base. */
7418 if (TREE_CODE (op0) == ADDR_EXPR
7419 && POINTER_TYPE_P (type)
7420 && handled_component_p (TREE_OPERAND (op0, 0)))
7422 HOST_WIDE_INT bitsize, bitpos;
7424 enum machine_mode mode;
7425 int unsignedp, volatilep;
7426 tree base = TREE_OPERAND (op0, 0);
7427 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7428 &mode, &unsignedp, &volatilep, false);
7429 /* If the reference was to a (constant) zero offset, we can use
7430 the address of the base if it has the same base type
7431 as the result type. */
7432 if (! offset && bitpos == 0
7433 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7434 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7435 return fold_convert (type, build_fold_addr_expr (base));
7438 if (TREE_CODE (op0) == MODIFY_EXPR
7439 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7440 /* Detect assigning a bitfield. */
7441 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7442 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7444 /* Don't leave an assignment inside a conversion
7445 unless assigning a bitfield. */
7446 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7447 /* First do the assignment, then return converted constant. */
7448 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7449 TREE_NO_WARNING (tem) = 1;
7450 TREE_USED (tem) = 1;
7454 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7455 	 constant (if x has signed type, the sign bit cannot be set
7456 in c). This folds extension into the BIT_AND_EXPR. */
7457 if (INTEGRAL_TYPE_P (type)
7458 && TREE_CODE (type) != BOOLEAN_TYPE
7459 && TREE_CODE (op0) == BIT_AND_EXPR
7460 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7463 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7466 if (TYPE_UNSIGNED (TREE_TYPE (and))
7467 || (TYPE_PRECISION (type)
7468 <= TYPE_PRECISION (TREE_TYPE (and))))
7470 else if (TYPE_PRECISION (TREE_TYPE (and1))
7471 <= HOST_BITS_PER_WIDE_INT
7472 && host_integerp (and1, 1))
7474 unsigned HOST_WIDE_INT cst;
7476 cst = tree_low_cst (and1, 1);
7477 cst &= (HOST_WIDE_INT) -1
7478 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7479 change = (cst == 0);
7480 #ifdef LOAD_EXTEND_OP
7482 && !flag_syntax_only
7483 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7486 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7487 and0 = fold_convert (uns, and0);
7488 and1 = fold_convert (uns, and1);
7494 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7495 TREE_INT_CST_HIGH (and1));
7496 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7497 TREE_CONSTANT_OVERFLOW (and1));
7498 return fold_build2 (BIT_AND_EXPR, type,
7499 fold_convert (type, and0), tem);
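      /* Example: for c of type unsigned char, (int) (c & 0x7f) folds
	 to (int) c & 0x7f, moving the widening past the masking.  */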
7503 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7504 T2 being pointers to types of the same size. */
7505 if (POINTER_TYPE_P (type)
7506 && BINARY_CLASS_P (arg0)
7507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7508 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7510 tree arg00 = TREE_OPERAND (arg0, 0);
7512 tree t1 = TREE_TYPE (arg00);
7513 tree tt0 = TREE_TYPE (t0);
7514 tree tt1 = TREE_TYPE (t1);
7515 tree s0 = TYPE_SIZE (tt0);
7516 tree s1 = TYPE_SIZE (tt1);
7518 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7519 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7520 TREE_OPERAND (arg0, 1));
7523 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7524      of the same precision, and X is an integer type not narrower than
7525 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7526 if (INTEGRAL_TYPE_P (type)
7527 && TREE_CODE (op0) == BIT_NOT_EXPR
7528 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7529 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7530 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7531 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7533 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7534 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7535 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7536 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
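      /* Example: for u of type unsigned int, with int of the same
	 precision, (unsigned int) ~(int) u folds to ~u.  */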
7539 tem = fold_convert_const (code, type, arg0);
7540 return tem ? tem : NULL_TREE;
7542 case VIEW_CONVERT_EXPR:
7543 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7544 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7545 return fold_view_convert_expr (type, op0);
7548 tem = fold_negate_expr (arg0);
7550 return fold_convert (type, tem);
7554 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7555 return fold_abs_const (arg0, type);
7556 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7557 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7558 /* Convert fabs((double)float) into (double)fabsf(float). */
7559 else if (TREE_CODE (arg0) == NOP_EXPR
7560 && TREE_CODE (type) == REAL_TYPE)
7562 tree targ0 = strip_float_extensions (arg0);
7564 return fold_convert (type, fold_build1 (ABS_EXPR,
7568 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7569 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7572 /* Strip sign ops from argument. */
7573 if (TREE_CODE (type) == REAL_TYPE)
7575 tem = fold_strip_sign_ops (arg0);
7577 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7582 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7583 return fold_convert (type, arg0);
7584 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7586 tree itype = TREE_TYPE (type);
7587 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7588 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7589 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7591 if (TREE_CODE (arg0) == COMPLEX_CST)
7593 tree itype = TREE_TYPE (type);
7594 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7595 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7596 return build_complex (type, rpart, negate_expr (ipart));
7598 if (TREE_CODE (arg0) == CONJ_EXPR)
7599 return fold_convert (type, TREE_OPERAND (arg0, 0));
7603 if (TREE_CODE (arg0) == INTEGER_CST)
7604 return fold_not_const (arg0, type);
7605 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7606 return TREE_OPERAND (arg0, 0);
7607 /* Convert ~ (-A) to A - 1. */
7608 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7609 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7610 build_int_cst (type, 1));
7611 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7612 else if (INTEGRAL_TYPE_P (type)
7613 && ((TREE_CODE (arg0) == MINUS_EXPR
7614 && integer_onep (TREE_OPERAND (arg0, 1)))
7615 || (TREE_CODE (arg0) == PLUS_EXPR
7616 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7617 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
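      /* Both rewrites rely on the two's complement identity
	 -A == ~(A - 1); e.g. ~(x - 1) folds to -x.  */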
7618 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7619 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7620 && (tem = fold_unary (BIT_NOT_EXPR, type,
7622 TREE_OPERAND (arg0, 0)))))
7623 return fold_build2 (BIT_XOR_EXPR, type, tem,
7624 fold_convert (type, TREE_OPERAND (arg0, 1)));
7625 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7626 && (tem = fold_unary (BIT_NOT_EXPR, type,
7628 TREE_OPERAND (arg0, 1)))))
7629 return fold_build2 (BIT_XOR_EXPR, type,
7630 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7634 case TRUTH_NOT_EXPR:
7635 /* The argument to invert_truthvalue must have Boolean type. */
7636 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7637 arg0 = fold_convert (boolean_type_node, arg0);
7639 /* Note that the operand of this must be an int
7640 and its values must be 0 or 1.
7641 ("true" is a fixed value perhaps depending on the language,
7642 but we don't handle values other than 1 correctly yet.) */
7643 tem = fold_truth_not_expr (arg0);
7646 return fold_convert (type, tem);
7649 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7650 return fold_convert (type, arg0);
7651 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7652 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7653 TREE_OPERAND (arg0, 1));
7654 if (TREE_CODE (arg0) == COMPLEX_CST)
7655 return fold_convert (type, TREE_REALPART (arg0));
7656 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7658 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7659 tem = fold_build2 (TREE_CODE (arg0), itype,
7660 fold_build1 (REALPART_EXPR, itype,
7661 TREE_OPERAND (arg0, 0)),
7662 fold_build1 (REALPART_EXPR, itype,
7663 TREE_OPERAND (arg0, 1)));
7664 return fold_convert (type, tem);
7666 if (TREE_CODE (arg0) == CONJ_EXPR)
7668 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7669 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7670 return fold_convert (type, tem);
7675 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7676 return fold_convert (type, integer_zero_node);
7677 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7678 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7679 TREE_OPERAND (arg0, 0));
7680 if (TREE_CODE (arg0) == COMPLEX_CST)
7681 return fold_convert (type, TREE_IMAGPART (arg0));
7682 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7684 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7685 tem = fold_build2 (TREE_CODE (arg0), itype,
7686 fold_build1 (IMAGPART_EXPR, itype,
7687 TREE_OPERAND (arg0, 0)),
7688 fold_build1 (IMAGPART_EXPR, itype,
7689 TREE_OPERAND (arg0, 1)));
7690 return fold_convert (type, tem);
7692 if (TREE_CODE (arg0) == CONJ_EXPR)
7694 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7695 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7696 return fold_convert (type, negate_expr (tem));
7702 } /* switch (code) */
7705 /* Fold a binary expression of code CODE and type TYPE with operands
7706 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7707 Return the folded expression if folding is successful. Otherwise,
7708 return NULL_TREE. */
7711 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7713 enum tree_code compl_code;
7715 if (code == MIN_EXPR)
7716 compl_code = MAX_EXPR;
7717 else if (code == MAX_EXPR)
7718 compl_code = MIN_EXPR;
7722   /* MIN (MAX (a, b), b) == b.  */
7723 if (TREE_CODE (op0) == compl_code
7724 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7725 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7727   /* MIN (MAX (b, a), b) == b.  */
7728 if (TREE_CODE (op0) == compl_code
7729 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7730 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7731 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7733   /* MIN (a, MAX (a, b)) == a.  */
7734 if (TREE_CODE (op1) == compl_code
7735 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7736 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7737 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7739   /* MIN (a, MAX (b, a)) == a.  */
7740 if (TREE_CODE (op1) == compl_code
7741 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7742 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7743 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7748 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7749 by changing CODE to reduce the magnitude of constants involved in
7750 ARG0 of the comparison.
7751 Returns a canonicalized comparison tree if a simplification was
7752 possible, otherwise returns NULL_TREE. */
7755 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7756 tree arg0, tree arg1)
7758 enum tree_code code0 = TREE_CODE (arg0);
7759 tree t, cst0 = NULL_TREE;
7763 /* Match A +- CST code arg1 and CST code arg1. */
7764 if (!(((code0 == MINUS_EXPR
7765 || code0 == PLUS_EXPR)
7766 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7767 || code0 == INTEGER_CST))
7770 /* Identify the constant in arg0 and its sign. */
7771 if (code0 == INTEGER_CST)
7774 cst0 = TREE_OPERAND (arg0, 1);
7775 sgn0 = tree_int_cst_sgn (cst0);
7777 /* Overflowed constants and zero will cause problems. */
7778 if (integer_zerop (cst0)
7779 || TREE_OVERFLOW (cst0))
7782   /* See if we can reduce the magnitude of the constant in
7783 arg0 by changing the comparison code. */
7784 if (code0 == INTEGER_CST)
7786 /* CST <= arg1 -> CST-1 < arg1. */
7787 if (code == LE_EXPR && sgn0 == 1)
7789 /* -CST < arg1 -> -CST-1 <= arg1. */
7790 else if (code == LT_EXPR && sgn0 == -1)
7792 /* CST > arg1 -> CST-1 >= arg1. */
7793 else if (code == GT_EXPR && sgn0 == 1)
7795 /* -CST >= arg1 -> -CST-1 > arg1. */
7796 else if (code == GE_EXPR && sgn0 == -1)
7800 /* arg1 code' CST' might be more canonical. */
7805 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7807 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7809 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7810 else if (code == GT_EXPR
7811 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7813 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7814 else if (code == LE_EXPR
7815 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7817 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7818 else if (code == GE_EXPR
7819 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7825 /* Now build the constant reduced in magnitude. */
7826 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7827 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7828 if (code0 != INTEGER_CST)
7829 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7831   /* If swapping might yield a more canonical form, do so.  */
7833 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7835 return fold_build2 (code, type, t, arg1);
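  /* For instance, for signed x with undefined overflow, x - 2 < y is
     canonicalized to x - 1 <= y, reducing the magnitude of the
     constant from 2 to 1.  */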
7838 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE,
7839    whose operand overflow is undefined.  Try to decrease the magnitude of constants involved
7840 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7841 and put sole constants at the second argument position.
7842 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7845 maybe_canonicalize_comparison (enum tree_code code, tree type,
7846 tree arg0, tree arg1)
7850 /* In principle pointers also have undefined overflow behavior,
7851 but that causes problems elsewhere. */
7852 if ((flag_wrapv || flag_trapv)
7853 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7854 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7857 /* Try canonicalization by simplifying arg0. */
7858 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7862   /* Try canonicalization by simplifying arg1 using the swapped comparison.  */
7864 code = swap_tree_comparison (code);
7865 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7868 /* Subroutine of fold_binary. This routine performs all of the
7869 transformations that are common to the equality/inequality
7870 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7871 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7872    fold_binary should call fold_binary rather than this function.  Fold a comparison with
7873 tree code CODE and type TYPE with operands OP0 and OP1. Return
7874 the folded comparison or NULL_TREE. */
7877 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7879 tree arg0, arg1, tem;
7884 STRIP_SIGN_NOPS (arg0);
7885 STRIP_SIGN_NOPS (arg1);
7887 tem = fold_relational_const (code, type, arg0, arg1);
7888 if (tem != NULL_TREE)
7891 /* If one arg is a real or integer constant, put it last. */
7892 if (tree_swap_operands_p (arg0, arg1, true))
7893 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7895 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7896 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7897 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7898 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7899 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7900 && !(flag_wrapv || flag_trapv))
7901 && (TREE_CODE (arg1) == INTEGER_CST
7902 && !TREE_OVERFLOW (arg1)))
7904 tree const1 = TREE_OPERAND (arg0, 1);
7906 tree variable = TREE_OPERAND (arg0, 0);
7909 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7911 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7912 TREE_TYPE (arg1), const2, const1);
7913 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7914 && (TREE_CODE (lhs) != INTEGER_CST
7915 || !TREE_OVERFLOW (lhs)))
7916 return fold_build2 (code, type, variable, lhs);
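  /* For example, x + 2 < 10 for signed x (without -fwrapv/-ftrapv)
     becomes x < 8 by moving the constant to the right-hand side.  */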
7919 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7920 same object, then we can fold this to a comparison of the two offsets in
7921 signed size type. This is possible because pointer arithmetic is
7922    restricted to remain within an object and overflow on pointer differences
7923 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7924 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7925 && !flag_wrapv && !flag_trapv)
7927 tree base0, offset0, base1, offset1;
7929 if (extract_array_ref (arg0, &base0, &offset0)
7930 && extract_array_ref (arg1, &base1, &offset1)
7931 && operand_equal_p (base0, base1, 0))
7933 tree signed_size_type_node;
7934 signed_size_type_node = signed_type_for (size_type_node);
7936 /* By converting to signed size type we cover middle-end pointer
7937 arithmetic which operates on unsigned pointer types of size
7938 type size and ARRAY_REF offsets which are properly sign or
7939 	 zero extended from their type in case it is narrower than the size type.  */
7941 if (offset0 == NULL_TREE)
7942 offset0 = build_int_cst (signed_size_type_node, 0);
7944 offset0 = fold_convert (signed_size_type_node, offset0);
7945 if (offset1 == NULL_TREE)
7946 offset1 = build_int_cst (signed_size_type_node, 0);
7948 offset1 = fold_convert (signed_size_type_node, offset1);
7950 return fold_build2 (code, type, offset0, offset1);
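  /* E.g. a comparison of &a[i] and &a[j] with the same base a can fold
     to a comparison, in the signed variant of size type, of the two
     offsets that extract_array_ref derives from i and j.  */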
7954 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
7955 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
7956 the resulting offset is smaller in absolute value than the
7958 if (!(flag_wrapv || flag_trapv)
7959 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7960 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7961 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7962 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
7963 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
7964 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7965 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
7967 tree const1 = TREE_OPERAND (arg0, 1);
7968 tree const2 = TREE_OPERAND (arg1, 1);
7969 tree variable1 = TREE_OPERAND (arg0, 0);
7970 tree variable2 = TREE_OPERAND (arg1, 0);
7973 /* Put the constant on the side where it doesn't overflow and is
7974 of lower absolute value than before. */
7975 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7976 ? MINUS_EXPR : PLUS_EXPR,
7978 if (!TREE_OVERFLOW (cst)
7979 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
7980 return fold_build2 (code, type,
7982 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
7985 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7986 ? MINUS_EXPR : PLUS_EXPR,
7988 if (!TREE_OVERFLOW (cst)
7989 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
7990 return fold_build2 (code, type,
7991 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
7996 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8000 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8002 tree targ0 = strip_float_extensions (arg0);
8003 tree targ1 = strip_float_extensions (arg1);
8004 tree newtype = TREE_TYPE (targ0);
8006 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8007 newtype = TREE_TYPE (targ1);
8009 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8010 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8011 return fold_build2 (code, type, fold_convert (newtype, targ0),
8012 fold_convert (newtype, targ1));
8014 /* (-a) CMP (-b) -> b CMP a */
8015 if (TREE_CODE (arg0) == NEGATE_EXPR
8016 && TREE_CODE (arg1) == NEGATE_EXPR)
8017 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8018 TREE_OPERAND (arg0, 0));
8020 if (TREE_CODE (arg1) == REAL_CST)
8022 REAL_VALUE_TYPE cst;
8023 cst = TREE_REAL_CST (arg1);
8025 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8026 if (TREE_CODE (arg0) == NEGATE_EXPR)
8027 return fold_build2 (swap_tree_comparison (code), type,
8028 TREE_OPERAND (arg0, 0),
8029 build_real (TREE_TYPE (arg1),
8030 REAL_VALUE_NEGATE (cst)));
8032 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8033 /* a CMP (-0) -> a CMP 0 */
8034 if (REAL_VALUE_MINUS_ZERO (cst))
8035 return fold_build2 (code, type, arg0,
8036 build_real (TREE_TYPE (arg1), dconst0));
8038 /* x != NaN is always true, other ops are always false. */
8039 if (REAL_VALUE_ISNAN (cst)
8040 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8042 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8043 return omit_one_operand (type, tem, arg0);
8046 /* Fold comparisons against infinity. */
8047 if (REAL_VALUE_ISINF (cst))
8049 tem = fold_inf_compare (code, type, arg0, arg1);
8050 if (tem != NULL_TREE)
8055 /* If this is a comparison of a real constant with a PLUS_EXPR
8056 or a MINUS_EXPR of a real constant, we can convert it into a
8057 comparison with a revised real constant as long as no overflow
8058 occurs when unsafe_math_optimizations are enabled. */
8059 if (flag_unsafe_math_optimizations
8060 && TREE_CODE (arg1) == REAL_CST
8061 && (TREE_CODE (arg0) == PLUS_EXPR
8062 || TREE_CODE (arg0) == MINUS_EXPR)
8063 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8064 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8065 ? MINUS_EXPR : PLUS_EXPR,
8066 arg1, TREE_OPERAND (arg0, 1), 0))
8067 && ! TREE_CONSTANT_OVERFLOW (tem))
8068 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8070 /* Likewise, we can simplify a comparison of a real constant with
8071 a MINUS_EXPR whose first operand is also a real constant, i.e.
8072 (c1 - x) < c2 becomes x > c1-c2. */
8073 if (flag_unsafe_math_optimizations
8074 && TREE_CODE (arg1) == REAL_CST
8075 && TREE_CODE (arg0) == MINUS_EXPR
8076 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8077 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8079 && ! TREE_CONSTANT_OVERFLOW (tem))
8080 return fold_build2 (swap_tree_comparison (code), type,
8081 TREE_OPERAND (arg0, 1), tem);
8083 /* Fold comparisons against built-in math functions. */
8084 if (TREE_CODE (arg1) == REAL_CST
8085 && flag_unsafe_math_optimizations
8086 && ! flag_errno_math)
8088 enum built_in_function fcode = builtin_mathfn_code (arg0);
8090 if (fcode != END_BUILTINS)
8092 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8093 if (tem != NULL_TREE)
8099 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8100 if (TREE_CONSTANT (arg1)
8101 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8102 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8103 /* This optimization is invalid for ordered comparisons
8104 if CONST+INCR overflows or if foo+incr might overflow.
8105 This optimization is invalid for floating point due to rounding.
8106 For pointer types we assume overflow doesn't happen. */
8107 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8108 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8109 && (code == EQ_EXPR || code == NE_EXPR))))
8111 tree varop, newconst;
8113 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8115 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8116 arg1, TREE_OPERAND (arg0, 1));
8117 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8118 TREE_OPERAND (arg0, 0),
8119 TREE_OPERAND (arg0, 1));
8123 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8124 arg1, TREE_OPERAND (arg0, 1));
8125 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8126 TREE_OPERAND (arg0, 0),
8127 TREE_OPERAND (arg0, 1));
8131 /* If VAROP is a reference to a bitfield, we must mask
8132 the constant by the width of the field. */
8133 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8134 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8135 && host_integerp (DECL_SIZE (TREE_OPERAND
8136 (TREE_OPERAND (varop, 0), 1)), 1))
8138 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8139 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8140 tree folded_compare, shift;
8142 /* First check whether the comparison would come out
8143 always the same. If we don't do that we would
8144 change the meaning with the masking. */
8145 folded_compare = fold_build2 (code, type,
8146 TREE_OPERAND (varop, 0), arg1);
8147 if (TREE_CODE (folded_compare) == INTEGER_CST)
8148 return omit_one_operand (type, folded_compare, varop);
8150 shift = build_int_cst (NULL_TREE,
8151 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8152 shift = fold_convert (TREE_TYPE (varop), shift);
8153 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8155 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8159 return fold_build2 (code, type, varop, newconst);
8162 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8163 && (TREE_CODE (arg0) == NOP_EXPR
8164 || TREE_CODE (arg0) == CONVERT_EXPR))
8166 /* If we are widening one operand of an integer comparison,
8167 see if the other operand is similarly being widened. Perhaps we
8168 can do the comparison in the narrower type. */
8169 tem = fold_widened_comparison (code, type, arg0, arg1);
8173 /* Or if we are changing signedness. */
8174 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8179 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8180 constant, we can simplify it. */
8181 if (TREE_CODE (arg1) == INTEGER_CST
8182 && (TREE_CODE (arg0) == MIN_EXPR
8183 || TREE_CODE (arg0) == MAX_EXPR)
8184 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8186 tem = optimize_minmax_comparison (code, type, op0, op1);
8191 /* Simplify comparison of something with itself. (For IEEE
8192 floating-point, we can only do some of these simplifications.) */
8193 if (operand_equal_p (arg0, arg1, 0))
8198 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8199 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8200 return constant_boolean_node (1, type);
8205 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8206 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8207 return constant_boolean_node (1, type);
8208 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8211       /* For NE, we can only do this simplification if the operands are
8212 	 integer or we don't honor IEEE floating-point NaNs.  */
8213 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8214 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8216 /* ... fall through ... */
8219 return constant_boolean_node (0, type);
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */
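      /* A worked instance of the above (illustrative): for
	 ((x > y) - (y > x)) > 0 the three foldings evaluate to
	 1 > 0, 0 > 0 and -1 > 0, i.e. 1, 0, 0, so the whole
	 expression collapses to the single comparison x > y.  */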
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */
	      tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, minval),
			       arg1);
	      tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, maxval),
			       arg1);
	      tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval, cval2, maxval),
			       arg1);

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */
	      if (TREE_CODE (high_result) == INTEGER_CST
		  && TREE_CODE (equal_result) == INTEGER_CST
		  && TREE_CODE (low_result) == INTEGER_CST)
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for `=', and the low for `<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0: /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1: code = LT_EXPR; break;
		    case 2: code = EQ_EXPR; break;
		    case 3: code = LE_EXPR; break;
		    case 4: code = GT_EXPR; break;
		    case 5: code = NE_EXPR; break;
		    case 6: code = GE_EXPR; break;
		    case 7: /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  if (save_p)
		    return save_expr (build2 (code, type, cval1, cval2));
		  return fold_build2 (code, type, cval1, cval2);
		}
	    }
	}
      /* Fold a comparison of the address of COMPONENT_REFs with the same
	 type and component to a comparison of the address of the base
	 object.  In short, &x->a OP &y->a becomes x OP y and
	 &x->a OP &y.a becomes x OP &y.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
	{
	  tree cref0 = TREE_OPERAND (arg0, 0);
	  tree cref1 = TREE_OPERAND (arg1, 0);
	  if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	    {
	      tree op0 = TREE_OPERAND (cref0, 0);
	      tree op1 = TREE_OPERAND (cref1, 0);
	      return fold_build2 (code, type,
				  build_fold_addr_expr (op0),
				  build_fold_addr_expr (op1));
	    }
	}
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
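      /* For instance (illustrative): with unsigned X, the comparison
	 X / 4 == 2 holds exactly when 8 <= X && X <= 11, so it can be
	 rewritten as one unsigned range test on X.  */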
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  tem = fold_div_compare (code, type, arg0, arg1);
	  if (tem != NULL_TREE)
	    return tem;
	}
      /* Fold ~X op ~Y as Y op X.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
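      /* E.g. (illustrative): ~x < 5 becomes x > ~5, because ~x == -x - 1
	 is strictly decreasing in x, so complementing both sides reverses
	 the ordering.  */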
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 0),
			    fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */
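/* The algebra behind the transformation: for z = a + bi,
   z * conj(z) = (a + bi)(a - bi) = a*a + b*b, with a zero imaginary
   part, which is exactly what the code below builds.  */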
static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);
  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	tem = const_binop (code, arg0, arg1, 0);
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */
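  /* For example (illustrative): (a < b) & (c < d), where both operands
     are comparisons and hence known to be 0 or 1, becomes the logical
     (a < b) && (c < d) as a TRUTH_AND_EXPR, while (a < b) == (c < d)
     becomes the inversion of (a < b) ^ (c < d).  */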
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    TREE_OPERAND (arg0, 1), op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type,
				    op0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce
	     more simplifications.  */
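	  /* E.g. (illustrative): (x & 0xF0) + (y & 0x0F) sets disjoint
	     bits, so it is exactly (x & 0xF0) | (y & 0x0F).  */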
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
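	  /* Concretely (illustrative): ((a*b) + c) + (d*e) is rewritten
	     as ((a*b) + (d*e)) + c, bringing the two products together.  */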
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type, parg1)));
	    }
	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
	     step of the array.  The loop optimizer sometimes produces this
	     kind of expression.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
	      if (tem)
		return fold_convert (type, tem);
	    }
	  else if (TREE_CODE (arg1) == ADDR_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
	      if (tem)
		return fold_convert (type, tem);
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;
	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));
	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
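      /* E.g. (illustrative): for a 32-bit unsigned A, (A << 8) + (A >> 24)
	 is A rotated left by 8, since 8 + 24 == 32; likewise
	 (A << B) + (A >> (32 - B)) is A rotated left by B.  */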
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
    associate:
      /* In most languages, we can't associate operations on floats
	 through parentheses.  Rather than remember where the parentheses
	 were, we don't associate floats at all, unless the user has
	 specified -funsafe-math-optimizations.  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
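	  /* E.g. (illustrative): for (x + 3) - (y + 2), the variable parts
	     x and -y and the literal parts 3 and -2 are grouped separately,
	     so the whole expression reassociates to (x - y) + 1.  */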
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal
		 is greater than the positive part.  Otherwise, the
		 multiplicative folding code (i.e. extract_muldiv) may be
		 fooled in case unsigned constants are subtracted, like in
		 the following example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

      return NULL_TREE;
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1))
	return fold_build1 (BIT_NOT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, arg1);

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold_build2 (BIT_AND_EXPR, type,
				    fold_build1 (BIT_NOT_EXPR, type,
						 TREE_OPERAND (arg1, 0)),
				    arg0);
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold_build2 (BIT_AND_EXPR, type,
				    fold_build1 (BIT_NOT_EXPR, type,
						 TREE_OPERAND (arg1, 1)),
				    arg0);
	    }
	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
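	  /* E.g. (illustrative): with B == 7, write A = H + L where
	     H = A & ~7 and L = A & 7; then (A & ~7) - (A & 7) = H - L,
	     and (A ^ 7) - 7 = H + (7 - L) - 7 = H - L as well.  */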
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);
      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2 (PLUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2 (MULT_EXPR, type, diff,
				  fold_convert (type, esz));
	    }
	}
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
	 step of the array.  The loop optimizer sometimes produces this
	 kind of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
	  if (tem)
	    return fold_convert (type, tem);
	}
      if (flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (code, type, arg0, arg1)))
	return tem;
      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, negate_expr (arg0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Transform x * -1 into -x.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert (type, negate_expr (arg0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2 (MULT_EXPR, type,
				negate_expr (arg0), tem);
	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1));
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0,
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));
	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }
	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}
	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (expfn, arglist);
		}
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, arg,
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      tree arg, arglist;
		      REAL_VALUE_TYPE c;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg, arglist;
		      REAL_VALUE_TYPE c;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));
      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
	  int width = TYPE_PRECISION (type);
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2 (BIT_IOR_EXPR, type,
				TREE_OPERAND (arg0, 0), arg1);
	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
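	  /* E.g. (illustrative): (x & 0xFF) | 0x0F keeps only the mask
	     bits not already forced by the IOR, so it is canonicalized
	     to (x & 0xF0) | 0x0F.  */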
	  hi1 &= mhi;
	  lo1 &= mlo;
	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
	    return fold_build2 (BIT_IOR_EXPR, type,
				fold_build2 (BIT_AND_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     build_int_cst_wide (type,
								 lo1 & ~lo2,
								 hi1 & ~hi2)),
				arg1);
	}
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
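      /* This is just De Morgan's law (illustrative): ~a | ~b == ~(a & b),
	 so a single NOT of an AND replaces two NOTs and an IOR.  */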
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);
      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce
	 more simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_build1 (BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2 (EQ_EXPR, type, arg0,
			    build_int_cst (TREE_TYPE (arg0), 0));
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (BIT_IOR_EXPR, type,
			    fold_build2 (BIT_AND_EXPR, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (BIT_AND_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;
      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands (type, r, arg0, arg1);
	}
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands (type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));
      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
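      /* E.g. (illustrative): x / 4.0 can always be rewritten as x * 0.25,
	 since 0.25 is exactly representable in binary floating point;
	 x / 3.0 has no exact reciprocal, so that rewrite is only done
	 under -funsafe-math-optimizations.  */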
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));
      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }
	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_function_call_expr (cosfn,
						     TREE_OPERAND (arg0, 1));
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = TREE_OPERAND (arg0, 1);
		      tmp = build_function_call_expr (cosfn, tmp);
		      return fold_build2 (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg, arglist;
		  REAL_VALUE_TYPE c;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
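      /* E.g. (illustrative): for unsigned a, a / (4 << n) becomes
	 a >> (n + 2), since log2(4) == 2.  */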
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
				    sh_cnt, build_int_cst (NULL_TREE, pow2));
	      return fold_build2 (RSHIFT_EXPR, type,
				  fold_convert (type, arg0), sh_cnt);
	    }
	}
      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (!TYPE_UNSIGNED (type) && !flag_wrapv
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	return fold_build2 (code, type, negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, but it's not clear if
	 they do after the last round of changes to the DIV code in
	 expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10019 if (TREE_CODE (arg1) == INTEGER_CST
10020 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10021 return fold_convert (type, tem);
10025 case CEIL_MOD_EXPR:
10026 case FLOOR_MOD_EXPR:
10027 case ROUND_MOD_EXPR:
10028 case TRUNC_MOD_EXPR:
10029 /* X % 1 is always zero, but be sure to preserve any side
10030 effects in X.  */
10031 if (integer_onep (arg1))
10032 return omit_one_operand (type, integer_zero_node, arg0);
10034 /* X % 0, return X % 0 unchanged so that we can get the
10035 proper warnings and errors. */
10036 if (integer_zerop (arg1))
10037 return NULL_TREE;
10039 /* 0 % X is always zero, but be sure to preserve any side
10040 effects in X. Place this after checking for X == 0. */
10041 if (integer_zerop (arg0))
10042 return omit_one_operand (type, integer_zero_node, arg1);
10044 /* X % -1 is zero. */
10045 if (!TYPE_UNSIGNED (type)
10046 && TREE_CODE (arg1) == INTEGER_CST
10047 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10048 && TREE_INT_CST_HIGH (arg1) == -1)
10049 return omit_one_operand (type, integer_zero_node, arg0);
10051 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10052 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10053 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10054 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10056 tree c = arg1;
10057 /* Also optimize A % (C << N) where C is a power of 2,
10058 to A & ((C << N) - 1). */
10059 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10060 c = TREE_OPERAND (arg1, 0);
10062 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10064 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10065 arg1, integer_one_node);
10066 return fold_build2 (BIT_AND_EXPR, type,
10067 fold_convert (type, arg0),
10068 fold_convert (type, mask));
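/* Editor's illustration (not part of GCC): for unsigned x, x % 8
   folds to x & 7.  A sketch with a hypothetical helper:

     unsigned mod8 (unsigned x)
     {
       return x & (8u - 1);   /* same value as x % 8 */
     }

   The AND form avoids a hardware divide; it is only valid when the
   left operand cannot be negative, which is what the guards above
   establish.  */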
10072 /* X % -C is the same as X % C. */
10073 if (code == TRUNC_MOD_EXPR
10074 && !TYPE_UNSIGNED (type)
10075 && TREE_CODE (arg1) == INTEGER_CST
10076 && !TREE_CONSTANT_OVERFLOW (arg1)
10077 && TREE_INT_CST_HIGH (arg1) < 0
10078 && !flag_trapv
10079 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10080 && !sign_bit_p (arg1, arg1))
10081 return fold_build2 (code, type, fold_convert (type, arg0),
10082 fold_convert (type, negate_expr (arg1)));
10084 /* X % -Y is the same as X % Y. */
10085 if (code == TRUNC_MOD_EXPR
10086 && !TYPE_UNSIGNED (type)
10087 && TREE_CODE (arg1) == NEGATE_EXPR
10088 && !flag_trapv)
10089 return fold_build2 (code, type, fold_convert (type, arg0),
10090 fold_convert (type, TREE_OPERAND (arg1, 0)));
10092 if (TREE_CODE (arg1) == INTEGER_CST
10093 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10094 return fold_convert (type, tem);
10096 return NULL_TREE;
10098 case LROTATE_EXPR:
10099 case RROTATE_EXPR:
10100 if (integer_all_onesp (arg0))
10101 return omit_one_operand (type, arg0, arg1);
10102 goto shift;
10104 case RSHIFT_EXPR:
10105 /* Optimize -1 >> x for arithmetic right shifts. */
10106 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10107 return omit_one_operand (type, arg0, arg1);
10108 /* ... fall through ... */
10110 case LSHIFT_EXPR:
10111 shift:
10112 if (integer_zerop (arg1))
10113 return non_lvalue (fold_convert (type, arg0));
10114 if (integer_zerop (arg0))
10115 return omit_one_operand (type, arg0, arg1);
10117 /* Since a negative shift count is not well-defined,
10118 don't try to compute it in the compiler.  */
10119 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10120 return NULL_TREE;
10122 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10123 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10124 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10125 && host_integerp (TREE_OPERAND (arg0, 1), false)
10126 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10128 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10129 + TREE_INT_CST_LOW (arg1));
10131 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10132 being well defined. */
10133 if (low >= TYPE_PRECISION (type))
10135 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10136 low = low % TYPE_PRECISION (type);
10137 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10138 return build_int_cst (type, 0);
10139 else
10140 low = TYPE_PRECISION (type) - 1;
10143 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10144 build_int_cst (type, low));
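/* Editor's illustration (not part of GCC): (x << 3) << 5 combines
   to x << 8, and an over-wide shift of an unsigned value collapses
   to 0.  A sketch, assuming 32-bit unsigned int:

     unsigned stacked  (unsigned x) { return (x << 3) << 5; }
     unsigned combined (unsigned x) { return x << 8; }

   For rotates, the code above instead adds the counts modulo the
   precision of the type.  */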
10147 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10148 into x & ((unsigned)-1 >> c) for unsigned types. */
10149 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10150 || (TYPE_UNSIGNED (type)
10151 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10152 && host_integerp (arg1, false)
10153 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10154 && host_integerp (TREE_OPERAND (arg0, 1), false)
10155 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10157 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10158 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10159 tree lshift;
10160 tree arg00;
10162 if (low0 == low1)
10164 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10166 lshift = build_int_cst (type, -1);
10167 lshift = int_const_binop (code, lshift, arg1, 0);
10169 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10173 /* Rewrite an LROTATE_EXPR by a constant into an
10174 RROTATE_EXPR by a new constant. */
10175 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10177 tree tem = build_int_cst (NULL_TREE,
10178 GET_MODE_BITSIZE (TYPE_MODE (type)));
10179 tem = fold_convert (TREE_TYPE (arg1), tem);
10180 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10181 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
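/* Editor's illustration (not part of GCC): on a 32-bit type, a
   rotate left by 3 and a rotate right by 29 are the same bit
   permutation, which is why only RROTATE_EXPR need survive.  A
   sketch using the portable rotate idiom, 32-bit unsigned int:

     unsigned rotl3  (unsigned x) { return (x << 3) | (x >> 29); }
     unsigned rotr29 (unsigned x) { return (x >> 29) | (x << 3); }

   Both bodies compute the identical value for every x.  */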
10184 /* If we have a rotate of a bit operation with the rotate count and
10185 the second operand of the bit operation both constant,
10186 permute the two operations. */
10187 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10188 && (TREE_CODE (arg0) == BIT_AND_EXPR
10189 || TREE_CODE (arg0) == BIT_IOR_EXPR
10190 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10191 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10192 return fold_build2 (TREE_CODE (arg0), type,
10193 fold_build2 (code, type,
10194 TREE_OPERAND (arg0, 0), arg1),
10195 fold_build2 (code, type,
10196 TREE_OPERAND (arg0, 1), arg1));
10198 /* Two consecutive rotates adding up to the width of the mode can
10199 be ignored.  */
10200 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10201 && TREE_CODE (arg0) == RROTATE_EXPR
10202 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10203 && TREE_INT_CST_HIGH (arg1) == 0
10204 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10205 && ((TREE_INT_CST_LOW (arg1)
10206 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10207 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10208 return TREE_OPERAND (arg0, 0);
10210 break;
10212 case MIN_EXPR:
10213 if (operand_equal_p (arg0, arg1, 0))
10214 return omit_one_operand (type, arg0, arg1);
10215 if (INTEGRAL_TYPE_P (type)
10216 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10217 return omit_one_operand (type, arg1, arg0);
10218 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10219 if (tem)
10220 return tem;
10221 goto associate;
10223 case MAX_EXPR:
10224 if (operand_equal_p (arg0, arg1, 0))
10225 return omit_one_operand (type, arg0, arg1);
10226 if (INTEGRAL_TYPE_P (type)
10227 && TYPE_MAX_VALUE (type)
10228 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10229 return omit_one_operand (type, arg1, arg0);
10230 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10231 if (tem)
10232 return tem;
10233 goto associate;
10235 case TRUTH_ANDIF_EXPR:
10236 /* Note that the operands of this must be ints
10237 and their values must be 0 or 1.
10238 ("true" is a fixed value perhaps depending on the language.) */
10239 /* If first arg is constant zero, return it. */
10240 if (integer_zerop (arg0))
10241 return fold_convert (type, arg0);
10242 case TRUTH_AND_EXPR:
10243 /* If either arg is constant true, drop it. */
10244 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10245 return non_lvalue (fold_convert (type, arg1));
10246 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10247 /* Preserve sequence points. */
10248 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10249 return non_lvalue (fold_convert (type, arg0));
10250 /* If second arg is constant zero, result is zero, but first arg
10251 must be evaluated. */
10252 if (integer_zerop (arg1))
10253 return omit_one_operand (type, arg1, arg0);
10254 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10255 case will be handled here. */
10256 if (integer_zerop (arg0))
10257 return omit_one_operand (type, arg0, arg1);
10259 /* !X && X is always false. */
10260 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10262 return omit_one_operand (type, integer_zero_node, arg1);
10263 /* X && !X is always false. */
10264 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10265 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10266 return omit_one_operand (type, integer_zero_node, arg0);
10268 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10269 means A >= Y && A != MAX, but in this case we know that
10270 A < X <= MAX.  */
10272 if (!TREE_SIDE_EFFECTS (arg0)
10273 && !TREE_SIDE_EFFECTS (arg1))
10275 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10276 if (tem && !operand_equal_p (tem, arg0, 0))
10277 return fold_build2 (code, type, tem, arg1);
10279 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10280 if (tem && !operand_equal_p (tem, arg1, 0))
10281 return fold_build2 (code, type, arg0, tem);
10285 /* We only do these simplifications if we are optimizing.  */
10286 if (!optimize)
10287 return NULL_TREE;
10289 /* Check for things like (A || B) && (A || C). We can convert this
10290 to A || (B && C). Note that either operator can be any of the four
10291 truth and/or operations and the transformation will still be
10292 valid. Also note that we only care about order for the
10293 ANDIF and ORIF operators. If B contains side effects, this
10294 might change the truth-value of A. */
10295 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10296 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10297 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10298 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10299 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10300 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10302 tree a00 = TREE_OPERAND (arg0, 0);
10303 tree a01 = TREE_OPERAND (arg0, 1);
10304 tree a10 = TREE_OPERAND (arg1, 0);
10305 tree a11 = TREE_OPERAND (arg1, 1);
10306 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10307 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10308 && (code == TRUTH_AND_EXPR
10309 || code == TRUTH_OR_EXPR));
10311 if (operand_equal_p (a00, a10, 0))
10312 return fold_build2 (TREE_CODE (arg0), type, a00,
10313 fold_build2 (code, type, a01, a11));
10314 else if (commutative && operand_equal_p (a00, a11, 0))
10315 return fold_build2 (TREE_CODE (arg0), type, a00,
10316 fold_build2 (code, type, a01, a10));
10317 else if (commutative && operand_equal_p (a01, a10, 0))
10318 return fold_build2 (TREE_CODE (arg0), type, a01,
10319 fold_build2 (code, type, a00, a11));
10321 /* This case is tricky because we must either have commutative
10322 operators or else A10 must not have side-effects. */
10324 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10325 && operand_equal_p (a01, a11, 0))
10326 return fold_build2 (TREE_CODE (arg0), type,
10327 fold_build2 (code, type, a00, a10),
10328 a01);
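/* Editor's illustration (not part of GCC): the distribution above
   turns (a || b) && (a || c) into a || (b && c).  A sketch with
   hypothetical helpers:

     int before (int a, int b, int c) { return (a || b) && (a || c); }
     int after  (int a, int b, int c) { return a || (b && c); }

   The side-effect restrictions checked above keep the changed
   evaluation order from being observable.  */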
10331 /* See if we can build a range comparison. */
10332 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10333 return tem;
10335 /* Check for the possibility of merging component references. If our
10336 lhs is another similar operation, try to merge its rhs with our
10337 rhs. Then try to merge our lhs and rhs. */
10338 if (TREE_CODE (arg0) == code
10339 && 0 != (tem = fold_truthop (code, type,
10340 TREE_OPERAND (arg0, 1), arg1)))
10341 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10343 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10344 return tem;
10346 return NULL_TREE;
10348 case TRUTH_ORIF_EXPR:
10349 /* Note that the operands of this must be ints
10350 and their values must be 0 or true.
10351 ("true" is a fixed value perhaps depending on the language.) */
10352 /* If first arg is constant true, return it. */
10353 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10354 return fold_convert (type, arg0);
10355 case TRUTH_OR_EXPR:
10356 /* If either arg is constant zero, drop it. */
10357 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10358 return non_lvalue (fold_convert (type, arg1));
10359 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10360 /* Preserve sequence points. */
10361 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10362 return non_lvalue (fold_convert (type, arg0));
10363 /* If second arg is constant true, result is true, but we must
10364 evaluate first arg. */
10365 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10366 return omit_one_operand (type, arg1, arg0);
10367 /* Likewise for first arg, but note this only occurs here for
10368 TRUTH_OR_EXPR.  */
10369 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10370 return omit_one_operand (type, arg0, arg1);
10372 /* !X || X is always true. */
10373 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10374 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10375 return omit_one_operand (type, integer_one_node, arg1);
10376 /* X || !X is always true. */
10377 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10378 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10379 return omit_one_operand (type, integer_one_node, arg0);
10383 case TRUTH_XOR_EXPR:
10384 /* If the second arg is constant zero, drop it. */
10385 if (integer_zerop (arg1))
10386 return non_lvalue (fold_convert (type, arg0));
10387 /* If the second arg is constant true, this is a logical inversion. */
10388 if (integer_onep (arg1))
10390 /* Only call invert_truthvalue if operand is a truth value. */
10391 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10392 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10393 else
10394 tem = invert_truthvalue (arg0);
10395 return non_lvalue (fold_convert (type, tem));
10397 /* Identical arguments cancel to zero. */
10398 if (operand_equal_p (arg0, arg1, 0))
10399 return omit_one_operand (type, integer_zero_node, arg0);
10401 /* !X ^ X is always true. */
10402 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10403 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10404 return omit_one_operand (type, integer_one_node, arg1);
10406 /* X ^ !X is always true. */
10407 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10408 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10409 return omit_one_operand (type, integer_one_node, arg0);
10411 return NULL_TREE;
10413 case EQ_EXPR:
10414 case NE_EXPR:
10415 tem = fold_comparison (code, type, op0, op1);
10416 if (tem != NULL_TREE)
10417 return tem;
10419 /* bool_var != 0 becomes bool_var. */
10420 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10421 && code == NE_EXPR)
10422 return non_lvalue (fold_convert (type, arg0));
10424 /* bool_var == 1 becomes bool_var. */
10425 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10426 && code == EQ_EXPR)
10427 return non_lvalue (fold_convert (type, arg0));
10429 /* bool_var != 1 becomes !bool_var. */
10430 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10431 && code == NE_EXPR)
10432 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10434 /* bool_var == 0 becomes !bool_var. */
10435 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10436 && code == EQ_EXPR)
10437 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10439 /* If this is an equality comparison of the address of a non-weak
10440 object against zero, then we know the result. */
10441 if (TREE_CODE (arg0) == ADDR_EXPR
10442 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10443 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10444 && integer_zerop (arg1))
10445 return constant_boolean_node (code != EQ_EXPR, type);
10447 /* If this is an equality comparison of the address of two non-weak,
10448 unaliased symbols neither of which are extern (since we do not
10449 have access to attributes for externs), then we know the result. */
10450 if (TREE_CODE (arg0) == ADDR_EXPR
10451 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10452 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10453 && ! lookup_attribute ("alias",
10454 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10455 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10456 && TREE_CODE (arg1) == ADDR_EXPR
10457 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10458 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10459 && ! lookup_attribute ("alias",
10460 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10461 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10463 /* We know that we're looking at the address of two
10464 non-weak, unaliased, static _DECL nodes.
10466 It is both wasteful and incorrect to call operand_equal_p
10467 to compare the two ADDR_EXPR nodes. It is wasteful in that
10468 all we need to do is test pointer equality for the arguments
10469 to the two ADDR_EXPR nodes. It is incorrect to use
10470 operand_equal_p as that function is NOT equivalent to a
10471 C equality test. It can in fact return false for two
10472 objects which would test as equal using the C equality
10473 operator.  */
10474 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10475 return constant_boolean_node (equal
10476 ? code == EQ_EXPR : code != EQ_EXPR,
10477 type);
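/* Editor's illustration (not part of GCC): with two distinct,
   non-weak, non-aliased file-scope objects, the address comparison
   folds to a constant.  A sketch with hypothetical variables:

     static int x, y;
     int same_object (void) { return &x == &y; }   /* folds to 0 */

   Weak and aliased symbols are excluded above because two names can
   then denote one object (or a weak symbol can resolve to null).  */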
10480 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10481 a MINUS_EXPR of a constant, we can convert it into a comparison with
10482 a revised constant as long as no overflow occurs. */
10483 if (TREE_CODE (arg1) == INTEGER_CST
10484 && (TREE_CODE (arg0) == PLUS_EXPR
10485 || TREE_CODE (arg0) == MINUS_EXPR)
10486 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10487 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10488 ? MINUS_EXPR : PLUS_EXPR,
10489 fold_convert (TREE_TYPE (arg0), arg1),
10490 TREE_OPERAND (arg0, 1), 0))
10491 && ! TREE_CONSTANT_OVERFLOW (tem))
10492 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
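/* Editor's illustration (not part of GCC): x + 5 == 7 becomes
   x == 2, valid because computing 7 - 5 does not overflow.  A
   sketch with hypothetical helpers:

     int before (int x) { return x + 5 == 7; }
     int after  (int x) { return x == 2; }
*/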
10494 /* Similarly for a NEGATE_EXPR. */
10495 if (TREE_CODE (arg0) == NEGATE_EXPR
10496 && TREE_CODE (arg1) == INTEGER_CST
10497 && 0 != (tem = negate_expr (arg1))
10498 && TREE_CODE (tem) == INTEGER_CST
10499 && ! TREE_CONSTANT_OVERFLOW (tem))
10500 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10502 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10503 for !=. Don't do this for ordered comparisons due to overflow. */
10504 if (TREE_CODE (arg0) == MINUS_EXPR
10505 && integer_zerop (arg1))
10506 return fold_build2 (code, type,
10507 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10509 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10510 if (TREE_CODE (arg0) == ABS_EXPR
10511 && (integer_zerop (arg1) || real_zerop (arg1)))
10512 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10514 /* If this is an EQ or NE comparison with zero and ARG0 is
10515 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10516 two operations, but the latter can be done in one less insn
10517 on machines that have only two-operand insns or on which a
10518 constant cannot be the first operand. */
10519 if (TREE_CODE (arg0) == BIT_AND_EXPR
10520 && integer_zerop (arg1))
10522 tree arg00 = TREE_OPERAND (arg0, 0);
10523 tree arg01 = TREE_OPERAND (arg0, 1);
10524 if (TREE_CODE (arg00) == LSHIFT_EXPR
10525 && integer_onep (TREE_OPERAND (arg00, 0)))
10526 return
10527 fold_build2 (code, type,
10528 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10529 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10530 arg01, TREE_OPERAND (arg00, 1)),
10531 fold_convert (TREE_TYPE (arg0),
10532 integer_one_node)),
10533 arg1);
10534 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10535 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10536 return
10537 fold_build2 (code, type,
10538 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10539 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10540 arg00, TREE_OPERAND (arg01, 1)),
10541 fold_convert (TREE_TYPE (arg0),
10542 integer_one_node)),
10543 arg1);
10546 /* If this is an NE or EQ comparison of zero against the result of a
10547 signed MOD operation whose second operand is a power of 2, make
10548 the MOD operation unsigned since it is simpler and equivalent. */
10549 if (integer_zerop (arg1)
10550 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10551 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10552 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10553 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10554 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10557 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10558 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10559 fold_convert (newtype,
10560 TREE_OPERAND (arg0, 0)),
10561 fold_convert (newtype,
10562 TREE_OPERAND (arg0, 1)));
10564 return fold_build2 (code, type, newmod,
10565 fold_convert (newtype, arg1));
10568 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10569 C1 is a valid shift constant, and C2 is a power of two, i.e.
10570 a single bit.  */
10571 if (TREE_CODE (arg0) == BIT_AND_EXPR
10572 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10573 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10574 == INTEGER_CST
10575 && integer_pow2p (TREE_OPERAND (arg0, 1))
10576 && integer_zerop (arg1))
10578 tree itype = TREE_TYPE (arg0);
10579 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10580 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10582 /* Check for a valid shift count. */
10583 if (TREE_INT_CST_HIGH (arg001) == 0
10584 && TREE_INT_CST_LOW (arg001) < prec)
10586 tree arg01 = TREE_OPERAND (arg0, 1);
10587 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10588 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10589 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10590 can be rewritten as (X & (C2 << C1)) != 0. */
10591 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10593 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10594 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10595 return fold_build2 (code, type, tem, arg1);
10597 /* Otherwise, for signed (arithmetic) shifts,
10598 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10599 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10600 else if (!TYPE_UNSIGNED (itype))
10601 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10602 arg000, build_int_cst (itype, 0));
10603 /* Otherwise, for unsigned (logical) shifts,
10604 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10605 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10607 return omit_one_operand (type,
10608 code == EQ_EXPR ? integer_one_node
10609 : integer_zero_node,
10610 arg000);
10614 /* If this is an NE comparison of zero with an AND of one, remove the
10615 comparison since the AND will give the correct value. */
10616 if (code == NE_EXPR
10617 && integer_zerop (arg1)
10618 && TREE_CODE (arg0) == BIT_AND_EXPR
10619 && integer_onep (TREE_OPERAND (arg0, 1)))
10620 return fold_convert (type, arg0);
10622 /* If we have (A & C) == C where C is a power of 2, convert this into
10623 (A & C) != 0. Similarly for NE_EXPR. */
10624 if (TREE_CODE (arg0) == BIT_AND_EXPR
10625 && integer_pow2p (TREE_OPERAND (arg0, 1))
10626 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10627 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10628 arg0, fold_convert (TREE_TYPE (arg0),
10629 integer_zero_node));
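/* Editor's illustration (not part of GCC): with C a single bit,
   (a & 4) == 4 and (a & 4) != 0 test the same condition, since
   a & 4 can only be 0 or 4.  A sketch:

     int before (int a) { return (a & 4) == 4; }
     int after  (int a) { return (a & 4) != 0; }

   The compare-against-zero form is the one later passes and most
   targets handle best.  */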
10631 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10632 bit, then fold the expression into A < 0 or A >= 0. */
10633 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10637 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10638 Similarly for NE_EXPR. */
10639 if (TREE_CODE (arg0) == BIT_AND_EXPR
10640 && TREE_CODE (arg1) == INTEGER_CST
10641 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10643 tree notc = fold_build1 (BIT_NOT_EXPR,
10644 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10645 TREE_OPERAND (arg0, 1));
10646 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10647 arg1, notc);
10648 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10649 if (integer_nonzerop (dandnotc))
10650 return omit_one_operand (type, rslt, arg0);
10653 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10654 Similarly for NE_EXPR. */
10655 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10656 && TREE_CODE (arg1) == INTEGER_CST
10657 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10659 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10660 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10661 TREE_OPERAND (arg0, 1), notd);
10662 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10663 if (integer_nonzerop (candnotd))
10664 return omit_one_operand (type, rslt, arg0);
10667 /* If this is a comparison of a field, we may be able to simplify it. */
10668 if (((TREE_CODE (arg0) == COMPONENT_REF
10669 && lang_hooks.can_use_bit_fields_p ())
10670 || TREE_CODE (arg0) == BIT_FIELD_REF)
10671 /* Handle the constant case even without -O
10672 to make sure the warnings are given. */
10673 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10675 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10676 if (t1)
10677 return t1;
10680 /* Optimize comparisons of strlen vs zero to a compare of the
10681 first character of the string vs zero. To wit,
10682 strlen(ptr) == 0 => *ptr == 0
10683 strlen(ptr) != 0 => *ptr != 0
10684 Other cases should reduce to one of these two (or a constant)
10685 due to the return value of strlen being unsigned. */
10686 if (TREE_CODE (arg0) == CALL_EXPR
10687 && integer_zerop (arg1))
10689 tree fndecl = get_callee_fndecl (arg0);
10690 tree arglist;
10692 if (fndecl
10693 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10694 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10695 && (arglist = TREE_OPERAND (arg0, 1))
10696 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10697 && ! TREE_CHAIN (arglist))
10699 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10700 return fold_build2 (code, type, iref,
10701 build_int_cst (TREE_TYPE (iref), 0));
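/* Editor's illustration (not part of GCC): because strlen returns
   an unsigned result, strlen (p) == 0 holds exactly when the first
   character is NUL.  A sketch, assuming <string.h>:

     #include <string.h>
     int empty_before (const char *p) { return strlen (p) == 0; }
     int empty_after  (const char *p) { return *p == 0; }

   The folded form avoids walking the whole string.  */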
10705 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10706 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10707 if (TREE_CODE (arg0) == RSHIFT_EXPR
10708 && integer_zerop (arg1)
10709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10711 tree arg00 = TREE_OPERAND (arg0, 0);
10712 tree arg01 = TREE_OPERAND (arg0, 1);
10713 tree itype = TREE_TYPE (arg00);
10714 if (TREE_INT_CST_HIGH (arg01) == 0
10715 && TREE_INT_CST_LOW (arg01)
10716 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10718 if (TYPE_UNSIGNED (itype))
10720 itype = lang_hooks.types.signed_type (itype);
10721 arg00 = fold_convert (itype, arg00);
10723 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10724 type, arg00, build_int_cst (itype, 0));
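/* Editor's illustration (not part of GCC): shifting a 32-bit value
   right by 31 leaves only the sign bit, so the test is a sign test.
   A sketch, assuming 32-bit int:

     int before (int x) { return (x >> 31) != 0; }
     int after  (int x) { return x < 0; }

   For an unsigned operand the code above first converts to the
   corresponding signed type.  */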
10728 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10729 if (integer_zerop (arg1)
10730 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10731 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10732 TREE_OPERAND (arg0, 1));
10734 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10735 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10736 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10737 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10738 build_int_cst (TREE_TYPE (arg1), 0));
10739 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10740 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10741 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10742 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10743 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10744 build_int_cst (TREE_TYPE (arg1), 0));
10746 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10747 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10748 && TREE_CODE (arg1) == INTEGER_CST
10749 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10750 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10751 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10752 TREE_OPERAND (arg0, 1), arg1));
10754 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10755 (X & C) == 0 when C is a single bit. */
10756 if (TREE_CODE (arg0) == BIT_AND_EXPR
10757 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10758 && integer_zerop (arg1)
10759 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10761 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10762 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10763 TREE_OPERAND (arg0, 1));
10764 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10765 type, tem,
10766 build_int_cst (TREE_TYPE (tem), 0));
10768 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10769 constant C is a power of two, i.e. a single bit. */
10770 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10771 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10772 && integer_zerop (arg1)
10773 && integer_pow2p (TREE_OPERAND (arg0, 1))
10774 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10775 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10777 tree arg00 = TREE_OPERAND (arg0, 0);
10778 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10779 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10782 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10783 when C is a power of two, i.e. a single bit.  */
10784 if (TREE_CODE (arg0) == BIT_AND_EXPR
10785 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10786 && integer_zerop (arg1)
10787 && integer_pow2p (TREE_OPERAND (arg0, 1))
10788 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10789 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10791 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10792 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10793 arg000, TREE_OPERAND (arg0, 1));
10794 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10795 tem, build_int_cst (TREE_TYPE (tem), 0));
10798 if (integer_zerop (arg1)
10799 && tree_expr_nonzero_p (arg0))
10801 tree res = constant_boolean_node (code == NE_EXPR, type);
10802 return omit_one_operand (type, res, arg0);
10805 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10806 if (TREE_CODE (arg0) == NEGATE_EXPR
10807 && TREE_CODE (arg1) == NEGATE_EXPR)
10808 return fold_build2 (code, type,
10809 TREE_OPERAND (arg0, 0),
10810 TREE_OPERAND (arg1, 0));
10812 return NULL_TREE;
10814 case LT_EXPR:
10815 case GT_EXPR:
10816 case LE_EXPR:
10817 case GE_EXPR:
10818 tem = fold_comparison (code, type, op0, op1);
10819 if (tem != NULL_TREE)
10820 return tem;
10822 /* Transform comparisons of the form X +- C CMP X. */
10823 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10825 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10826 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10827 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10828 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10829 && !(flag_wrapv || flag_trapv))))
10831 tree arg01 = TREE_OPERAND (arg0, 1);
10832 enum tree_code code0 = TREE_CODE (arg0);
10833 int is_positive;
10835 if (TREE_CODE (arg01) == REAL_CST)
10836 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10837 else
10838 is_positive = tree_int_cst_sgn (arg01);
10840 /* (X - c) > X becomes false. */
10841 if (code == GT_EXPR
10842 && ((code0 == MINUS_EXPR && is_positive >= 0)
10843 || (code0 == PLUS_EXPR && is_positive <= 0)))
10844 return constant_boolean_node (0, type);
10846 /* Likewise (X + c) < X becomes false. */
10847 if (code == LT_EXPR
10848 && ((code0 == PLUS_EXPR && is_positive >= 0)
10849 || (code0 == MINUS_EXPR && is_positive <= 0)))
10850 return constant_boolean_node (0, type);
10852 /* Convert (X - c) <= X to true. */
10853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10854 && code == LE_EXPR
10855 && ((code0 == MINUS_EXPR && is_positive >= 0)
10856 || (code0 == PLUS_EXPR && is_positive <= 0)))
10857 return constant_boolean_node (1, type);
10859 /* Convert (X + c) >= X to true. */
10860 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10861 && code == GE_EXPR
10862 && ((code0 == PLUS_EXPR && is_positive >= 0)
10863 || (code0 == MINUS_EXPR && is_positive <= 0)))
10864 return constant_boolean_node (1, type);
10866 if (TREE_CODE (arg01) == INTEGER_CST)
10868 /* Convert X + c > X and X - c < X to true for integers. */
10869 if (code == GT_EXPR
10870 && ((code0 == PLUS_EXPR && is_positive > 0)
10871 || (code0 == MINUS_EXPR && is_positive < 0)))
10872 return constant_boolean_node (1, type);
10874 if (code == LT_EXPR
10875 && ((code0 == MINUS_EXPR && is_positive > 0)
10876 || (code0 == PLUS_EXPR && is_positive < 0)))
10877 return constant_boolean_node (1, type);
10879 /* Convert X + c <= X and X - c >= X to false for integers. */
10880 if (code == LE_EXPR
10881 && ((code0 == PLUS_EXPR && is_positive > 0)
10882 || (code0 == MINUS_EXPR && is_positive < 0)))
10883 return constant_boolean_node (0, type);
10885 if (code == GE_EXPR
10886 && ((code0 == MINUS_EXPR && is_positive > 0)
10887 || (code0 == PLUS_EXPR && is_positive < 0)))
10888 return constant_boolean_node (0, type);
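/* Editor's illustration (not part of GCC): for signed operands,
   where overflow is undefined, x + 1 > x folds directly to 1:

     int always_true (int x) { return x + 1 > x; }   /* folds to 1 */

   Under -fwrapv or -ftrapv (or for unsigned types) the fold is
   suppressed, since x == INT_MAX would make it false.  */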
10892 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10893 This transformation affects the cases which are handled in later
10894 optimizations involving comparisons with non-negative constants. */
10895 if (TREE_CODE (arg1) == INTEGER_CST
10896 && TREE_CODE (arg0) != INTEGER_CST
10897 && tree_int_cst_sgn (arg1) > 0)
10899 if (code == GE_EXPR)
10901 arg1 = const_binop (MINUS_EXPR, arg1,
10902 build_int_cst (TREE_TYPE (arg1), 1), 0);
10903 return fold_build2 (GT_EXPR, type, arg0,
10904 fold_convert (TREE_TYPE (arg0), arg1));
10906 if (code == LT_EXPR)
10908 arg1 = const_binop (MINUS_EXPR, arg1,
10909 build_int_cst (TREE_TYPE (arg1), 1), 0);
10910 return fold_build2 (LE_EXPR, type, arg0,
10911 fold_convert (TREE_TYPE (arg0), arg1));
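/* Editor's illustration (not part of GCC): with a positive constant,
   x >= 5 is canonicalized to x > 4, and x < 5 to x <= 4.  A sketch:

     int before (int x) { return x >= 5; }
     int after  (int x) { return x > 4; }

   Later transformations in this function key off the GT/LE forms.  */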
10915 /* Comparisons with the highest or lowest possible integer of
10916 the specified size will have known values. */
10918 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10920 if (TREE_CODE (arg1) == INTEGER_CST
10921 && ! TREE_CONSTANT_OVERFLOW (arg1)
10922 && width <= 2 * HOST_BITS_PER_WIDE_INT
10923 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10924 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10926 HOST_WIDE_INT signed_max_hi;
10927 unsigned HOST_WIDE_INT signed_max_lo;
10928 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10930 if (width <= HOST_BITS_PER_WIDE_INT)
10931 {
10932 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10933 - 1;
10934 signed_max_hi = 0;
10935 max_hi = 0;
10937 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10938 {
10939 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10940 min_lo = 0;
10941 min_hi = 0;
10942 }
10943 else
10944 {
10945 max_lo = signed_max_lo;
10946 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10947 min_hi = -1;
10948 }
10949 }
10950 else
10951 {
10952 width -= HOST_BITS_PER_WIDE_INT;
10953 signed_max_lo = -1;
10954 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10955 - 1;
10956 max_lo = -1;
10957 min_lo = 0;
10959 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10960 {
10961 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10962 min_hi = 0;
10963 }
10964 else
10965 {
10966 max_hi = signed_max_hi;
10967 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10968 }
10969 }
10971 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10972 && TREE_INT_CST_LOW (arg1) == max_lo)
10973 switch (code)
10974 {
10975 case GT_EXPR:
10976 return omit_one_operand (type, integer_zero_node, arg0);
10978 case GE_EXPR:
10979 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10981 case LE_EXPR:
10982 return omit_one_operand (type, integer_one_node, arg0);
10984 case LT_EXPR:
10985 return fold_build2 (NE_EXPR, type, arg0, arg1);
10987 /* The GE_EXPR and LT_EXPR cases above are not normally
10988 reached because of previous transformations.  */
10990 default:
10991 break;
10992 }
10993 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10994 == max_hi
10995 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10996 switch (code)
10997 {
10998 case GT_EXPR:
10999 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11000 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11001 case LE_EXPR:
11002 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11003 return fold_build2 (NE_EXPR, type, arg0, arg1);
11004 default:
11005 break;
11006 }
11007 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11008 == min_hi
11009 && TREE_INT_CST_LOW (arg1) == min_lo)
11010 switch (code)
11011 {
11012 case LT_EXPR:
11013 return omit_one_operand (type, integer_zero_node, arg0);
11015 case LE_EXPR:
11016 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11018 case GE_EXPR:
11019 return omit_one_operand (type, integer_one_node, arg0);
11021 case GT_EXPR:
11022 return fold_build2 (NE_EXPR, type, op0, op1);
11024 default:
11025 break;
11026 }
11027 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11028 == min_hi
11029 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11030 switch (code)
11031 {
11032 case GE_EXPR:
11033 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11034 return fold_build2 (NE_EXPR, type, arg0, arg1);
11035 case LT_EXPR:
11036 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11037 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11038 default:
11039 break;
11040 }
11042 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11043 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11044 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11045 /* signed_type does not work on pointer types. */
11046 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11048 /* The following case also applies to X < signed_max+1
11049 and X >= signed_max+1 because of previous transformations.  */
11050 if (code == LE_EXPR || code == GT_EXPR)
11052 tree st0, st1;
11053 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11054 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11055 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11056 type, fold_convert (st0, arg0),
11057 build_int_cst (st1, 0));
11063 /* If we are comparing an ABS_EXPR with a constant, we can
11064 convert all the cases into explicit comparisons, but they may
11065 well not be faster than doing the ABS and one comparison.
11066 But ABS (X) <= C is a range comparison, which becomes a subtraction
11067 and a comparison, and is probably faster. */
11068 if (code == LE_EXPR
11069 && TREE_CODE (arg1) == INTEGER_CST
11070 && TREE_CODE (arg0) == ABS_EXPR
11071 && ! TREE_SIDE_EFFECTS (arg0)
11072 && (0 != (tem = negate_expr (arg1)))
11073 && TREE_CODE (tem) == INTEGER_CST
11074 && ! TREE_CONSTANT_OVERFLOW (tem))
11075 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11076 build2 (GE_EXPR, type,
11077 TREE_OPERAND (arg0, 0), tem),
11078 build2 (LE_EXPR, type,
11079 TREE_OPERAND (arg0, 0), arg1));
11081 /* Convert ABS_EXPR<x> >= 0 to true. */
11082 if (code == GE_EXPR
11083 && tree_expr_nonnegative_p (arg0)
11084 && (integer_zerop (arg1)
11085 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11086 && real_zerop (arg1))))
11087 return omit_one_operand (type, integer_one_node, arg0);
11089 /* Convert ABS_EXPR<x> < 0 to false. */
11090 if (code == LT_EXPR
11091 && tree_expr_nonnegative_p (arg0)
11092 && (integer_zerop (arg1) || real_zerop (arg1)))
11093 return omit_one_operand (type, integer_zero_node, arg0);
11095 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11096 and similarly for >= into !=. */
11097 if ((code == LT_EXPR || code == GE_EXPR)
11098 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11099 && TREE_CODE (arg1) == LSHIFT_EXPR
11100 && integer_onep (TREE_OPERAND (arg1, 0)))
11101 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11102 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11103 TREE_OPERAND (arg1, 1)),
11104 build_int_cst (TREE_TYPE (arg0), 0));
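/* Editor's illustration (not part of GCC): for unsigned x,
   x < (1u << y) holds exactly when no bit at position y or above is
   set.  A sketch, assuming the shift count is in range:

     int before (unsigned x, unsigned y) { return x < (1u << y); }
     int after  (unsigned x, unsigned y) { return (x >> y) == 0; }
*/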
11106 if ((code == LT_EXPR || code == GE_EXPR)
11107 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11108 && (TREE_CODE (arg1) == NOP_EXPR
11109 || TREE_CODE (arg1) == CONVERT_EXPR)
11110 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11111 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11112 return
11113 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11114 fold_convert (TREE_TYPE (arg0),
11115 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11116 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11117 1)),
11118 build_int_cst (TREE_TYPE (arg0), 0));
11122 case UNORDERED_EXPR:
11123 case ORDERED_EXPR:
11124 case UNLT_EXPR:
11125 case UNLE_EXPR:
11126 case UNGT_EXPR:
11127 case UNGE_EXPR:
11128 case UNEQ_EXPR:
11129 case LTGT_EXPR:
11130 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11132 t1 = fold_relational_const (code, type, arg0, arg1);
11133 if (t1 != NULL_TREE)
11134 return t1;
11137 /* If the first operand is NaN, the result is constant. */
11138 if (TREE_CODE (arg0) == REAL_CST
11139 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11140 && (code != LTGT_EXPR || ! flag_trapping_math))
11142 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11143 ? integer_zero_node
11144 : integer_one_node;
11145 return omit_one_operand (type, t1, arg1);
11148 /* If the second operand is NaN, the result is constant. */
11149 if (TREE_CODE (arg1) == REAL_CST
11150 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11151 && (code != LTGT_EXPR || ! flag_trapping_math))
11153 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11154 ? integer_zero_node
11155 : integer_one_node;
11156 return omit_one_operand (type, t1, arg0);
11159 /* Simplify unordered comparison of something with itself. */
11160 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11161 && operand_equal_p (arg0, arg1, 0))
11162 return constant_boolean_node (1, type);
11164 if (code == LTGT_EXPR
11165 && !flag_trapping_math
11166 && operand_equal_p (arg0, arg1, 0))
11167 return constant_boolean_node (0, type);
11169 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11171 tree targ0 = strip_float_extensions (arg0);
11172 tree targ1 = strip_float_extensions (arg1);
11173 tree newtype = TREE_TYPE (targ0);
11175 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11176 newtype = TREE_TYPE (targ1);
11178 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11179 return fold_build2 (code, type, fold_convert (newtype, targ0),
11180 fold_convert (newtype, targ1));
11185 case COMPOUND_EXPR:
11186 /* When pedantic, a compound expression can be neither an lvalue
11187 nor an integer constant expression. */
11188 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11190 /* Don't let (0, 0) be null pointer constant. */
11191 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11192 : fold_convert (type, arg1);
11193 return pedantic_non_lvalue (tem);
11195 case COMPLEX_EXPR:
11196 if ((TREE_CODE (arg0) == REAL_CST
11197 && TREE_CODE (arg1) == REAL_CST)
11198 || (TREE_CODE (arg0) == INTEGER_CST
11199 && TREE_CODE (arg1) == INTEGER_CST))
11200 return build_complex (type, arg0, arg1);
11201 return NULL_TREE;
11203 case ASSERT_EXPR:
11204 /* An ASSERT_EXPR should never be passed to fold_binary.  */
11205 gcc_unreachable ();
11207 default:
11208 return NULL_TREE;
11209 } /* switch (code) */
11212 /* Callback for walk_tree, looking for LABEL_EXPR.
11213 Return *TP if it is a LABEL_EXPR; otherwise return NULL_TREE.
11214 Do not check the sub-tree of GOTO_EXPR.  */
11216 static tree
11217 contains_label_1 (tree *tp,
11218 int *walk_subtrees,
11219 void *data ATTRIBUTE_UNUSED)
11221 switch (TREE_CODE (*tp))
11222 {
11223 case LABEL_EXPR:
11224 return *tp;
11225 case GOTO_EXPR:
11226 *walk_subtrees = 0;
11227 /* ... fall through ... */
11228 default:
11229 return NULL_TREE;
11230 }
11233 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11234 accessible from outside the sub-tree. Returns NULL_TREE if no
11235 addressable label is found. */
11237 static bool
11238 contains_label_p (tree st)
11240 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11243 /* Fold a ternary expression of code CODE and type TYPE with operands
11244 OP0, OP1, and OP2. Return the folded expression if folding is
11245 successful. Otherwise, return NULL_TREE. */
11247 tree
11248 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11250 tree tem;
11251 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11252 enum tree_code_class kind = TREE_CODE_CLASS (code);
11254 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11255 && TREE_CODE_LENGTH (code) == 3);
11257 /* Strip any conversions that don't change the mode. This is safe
11258 for every expression, except for a comparison expression because
11259 its signedness is derived from its operands. So, in the latter
11260 case, only strip conversions that don't change the signedness.
11262 Note that this is done as an internal manipulation within the
11263 constant folder, in order to find the simplest representation of
11264 the arguments so that their form can be studied.  In any case,
11265 the appropriate type conversions should be put back in the tree
11266 that will get out of the constant folder.  */
11268 if (op0)
11269 {
11270 arg0 = op0;
11271 STRIP_NOPS (arg0);
11272 }
11274 if (op1)
11275 {
11276 arg1 = op1;
11277 STRIP_NOPS (arg1);
11278 }
11279 switch (code)
11280 {
11281 case COMPONENT_REF:
11282 if (TREE_CODE (arg0) == CONSTRUCTOR
11283 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11285 unsigned HOST_WIDE_INT idx;
11286 tree field, value;
11287 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11288 if (field == arg1)
11289 return value;
11291 break;
11293 case COND_EXPR:
11294 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11295 so all simple results must be passed through pedantic_non_lvalue. */
11296 if (TREE_CODE (arg0) == INTEGER_CST)
11298 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11299 tem = integer_zerop (arg0) ? op2 : op1;
11300 /* Only optimize constant conditions when the selected branch
11301 has the same type as the COND_EXPR. This avoids optimizing
11302 away "c ? x : throw", where the throw has a void type.
11303 Avoid throwing away an operand that contains a label.  */
11304 if ((!TREE_SIDE_EFFECTS (unused_op)
11305 || !contains_label_p (unused_op))
11306 && (! VOID_TYPE_P (TREE_TYPE (tem))
11307 || VOID_TYPE_P (type)))
11308 return pedantic_non_lvalue (tem);
11311 if (operand_equal_p (arg1, op2, 0))
11312 return pedantic_omit_one_operand (type, arg1, arg0);
11314 /* If we have A op B ? A : C, we may be able to convert this to a
11315 simpler expression, depending on the operation and the values
11316 of B and C. Signed zeros prevent all of these transformations,
11317 for reasons given above each one.
11319 Also try swapping the arguments and inverting the conditional. */
11320 if (COMPARISON_CLASS_P (arg0)
11321 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11322 arg1, TREE_OPERAND (arg0, 1))
11323 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11325 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11326 if (tem)
11327 return tem;
11330 if (COMPARISON_CLASS_P (arg0)
11331 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11332 op2,
11333 TREE_OPERAND (arg0, 1))
11334 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11336 tem = fold_truth_not_expr (arg0);
11337 if (tem && COMPARISON_CLASS_P (tem))
11339 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11340 if (tem)
11341 return tem;
11345 /* If the second operand is simpler than the third, swap them
11346 since that produces better jump optimization results. */
11347 if (truth_value_p (TREE_CODE (arg0))
11348 && tree_swap_operands_p (op1, op2, false))
11350 /* See if this can be inverted. If it can't, possibly because
11351 it was a floating-point inequality comparison, don't do
11352 anything.  */
11353 tem = fold_truth_not_expr (arg0);
11354 if (tem)
11355 return fold_build3 (code, type, tem, op2, op1);
11358 /* Convert A ? 1 : 0 to simply A. */
11359 if (integer_onep (op1)
11360 && integer_zerop (op2)
11361 /* If we try to convert OP0 to our type, the
11362 call to fold will try to move the conversion inside
11363 a COND, which will recurse. In that case, the COND_EXPR
11364 is probably the best choice, so leave it alone. */
11365 && type == TREE_TYPE (arg0))
11366 return pedantic_non_lvalue (arg0);
11368 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11369 over COND_EXPR in cases such as floating point comparisons. */
11370 if (integer_zerop (op1)
11371 && integer_onep (op2)
11372 && truth_value_p (TREE_CODE (arg0)))
11373 return pedantic_non_lvalue (fold_convert (type,
11374 invert_truthvalue (arg0)));
11376 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11377 if (TREE_CODE (arg0) == LT_EXPR
11378 && integer_zerop (TREE_OPERAND (arg0, 1))
11379 && integer_zerop (op2)
11380 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11382 /* sign_bit_p only checks ARG1 bits within A's precision.
11383 If <sign bit of A> has wider type than A, bits outside
11384 of A's precision in <sign bit of A> need to be checked.
11385 If they are all 0, this optimization needs to be done
11386 in unsigned A's type, if they are all 1 in signed A's type,
11387 otherwise this can't be done. */
11388 if (TYPE_PRECISION (TREE_TYPE (tem))
11389 < TYPE_PRECISION (TREE_TYPE (arg1))
11390 && TYPE_PRECISION (TREE_TYPE (tem))
11391 < TYPE_PRECISION (type))
11393 unsigned HOST_WIDE_INT mask_lo;
11394 HOST_WIDE_INT mask_hi;
11395 int inner_width, outer_width;
11396 tree tem_type;
11398 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11399 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11400 if (outer_width > TYPE_PRECISION (type))
11401 outer_width = TYPE_PRECISION (type);
11403 if (outer_width > HOST_BITS_PER_WIDE_INT)
11405 mask_hi = ((unsigned HOST_WIDE_INT) -1
11406 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11407 mask_lo = -1;
11408 }
11409 else
11410 {
11411 mask_hi = 0;
11412 mask_lo = ((unsigned HOST_WIDE_INT) -1
11413 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11415 if (inner_width > HOST_BITS_PER_WIDE_INT)
11417 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11418 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11419 mask_lo = 0;
11420 }
11421 else
11422 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11423 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11425 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11426 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11428 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11429 tem = fold_convert (tem_type, tem);
11431 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11432 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11434 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11435 tem = fold_convert (tem_type, tem);
11437 else
11438 tem = NULL_TREE;
11441 if (tem)
11442 return fold_convert (type,
11443 fold_build2 (BIT_AND_EXPR,
11444 TREE_TYPE (tem), tem,
11445 fold_convert (TREE_TYPE (tem),
11446 arg1)));
11449 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11450 already handled above. */
11451 if (TREE_CODE (arg0) == BIT_AND_EXPR
11452 && integer_onep (TREE_OPERAND (arg0, 1))
11453 && integer_zerop (op2)
11454 && integer_pow2p (arg1))
11456 tree tem = TREE_OPERAND (arg0, 0);
11458 if (TREE_CODE (tem) == RSHIFT_EXPR
11459 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11460 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11461 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11462 return fold_build2 (BIT_AND_EXPR, type,
11463 TREE_OPERAND (tem, 0), arg1);
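/* Editor's illustration (not part of GCC): testing bit N and then
   materializing 1 << N is the same as masking bit N directly.  A
   sketch, assuming the shift count is in range:

     unsigned before (unsigned a, unsigned n)
       { return ((a >> n) & 1) ? (1u << n) : 0; }
     unsigned after (unsigned a, unsigned n)
       { return a & (1u << n); }
*/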
11466 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11467 is probably obsolete because the first operand should be a
11468 truth value (that's why we have the two cases above), but let's
11469 leave it in until we can confirm this for all front-ends. */
11470 if (integer_zerop (op2)
11471 && TREE_CODE (arg0) == NE_EXPR
11472 && integer_zerop (TREE_OPERAND (arg0, 1))
11473 && integer_pow2p (arg1)
11474 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11475 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11476 arg1, OEP_ONLY_CONST))
11477 return pedantic_non_lvalue (fold_convert (type,
11478 TREE_OPERAND (arg0, 0)));
11480 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11481 if (integer_zerop (op2)
11482 && truth_value_p (TREE_CODE (arg0))
11483 && truth_value_p (TREE_CODE (arg1)))
11484 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11485 fold_convert (type, arg0),
11486 fold_convert (type, arg1));
11488 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11489 if (integer_onep (op2)
11490 && truth_value_p (TREE_CODE (arg0))
11491 && truth_value_p (TREE_CODE (arg1)))
11493 /* Only perform transformation if ARG0 is easily inverted. */
11494 tem = fold_truth_not_expr (arg0);
11495 if (tem)
11496 return fold_build2 (TRUTH_ORIF_EXPR, type,
11497 fold_convert (type, tem),
11498 fold_convert (type, arg1));
11501 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11502 if (integer_zerop (arg1)
11503 && truth_value_p (TREE_CODE (arg0))
11504 && truth_value_p (TREE_CODE (op2)))
11506 /* Only perform transformation if ARG0 is easily inverted. */
11507 tem = fold_truth_not_expr (arg0);
11508 if (tem)
11509 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11510 fold_convert (type, tem),
11511 fold_convert (type, op2));
11514 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11515 if (integer_onep (arg1)
11516 && truth_value_p (TREE_CODE (arg0))
11517 && truth_value_p (TREE_CODE (op2)))
11518 return fold_build2 (TRUTH_ORIF_EXPR, type,
11519 fold_convert (type, arg0),
11520 fold_convert (type, op2));
11522 return NULL_TREE;
11524 case CALL_EXPR:
11525 /* Check for a built-in function. */
11526 if (TREE_CODE (op0) == ADDR_EXPR
11527 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11528 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11529 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11530 return NULL_TREE;
11532 case BIT_FIELD_REF:
11533 if (TREE_CODE (arg0) == VECTOR_CST
11534 && type == TREE_TYPE (TREE_TYPE (arg0))
11535 && host_integerp (arg1, 1)
11536 && host_integerp (op2, 1))
11538 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11539 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11541 if (width != 0
11542 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11543 && (idx % width) == 0
11544 && (idx = idx / width)
11545 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11547 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11548 while (idx-- > 0 && elements)
11549 elements = TREE_CHAIN (elements);
11550 if (elements)
11551 return TREE_VALUE (elements);
11552 else
11553 return fold_convert (type, integer_zero_node);
11555 break;
11557 default:
11558 return NULL_TREE;
11560 } /* switch (code) */
11563 /* Perform constant folding and related simplification of EXPR.
11564 The related simplifications include x*1 => x, x*0 => 0, etc.,
11565 and application of the associative law.
11566 NOP_EXPR conversions may be removed freely (as long as we
11567 are careful not to change the type of the overall expression).
11568 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11569 but we can constant-fold them if they have constant operands. */
11571 #ifdef ENABLE_FOLD_CHECKING
11572 # define fold(x) fold_1 (x)
11573 static tree fold_1 (tree);
11574 #endif
11576 tree
11577 fold (tree expr)
11579 const tree t = expr;
11580 enum tree_code code = TREE_CODE (t);
11581 enum tree_code_class kind = TREE_CODE_CLASS (code);
11582 tree tem;
11584 /* Return right away if a constant. */
11585 if (kind == tcc_constant)
11586 return t;
11588 if (IS_EXPR_CODE_CLASS (kind))
11590 tree type = TREE_TYPE (t);
11591 tree op0, op1, op2;
11593 switch (TREE_CODE_LENGTH (code))
11594 {
11595 case 1:
11596 op0 = TREE_OPERAND (t, 0);
11597 tem = fold_unary (code, type, op0);
11598 return tem ? tem : expr;
11599 case 2:
11600 op0 = TREE_OPERAND (t, 0);
11601 op1 = TREE_OPERAND (t, 1);
11602 tem = fold_binary (code, type, op0, op1);
11603 return tem ? tem : expr;
11604 case 3:
11605 op0 = TREE_OPERAND (t, 0);
11606 op1 = TREE_OPERAND (t, 1);
11607 op2 = TREE_OPERAND (t, 2);
11608 tem = fold_ternary (code, type, op0, op1, op2);
11609 return tem ? tem : expr;
11610 default:
11611 break;
11612 }
11613 }
11615 switch (code)
11616 {
11617 case CONST_DECL:
11618 return fold (DECL_INITIAL (t));
11620 default:
11621 return t;
11622 } /* switch (code) */
11625 #ifdef ENABLE_FOLD_CHECKING
11628 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11629 static void fold_check_failed (tree, tree);
11630 void print_fold_checksum (tree);
11632 /* When --enable-checking=fold, compute a digest of expr before
11633 and after the actual fold call to verify that fold did not
11634 accidentally change the original expr.  */
11636 tree
11637 fold (tree expr)
11639 tree ret;
11640 struct md5_ctx ctx;
11641 unsigned char checksum_before[16], checksum_after[16];
11642 htab_t ht;
11644 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11645 md5_init_ctx (&ctx);
11646 fold_checksum_tree (expr, &ctx, ht);
11647 md5_finish_ctx (&ctx, checksum_before);
11648 htab_empty (ht);
11650 ret = fold_1 (expr);
11652 md5_init_ctx (&ctx);
11653 fold_checksum_tree (expr, &ctx, ht);
11654 md5_finish_ctx (&ctx, checksum_after);
11655 htab_delete (ht);
11657 if (memcmp (checksum_before, checksum_after, 16))
11658 fold_check_failed (expr, ret);
11660 return ret;
11663 void
11664 print_fold_checksum (tree expr)
11666 struct md5_ctx ctx;
11667 unsigned char checksum[16], cnt;
11668 htab_t ht;
11670 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11671 md5_init_ctx (&ctx);
11672 fold_checksum_tree (expr, &ctx, ht);
11673 md5_finish_ctx (&ctx, checksum);
11674 htab_delete (ht);
11675 for (cnt = 0; cnt < 16; ++cnt)
11676 fprintf (stderr, "%02x", checksum[cnt]);
11677 putc ('\n', stderr);
11680 static void
11681 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11683 internal_error ("fold check: original tree changed by fold");
11686 static void
11687 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11689 void **slot;
11690 enum tree_code code;
11691 struct tree_function_decl buf;
11692 int i, len;
11694 recursive_label:
11696 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11697 <= sizeof (struct tree_function_decl))
11698 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11699 if (expr == NULL)
11700 return;
11701 slot = htab_find_slot (ht, expr, INSERT);
11702 if (*slot != NULL)
11703 return;
11704 *slot = expr;
11705 code = TREE_CODE (expr);
11706 if (TREE_CODE_CLASS (code) == tcc_declaration
11707 && DECL_ASSEMBLER_NAME_SET_P (expr))
11709 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11710 memcpy ((char *) &buf, expr, tree_size (expr));
11711 expr = (tree) &buf;
11712 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11714 else if (TREE_CODE_CLASS (code) == tcc_type
11715 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11716 || TYPE_CACHED_VALUES_P (expr)
11717 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11719 /* Allow these fields to be modified. */
11720 memcpy ((char *) &buf, expr, tree_size (expr));
11721 expr = (tree) &buf;
11722 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11723 TYPE_POINTER_TO (expr) = NULL;
11724 TYPE_REFERENCE_TO (expr) = NULL;
11725 if (TYPE_CACHED_VALUES_P (expr))
11727 TYPE_CACHED_VALUES_P (expr) = 0;
11728 TYPE_CACHED_VALUES (expr) = NULL;
11731 md5_process_bytes (expr, tree_size (expr), ctx);
11732 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11733 if (TREE_CODE_CLASS (code) != tcc_type
11734 && TREE_CODE_CLASS (code) != tcc_declaration
11735 && code != TREE_LIST)
11736 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11737 switch (TREE_CODE_CLASS (code))
11738 {
11739 case tcc_constant:
11740 switch (code)
11741 {
11742 case STRING_CST:
11743 md5_process_bytes (TREE_STRING_POINTER (expr),
11744 TREE_STRING_LENGTH (expr), ctx);
11745 break;
11746 case COMPLEX_CST:
11747 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11748 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11749 break;
11750 case VECTOR_CST:
11751 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11752 break;
11753 default:
11754 break;
11755 }
11756 break;
11757 case tcc_exceptional:
11758 switch (code)
11759 {
11760 case TREE_LIST:
11761 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11762 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11763 expr = TREE_CHAIN (expr);
11764 goto recursive_label;
11766 case TREE_VEC:
11767 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11768 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11769 break;
11770 default:
11771 break;
11772 }
11773 break;
11774 case tcc_expression:
11775 case tcc_reference:
11776 case tcc_comparison:
11779 case tcc_statement:
11780 len = TREE_CODE_LENGTH (code);
11781 for (i = 0; i < len; ++i)
11782 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11784 case tcc_declaration:
11785 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11786 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11787 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11789 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11790 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11791 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11792 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11793 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11795 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11796 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11798 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11800 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11801 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11802 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11806 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11807 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11808 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11809 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11810 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11811 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11812 if (INTEGRAL_TYPE_P (expr)
11813 || SCALAR_FLOAT_TYPE_P (expr))
11815 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11816 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11818 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11819 if (TREE_CODE (expr) == RECORD_TYPE
11820 || TREE_CODE (expr) == UNION_TYPE
11821 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11822 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11823 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11832 /* Fold a unary tree expression with code CODE of type TYPE with an
11833 operand OP0. Return a folded expression if successful. Otherwise,
11834 return a tree expression with code CODE of type TYPE with an
11835 operand OP0. */
11838 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11841 #ifdef ENABLE_FOLD_CHECKING
11842 unsigned char checksum_before[16], checksum_after[16];
11843 struct md5_ctx ctx;
11846 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11847 md5_init_ctx (&ctx);
11848 fold_checksum_tree (op0, &ctx, ht);
11849 md5_finish_ctx (&ctx, checksum_before);
11853 tem = fold_unary (code, type, op0);
11855 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11857 #ifdef ENABLE_FOLD_CHECKING
11858 md5_init_ctx (&ctx);
11859 fold_checksum_tree (op0, &ctx, ht);
11860 md5_finish_ctx (&ctx, checksum_after);
11863 if (memcmp (checksum_before, checksum_after, 16))
11864 fold_check_failed (op0, tem);
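/* The checking discipline above, in miniature (an illustrative sketch,
   not GCC code; OBJ, SIZE and transform are placeholders, and the md5
   routines are the libiberty ones already used in this file):

     unsigned char before[16], after[16];
     struct md5_ctx c;
     md5_init_ctx (&c);
     md5_process_bytes (obj, size, &c);
     md5_finish_ctx (&c, before);
     result = transform (obj);
     md5_init_ctx (&c);
     md5_process_bytes (obj, size, &c);
     md5_finish_ctx (&c, after);
     if (memcmp (before, after, 16))
       abort ();

   fold_checksum_tree plays the role of md5_process_bytes here, walking
   the whole tree and using the hash table HT so that shared subtrees
   are visited only once.  */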
11869 /* Fold a binary tree expression with code CODE of type TYPE with
11870 operands OP0 and OP1. Return a folded expression if successful.
11871 Otherwise, return a tree expression with code CODE of type TYPE
11872 with operands OP0 and OP1. */
11875 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11879 #ifdef ENABLE_FOLD_CHECKING
11880 unsigned char checksum_before_op0[16],
11881 checksum_before_op1[16],
11882 checksum_after_op0[16],
11883 checksum_after_op1[16];
11884 struct md5_ctx ctx;
11887 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11888 md5_init_ctx (&ctx);
11889 fold_checksum_tree (op0, &ctx, ht);
11890 md5_finish_ctx (&ctx, checksum_before_op0);
11893 md5_init_ctx (&ctx);
11894 fold_checksum_tree (op1, &ctx, ht);
11895 md5_finish_ctx (&ctx, checksum_before_op1);
11899 tem = fold_binary (code, type, op0, op1);
11901 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11903 #ifdef ENABLE_FOLD_CHECKING
11904 md5_init_ctx (&ctx);
11905 fold_checksum_tree (op0, &ctx, ht);
11906 md5_finish_ctx (&ctx, checksum_after_op0);
11909 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11910 fold_check_failed (op0, tem);
11912 md5_init_ctx (&ctx);
11913 fold_checksum_tree (op1, &ctx, ht);
11914 md5_finish_ctx (&ctx, checksum_after_op1);
11917 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11918 fold_check_failed (op1, tem);
11923 /* Fold a ternary tree expression with code CODE of type TYPE with
11924 operands OP0, OP1, and OP2. Return a folded expression if
11925 successful. Otherwise, return a tree expression with code CODE of
11926 type TYPE with operands OP0, OP1, and OP2. */
11929 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11933 #ifdef ENABLE_FOLD_CHECKING
11934 unsigned char checksum_before_op0[16],
11935 checksum_before_op1[16],
11936 checksum_before_op2[16],
11937 checksum_after_op0[16],
11938 checksum_after_op1[16],
11939 checksum_after_op2[16];
11940 struct md5_ctx ctx;
11943 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11944 md5_init_ctx (&ctx);
11945 fold_checksum_tree (op0, &ctx, ht);
11946 md5_finish_ctx (&ctx, checksum_before_op0);
11949 md5_init_ctx (&ctx);
11950 fold_checksum_tree (op1, &ctx, ht);
11951 md5_finish_ctx (&ctx, checksum_before_op1);
11954 md5_init_ctx (&ctx);
11955 fold_checksum_tree (op2, &ctx, ht);
11956 md5_finish_ctx (&ctx, checksum_before_op2);
11960 tem = fold_ternary (code, type, op0, op1, op2);
11962 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11964 #ifdef ENABLE_FOLD_CHECKING
11965 md5_init_ctx (&ctx);
11966 fold_checksum_tree (op0, &ctx, ht);
11967 md5_finish_ctx (&ctx, checksum_after_op0);
11970 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11971 fold_check_failed (op0, tem);
11973 md5_init_ctx (&ctx);
11974 fold_checksum_tree (op1, &ctx, ht);
11975 md5_finish_ctx (&ctx, checksum_after_op1);
11978 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11979 fold_check_failed (op1, tem);
11981 md5_init_ctx (&ctx);
11982 fold_checksum_tree (op2, &ctx, ht);
11983 md5_finish_ctx (&ctx, checksum_after_op2);
11986 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11987 fold_check_failed (op2, tem);
11992 /* Perform constant folding and related simplification of initializer
11993 expression EXPR. These behave identically to "fold_buildN" but ignore
11994 potential run-time traps and exceptions that fold must preserve. */
11996 #define START_FOLD_INIT \
11997 int saved_signaling_nans = flag_signaling_nans;\
11998 int saved_trapping_math = flag_trapping_math;\
11999 int saved_rounding_math = flag_rounding_math;\
12000 int saved_trapv = flag_trapv;\
12001 int saved_folding_initializer = folding_initializer;\
12002 flag_signaling_nans = 0;\
12003 flag_trapping_math = 0;\
12004 flag_rounding_math = 0;\
12005 flag_trapv = 0;\
12006 folding_initializer = 1;
12008 #define END_FOLD_INIT \
12009 flag_signaling_nans = saved_signaling_nans;\
12010 flag_trapping_math = saved_trapping_math;\
12011 flag_rounding_math = saved_rounding_math;\
12012 flag_trapv = saved_trapv;\
12013 folding_initializer = saved_folding_initializer;
12016 fold_build1_initializer (enum tree_code code, tree type, tree op)
12021 result = fold_build1 (code, type, op);
12028 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12033 result = fold_build2 (code, type, op0, op1);
12040 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12046 result = fold_build3 (code, type, op0, op1, op2);
12052 #undef START_FOLD_INIT
12053 #undef END_FOLD_INIT
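/* For example (illustrative only): with -ftrapv, fold must normally
   preserve the run-time trap of a signed addition that overflows, so
   it refuses to simplify it.  In a static initializer such as

     static int k = INT_MAX + 1;

   there is no run time, so fold_build2_initializer (PLUS_EXPR, ...)
   clears flag_trapv around the call to fold_build2 and the addition
   is folded to a constant (with TREE_OVERFLOW set).  */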
12055 /* Determine if first argument is a multiple of second argument. Return 0 if
12056 it is not, or if we cannot easily determine that it is.
12058 An example of the sort of thing we care about (at this point; this routine
12059 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12060 fold cases do now) is discovering that
12062 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12064 is a multiple of
12066 SAVE_EXPR (J * 8)
12068 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12070 This code also handles discovering that
12072 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12074 is a multiple of 8 so we don't have to worry about dealing with a
12075 possible remainder.
12077 Note that we *look* inside a SAVE_EXPR only to determine how it was
12078 calculated; it is not safe for fold to do much of anything else with the
12079 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12080 at run time. For example, the latter example above *cannot* be implemented
12081 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12082 evaluation time of the original SAVE_EXPR is not necessarily the same at
12083 the time the new expression is evaluated. The only optimization of this
12084 sort that would be valid is changing
12086 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12088 divided by 8 to
12090 SAVE_EXPR (I) * SAVE_EXPR (J)
12092 (where the same SAVE_EXPR (J) is used in the original and the
12093 transformed version). */
12096 multiple_of_p (tree type, tree top, tree bottom)
12098 if (operand_equal_p (top, bottom, 0))
12101 if (TREE_CODE (type) != INTEGER_TYPE)
12104 switch (TREE_CODE (top))
12107 /* Bitwise and provides a power of two multiple. If the mask is
12108 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12109 if (!integer_pow2p (bottom))
12114 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12115 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12119 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12120 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12123 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12127 op1 = TREE_OPERAND (top, 1);
12128 /* const_binop may not detect overflow correctly,
12129 so check for it explicitly here. */
12130 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12131 > TREE_INT_CST_LOW (op1)
12132 && TREE_INT_CST_HIGH (op1) == 0
12133 && 0 != (t1 = fold_convert (type,
12134 const_binop (LSHIFT_EXPR,
12137 && ! TREE_OVERFLOW (t1))
12138 return multiple_of_p (type, t1, bottom);
12143 /* Can't handle conversions from a non-integral or wider integral type. */
12144 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12145 || (TYPE_PRECISION (type)
12146 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12149 /* ... fall through ... */
12152 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12155 if (TREE_CODE (bottom) != INTEGER_CST
12156 || (TYPE_UNSIGNED (type)
12157 && (tree_int_cst_sgn (top) < 0
12158 || tree_int_cst_sgn (bottom) < 0)))
12160 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
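/* Some illustrative answers (a sketch, assuming an integral TYPE, not
   asserted anywhere in the code above):

     multiple_of_p (type, J * 8,  size_int (8))  == 1   MULT_EXPR case
     multiple_of_p (type, J << 3, size_int (8))  == 1   LSHIFT_EXPR case,
                                                        since 1 << 3 == 8
     multiple_of_p (type, X & 24, size_int (8))  == 1   BIT_AND_EXPR case:
                                                        24 is a multiple of 8
     multiple_of_p (type, I + J,  size_int (8))  == 0   cannot be determined  */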
12168 /* Return true if `t' is known to be non-negative. */
12171 tree_expr_nonnegative_p (tree t)
12173 if (t == error_mark_node)
12176 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12179 switch (TREE_CODE (t))
12182 /* Query VRP to see if it has recorded any information about
12183 the range of this object. */
12184 return ssa_name_nonnegative_p (t);
12187 /* We can't return 1 if flag_wrapv is set because
12188 ABS_EXPR<INT_MIN> = INT_MIN. */
12189 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12194 return tree_int_cst_sgn (t) >= 0;
12197 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12200 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12201 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12202 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12204 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12205 both unsigned and at least 2 bits shorter than the result. */
12206 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12207 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12208 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12210 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12211 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12212 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12213 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12215 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12216 TYPE_PRECISION (inner2)) + 1;
12217 return prec < TYPE_PRECISION (TREE_TYPE (t));
12223 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12225 /* x * x for floating point x is always non-negative. */
12226 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12228 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12229 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12232 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12233 both unsigned and the sum of their precisions is smaller than the precision of the result. */
12234 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12235 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12236 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12238 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12239 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12240 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12241 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12242 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12243 < TYPE_PRECISION (TREE_TYPE (t));
12249 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12250 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12256 case TRUNC_DIV_EXPR:
12257 case CEIL_DIV_EXPR:
12258 case FLOOR_DIV_EXPR:
12259 case ROUND_DIV_EXPR:
12260 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12261 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12263 case TRUNC_MOD_EXPR:
12264 case CEIL_MOD_EXPR:
12265 case FLOOR_MOD_EXPR:
12266 case ROUND_MOD_EXPR:
12268 case NON_LVALUE_EXPR:
12270 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12272 case COMPOUND_EXPR:
12274 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12277 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12280 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12281 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12285 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12286 tree outer_type = TREE_TYPE (t);
12288 if (TREE_CODE (outer_type) == REAL_TYPE)
12290 if (TREE_CODE (inner_type) == REAL_TYPE)
12291 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12292 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12294 if (TYPE_UNSIGNED (inner_type))
12296 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12299 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12301 if (TREE_CODE (inner_type) == REAL_TYPE)
12302 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12303 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12304 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12305 && TYPE_UNSIGNED (inner_type);
12312 tree temp = TARGET_EXPR_SLOT (t);
12313 t = TARGET_EXPR_INITIAL (t);
12315 /* If the initializer is non-void, then it's a normal expression
12316 that will be assigned to the slot. */
12317 if (!VOID_TYPE_P (t))
12318 return tree_expr_nonnegative_p (t);
12320 /* Otherwise, the initializer sets the slot in some way. One common
12321 way is an assignment statement at the end of the initializer. */
12324 if (TREE_CODE (t) == BIND_EXPR)
12325 t = expr_last (BIND_EXPR_BODY (t));
12326 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12327 || TREE_CODE (t) == TRY_CATCH_EXPR)
12328 t = expr_last (TREE_OPERAND (t, 0));
12329 else if (TREE_CODE (t) == STATEMENT_LIST)
12334 if (TREE_CODE (t) == MODIFY_EXPR
12335 && TREE_OPERAND (t, 0) == temp)
12336 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12343 tree fndecl = get_callee_fndecl (t);
12344 tree arglist = TREE_OPERAND (t, 1);
12345 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12346 switch (DECL_FUNCTION_CODE (fndecl))
12348 CASE_FLT_FN (BUILT_IN_ACOS):
12349 CASE_FLT_FN (BUILT_IN_ACOSH):
12350 CASE_FLT_FN (BUILT_IN_CABS):
12351 CASE_FLT_FN (BUILT_IN_COSH):
12352 CASE_FLT_FN (BUILT_IN_ERFC):
12353 CASE_FLT_FN (BUILT_IN_EXP):
12354 CASE_FLT_FN (BUILT_IN_EXP10):
12355 CASE_FLT_FN (BUILT_IN_EXP2):
12356 CASE_FLT_FN (BUILT_IN_FABS):
12357 CASE_FLT_FN (BUILT_IN_FDIM):
12358 CASE_FLT_FN (BUILT_IN_HYPOT):
12359 CASE_FLT_FN (BUILT_IN_POW10):
12360 CASE_INT_FN (BUILT_IN_FFS):
12361 CASE_INT_FN (BUILT_IN_PARITY):
12362 CASE_INT_FN (BUILT_IN_POPCOUNT):
12363 case BUILT_IN_BSWAP32:
12364 case BUILT_IN_BSWAP64:
12368 CASE_FLT_FN (BUILT_IN_SQRT):
12369 /* sqrt(-0.0) is -0.0. */
12370 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12372 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12374 CASE_FLT_FN (BUILT_IN_ASINH):
12375 CASE_FLT_FN (BUILT_IN_ATAN):
12376 CASE_FLT_FN (BUILT_IN_ATANH):
12377 CASE_FLT_FN (BUILT_IN_CBRT):
12378 CASE_FLT_FN (BUILT_IN_CEIL):
12379 CASE_FLT_FN (BUILT_IN_ERF):
12380 CASE_FLT_FN (BUILT_IN_EXPM1):
12381 CASE_FLT_FN (BUILT_IN_FLOOR):
12382 CASE_FLT_FN (BUILT_IN_FMOD):
12383 CASE_FLT_FN (BUILT_IN_FREXP):
12384 CASE_FLT_FN (BUILT_IN_LCEIL):
12385 CASE_FLT_FN (BUILT_IN_LDEXP):
12386 CASE_FLT_FN (BUILT_IN_LFLOOR):
12387 CASE_FLT_FN (BUILT_IN_LLCEIL):
12388 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12389 CASE_FLT_FN (BUILT_IN_LLRINT):
12390 CASE_FLT_FN (BUILT_IN_LLROUND):
12391 CASE_FLT_FN (BUILT_IN_LRINT):
12392 CASE_FLT_FN (BUILT_IN_LROUND):
12393 CASE_FLT_FN (BUILT_IN_MODF):
12394 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12395 CASE_FLT_FN (BUILT_IN_RINT):
12396 CASE_FLT_FN (BUILT_IN_ROUND):
12397 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12398 CASE_FLT_FN (BUILT_IN_SINH):
12399 CASE_FLT_FN (BUILT_IN_TANH):
12400 CASE_FLT_FN (BUILT_IN_TRUNC):
12401 /* True if the 1st argument is nonnegative. */
12402 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12404 CASE_FLT_FN (BUILT_IN_FMAX):
12405 /* True if either the 1st or the 2nd argument is nonnegative. */
12406 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12407 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12409 CASE_FLT_FN (BUILT_IN_FMIN):
12410 /* True if the 1st AND 2nd arguments are nonnegative. */
12411 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12412 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12414 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12415 /* True if the 2nd argument is nonnegative. */
12416 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12418 CASE_FLT_FN (BUILT_IN_POWI):
12419 /* True if the 1st argument is nonnegative or the second
12420 argument is an even integer. */
12421 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12423 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12424 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12427 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12429 CASE_FLT_FN (BUILT_IN_POW):
12430 /* True if the 1st argument is nonnegative or the second
12431 argument is an even integer valued real. */
12432 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12437 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12438 n = real_to_integer (&c);
12441 REAL_VALUE_TYPE cint;
12442 real_from_integer (&cint, VOIDmode, n,
12443 n < 0 ? -1 : 0, 0);
12444 if (real_identical (&c, &cint))
12448 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12455 /* ... fall through ... */
12458 if (truth_value_p (TREE_CODE (t)))
12459 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12463 /* We don't know the sign of `t', so be conservative and return false. */
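/* A worked instance of the zero-extension rules above (illustrative,
   not part of GCC): for

     unsigned char a, b;
     ... (int) a + (int) b ...

   both operands are zero-extended from 8-bit unsigned types, and
   MAX (8, 8) + 1 == 9 < 32, so the PLUS_EXPR case concludes the sum
   is nonnegative; the MULT_EXPR case likewise accepts
   (int) a * (int) b, because 8 + 8 == 16 < 32.  */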
12467 /* Return true when T is an address and is known to be nonzero.
12468 For floating point we further ensure that T is not denormal.
12469 Similar logic is present in nonzero_address_p in rtlanal.c. */
12472 tree_expr_nonzero_p (tree t)
12474 tree type = TREE_TYPE (t);
12476 /* Doing something useful for floating point would need more work. */
12477 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12480 switch (TREE_CODE (t))
12483 /* Query VRP to see if it has recorded any information about
12484 the range of this object. */
12485 return ssa_name_nonzero_p (t);
12488 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12491 /* We used to test for !integer_zerop here. This does not work correctly
12492 if TREE_CONSTANT_OVERFLOW (t). */
12493 return (TREE_INT_CST_LOW (t) != 0
12494 || TREE_INT_CST_HIGH (t) != 0);
12497 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12499 /* In the presence of negative values it is hard
12500 to say anything. */
12501 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12502 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12504 /* One of the operands must be positive and the other non-negative. */
12505 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12506 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12511 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12513 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12514 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12520 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12521 tree outer_type = TREE_TYPE (t);
12523 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12524 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12530 tree base = get_base_address (TREE_OPERAND (t, 0));
12535 /* Weak declarations may link to NULL. */
12536 if (VAR_OR_FUNCTION_DECL_P (base))
12537 return !DECL_WEAK (base);
12539 /* Constants are never weak. */
12540 if (CONSTANT_CLASS_P (base))
12547 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12548 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12551 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12552 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12555 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12557 /* When both operands are nonzero, then MAX must be too. */
12558 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12561 /* MAX where operand 0 is positive is positive. */
12562 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12564 /* MAX where operand 1 is positive is positive. */
12565 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12566 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12570 case COMPOUND_EXPR:
12573 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12576 case NON_LVALUE_EXPR:
12577 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12580 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12581 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12584 return alloca_call_p (t);
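/* For example (illustrative):

     extern int w __attribute__ ((weak));
     int v;

   tree_expr_nonzero_p is true for &v, since a defined, non-weak
   variable cannot live at address zero, but false for &w, because a
   weak declaration may remain unresolved and compare equal to NULL.  */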
12592 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12593 attempt to fold the expression to a constant without modifying TYPE,
12594 OP0 or OP1.
12596 If the expression could be simplified to a constant, then return
12597 the constant. If the expression would not be simplified to a
12598 constant, then return NULL_TREE. */
12601 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12603 tree tem = fold_binary (code, type, op0, op1);
12604 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12607 /* Given the components of a unary expression CODE, TYPE and OP0,
12608 attempt to fold the expression to a constant without modifying
12609 TYPE or OP0.
12611 If the expression could be simplified to a constant, then return
12612 the constant. If the expression would not be simplified to a
12613 constant, then return NULL_TREE. */
12616 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12618 tree tem = fold_unary (code, type, op0);
12619 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12622 /* If EXP represents referencing an element in a constant string
12623 (either via pointer arithmetic or array indexing), return the
12624 tree representing the value accessed, otherwise return NULL. */
12627 fold_read_from_constant_string (tree exp)
12629 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12631 tree exp1 = TREE_OPERAND (exp, 0);
12635 if (TREE_CODE (exp) == INDIRECT_REF)
12636 string = string_constant (exp1, &index);
12639 tree low_bound = array_ref_low_bound (exp);
12640 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12642 /* Optimize the special case of a zero lower bound.
12644 We convert the low_bound to sizetype to avoid some problems
12645 with constant folding. (E.g. suppose the lower bound is 1,
12646 and its mode is QI. Without the conversion, (ARRAY
12647 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12648 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12649 if (! integer_zerop (low_bound))
12650 index = size_diffop (index, fold_convert (sizetype, low_bound));
12656 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12657 && TREE_CODE (string) == STRING_CST
12658 && TREE_CODE (index) == INTEGER_CST
12659 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12660 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12662 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12663 return fold_convert (TREE_TYPE (exp),
12664 build_int_cst (NULL_TREE,
12665 (TREE_STRING_POINTER (string)
12666 [TREE_INT_CST_LOW (index)])));
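/* For example (illustrative): given

     static const char s[] = "abc";

   the read s[1] is an ARRAY_REF whose base resolves to a STRING_CST,
   the index 1 is within TREE_STRING_LENGTH, and the routine returns
   the character constant 'b' converted to the type of the access.  */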
12671 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12672 an integer constant or real constant.
12674 TYPE is the type of the result. */
12677 fold_negate_const (tree arg0, tree type)
12679 tree t = NULL_TREE;
12681 switch (TREE_CODE (arg0))
12685 unsigned HOST_WIDE_INT low;
12686 HOST_WIDE_INT high;
12687 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12688 TREE_INT_CST_HIGH (arg0),
12690 t = build_int_cst_wide (type, low, high);
12691 t = force_fit_type (t, 1,
12692 (overflow | TREE_OVERFLOW (arg0))
12693 && !TYPE_UNSIGNED (type),
12694 TREE_CONSTANT_OVERFLOW (arg0));
12699 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12703 gcc_unreachable ();
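/* For example (illustrative): for a signed 32-bit TYPE,
   fold_negate_const on -2147483648 (INT_MIN) wraps back to INT_MIN;
   neg_double reports the overflow and force_fit_type records it by
   setting TREE_OVERFLOW on the result.  */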
12709 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12710 an integer constant or real constant.
12712 TYPE is the type of the result. */
12715 fold_abs_const (tree arg0, tree type)
12717 tree t = NULL_TREE;
12719 switch (TREE_CODE (arg0))
12722 /* If the value is unsigned, then the absolute value is
12723 the same as the ordinary value. */
12724 if (TYPE_UNSIGNED (type))
12726 /* Similarly, if the value is non-negative. */
12727 else if (INT_CST_LT (integer_minus_one_node, arg0))
12729 /* If the value is negative, then the absolute value is
12730 its negation. */
12733 unsigned HOST_WIDE_INT low;
12734 HOST_WIDE_INT high;
12735 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12736 TREE_INT_CST_HIGH (arg0),
12738 t = build_int_cst_wide (type, low, high);
12739 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12740 TREE_CONSTANT_OVERFLOW (arg0));
12745 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12746 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12752 gcc_unreachable ();
12758 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12759 constant. TYPE is the type of the result. */
12762 fold_not_const (tree arg0, tree type)
12764 tree t = NULL_TREE;
12766 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12768 t = build_int_cst_wide (type,
12769 ~ TREE_INT_CST_LOW (arg0),
12770 ~ TREE_INT_CST_HIGH (arg0));
12771 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12772 TREE_CONSTANT_OVERFLOW (arg0));
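/* For example (illustrative): for a signed 32-bit TYPE, fold_not_const
   on the constant 5 complements both words of the double-word constant,
   and force_fit_type truncates the result to the type's precision,
   yielding -6 (~5 == -6 in two's complement).  */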
12777 /* Given CODE, a relational operator, the target type, TYPE and two
12778 constant operands OP0 and OP1, return the result of the
12779 relational operation. If the result is not a compile time
12780 constant, then return NULL_TREE. */
12783 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12785 int result, invert;
12787 /* From here on, the only cases we handle are when the result is
12788 known to be a constant. */
12790 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12792 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12793 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12795 /* Handle the cases where either operand is a NaN. */
12796 if (real_isnan (c0) || real_isnan (c1))
12806 case UNORDERED_EXPR:
12820 if (flag_trapping_math)
12826 gcc_unreachable ();
12829 return constant_boolean_node (result, type);
12832 return constant_boolean_node (real_compare (code, c0, c1), type);
12835 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12837 To compute GT, swap the arguments and do LT.
12838 To compute GE, do LT and invert the result.
12839 To compute LE, swap the arguments, do LT and invert the result.
12840 To compute NE, do EQ and invert the result.
12842 Therefore, the code below must handle only EQ and LT. */
12844 if (code == LE_EXPR || code == GT_EXPR)
12849 code = swap_tree_comparison (code);
12852 /* Note that it is safe to invert for real values here because we
12853 have already handled the one case where it matters. */
12856 if (code == NE_EXPR || code == GE_EXPR)
12859 code = invert_tree_comparison (code, false);
12862 /* Compute a result for LT or EQ if the arguments permit;
12863 otherwise return NULL_TREE. */
12864 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12866 if (code == EQ_EXPR)
12867 result = tree_int_cst_equal (op0, op1);
12868 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12869 result = INT_CST_LT_UNSIGNED (op0, op1);
12871 result = INT_CST_LT (op0, op1);
12878 return constant_boolean_node (result, type);
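/* For example (illustrative): fold_relational_const (GT_EXPR, type, 3, 7)
   swaps the operands and evaluates 7 < 3, giving false, while
   fold_relational_const (GE_EXPR, type, 3, 7) evaluates 3 < 7 and
   inverts the true result, also giving false.  */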
12881 /* Build an expression for a cleanup point containing EXPR with type TYPE.
12882 Don't build a CLEANUP_POINT_EXPR for an EXPR which doesn't have side
12883 effects. */
12886 fold_build_cleanup_point_expr (tree type, tree expr)
12888 /* If the expression does not have side effects then we don't have to wrap
12889 it with a cleanup point expression. */
12890 if (!TREE_SIDE_EFFECTS (expr))
12893 /* If the expression is a return, check whether the expression inside the
12894 return, or the right-hand side of the modify expression it contains,
12895 has side effects. If not, we don't need to wrap the expression in a
12896 cleanup point expression. Note we don't check the left-hand side of
12897 the modify because it should always be the return decl. */
12898 if (TREE_CODE (expr) == RETURN_EXPR)
12900 tree op = TREE_OPERAND (expr, 0);
12901 if (!op || !TREE_SIDE_EFFECTS (op))
12903 op = TREE_OPERAND (op, 1);
12904 if (!TREE_SIDE_EFFECTS (op))
12908 return build1 (CLEANUP_POINT_EXPR, type, expr);
12911 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12912 avoid confusing the gimplify process. */
12915 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12917 /* The size of the object is not relevant when talking about its address. */
12918 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12919 t = TREE_OPERAND (t, 0);
12921 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12922 if (TREE_CODE (t) == INDIRECT_REF
12923 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12925 t = TREE_OPERAND (t, 0);
12926 if (TREE_TYPE (t) != ptrtype)
12927 t = build1 (NOP_EXPR, ptrtype, t);
12933 while (handled_component_p (base))
12934 base = TREE_OPERAND (base, 0);
12936 TREE_ADDRESSABLE (base) = 1;
12938 t = build1 (ADDR_EXPR, ptrtype, t);
12945 build_fold_addr_expr (tree t)
12947 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12950 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12951 of an indirection through OP0, or NULL_TREE if no simplification is
12955 fold_indirect_ref_1 (tree type, tree op0)
12961 subtype = TREE_TYPE (sub);
12962 if (!POINTER_TYPE_P (subtype))
12965 if (TREE_CODE (sub) == ADDR_EXPR)
12967 tree op = TREE_OPERAND (sub, 0);
12968 tree optype = TREE_TYPE (op);
12969 /* *&p => p; make sure to handle *&"str"[cst] here. */
12970 if (type == optype)
12972 tree fop = fold_read_from_constant_string (op);
12978 /* *(foo *)&fooarray => fooarray[0] */
12979 else if (TREE_CODE (optype) == ARRAY_TYPE
12980 && type == TREE_TYPE (optype))
12982 tree type_domain = TYPE_DOMAIN (optype);
12983 tree min_val = size_zero_node;
12984 if (type_domain && TYPE_MIN_VALUE (type_domain))
12985 min_val = TYPE_MIN_VALUE (type_domain);
12986 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12988 /* *(foo *)&complexfoo => __real__ complexfoo */
12989 else if (TREE_CODE (optype) == COMPLEX_TYPE
12990 && type == TREE_TYPE (optype))
12991 return fold_build1 (REALPART_EXPR, type, op);
12994 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12995 if (TREE_CODE (sub) == PLUS_EXPR
12996 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12998 tree op00 = TREE_OPERAND (sub, 0);
12999 tree op01 = TREE_OPERAND (sub, 1);
13003 op00type = TREE_TYPE (op00);
13004 if (TREE_CODE (op00) == ADDR_EXPR
13005 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13006 && type == TREE_TYPE (TREE_TYPE (op00type)))
13008 tree size = TYPE_SIZE_UNIT (type);
13009 if (tree_int_cst_equal (size, op01))
13010 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13014 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13015 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13016 && type == TREE_TYPE (TREE_TYPE (subtype)))
13019 tree min_val = size_zero_node;
13020 sub = build_fold_indirect_ref (sub);
13021 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13022 if (type_domain && TYPE_MIN_VALUE (type_domain))
13023 min_val = TYPE_MIN_VALUE (type_domain);
13024 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13030 /* Builds an expression for an indirection through T, simplifying some
13031 cases. */
13034 build_fold_indirect_ref (tree t)
13036 tree type = TREE_TYPE (TREE_TYPE (t));
13037 tree sub = fold_indirect_ref_1 (type, t);
13042 return build1 (INDIRECT_REF, type, t);
13045 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13048 fold_indirect_ref (tree t)
13050 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13058 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13059 whose result is ignored. The type of the returned tree need not be
13060 the same as the original expression. */
13063 fold_ignored_result (tree t)
13065 if (!TREE_SIDE_EFFECTS (t))
13066 return integer_zero_node;
13069 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13072 t = TREE_OPERAND (t, 0);
13076 case tcc_comparison:
13077 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13078 t = TREE_OPERAND (t, 0);
13079 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13080 t = TREE_OPERAND (t, 1);
13085 case tcc_expression:
13086 switch (TREE_CODE (t))
13088 case COMPOUND_EXPR:
13089 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13091 t = TREE_OPERAND (t, 0);
13095 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13096 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13098 t = TREE_OPERAND (t, 0);
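/* For example (illustrative): fold_ignored_result turns (x + 1), whose
   value is unused and which has no side effects, into integer_zero_node,
   but reduces f () + 1 only to the call f (), whose side effects must
   be preserved.  */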
13111 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13112 This can only be applied to objects of a sizetype. */
13115 round_up (tree value, int divisor)
13117 tree div = NULL_TREE;
13119 gcc_assert (divisor > 0);
13123 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13124 have to do anything. Only do this when we are not given a const,
13125 because in that case, this check is more expensive than just
13126 doing it. */
13127 if (TREE_CODE (value) != INTEGER_CST)
13129 div = build_int_cst (TREE_TYPE (value), divisor);
13131 if (multiple_of_p (TREE_TYPE (value), value, div))
13135 /* If divisor is a power of two, simplify this to bit manipulation. */
13136 if (divisor == (divisor & -divisor))
13140 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13141 value = size_binop (PLUS_EXPR, value, t);
13142 t = build_int_cst (TREE_TYPE (value), -divisor);
13143 value = size_binop (BIT_AND_EXPR, value, t);
13148 div = build_int_cst (TREE_TYPE (value), divisor);
13149 value = size_binop (CEIL_DIV_EXPR, value, div);
13150 value = size_binop (MULT_EXPR, value, div);
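/* For example (illustrative): round_up (size_int (13), 8) takes the
   power-of-two path, computing (13 + 7) & -8 == 16, while
   round_up (size_int (13), 12) takes the general path, computing
   CEIL (13 / 12) * 12 == 24.  */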
13156 /* Likewise, but round down. */
13159 round_down (tree value, int divisor)
13161 tree div = NULL_TREE;
13163 gcc_assert (divisor > 0);
13167 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13168 have to do anything. Only do this when we are not given a const,
13169 because in that case, this check is more expensive than just
13170 doing it. */
13171 if (TREE_CODE (value) != INTEGER_CST)
13173 div = build_int_cst (TREE_TYPE (value), divisor);
13175 if (multiple_of_p (TREE_TYPE (value), value, div))
13179 /* If divisor is a power of two, simplify this to bit manipulation. */
13180 if (divisor == (divisor & -divisor))
13184 t = build_int_cst (TREE_TYPE (value), -divisor);
13185 value = size_binop (BIT_AND_EXPR, value, t);
13190 div = build_int_cst (TREE_TYPE (value), divisor);
13191 value = size_binop (FLOOR_DIV_EXPR, value, div);
13192 value = size_binop (MULT_EXPR, value, div);
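/* For example (illustrative): round_down (size_int (13), 8) computes
   13 & -8 == 8, and round_down (size_int (13), 12) computes
   FLOOR (13 / 12) * 12 == 12.  */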
13198 /* Returns the pointer to the base of the object addressed by EXP and
13199 extracts the information about the offset of the access, storing it
13200 in PBITPOS and POFFSET. */
13203 split_address_to_core_and_offset (tree exp,
13204 HOST_WIDE_INT *pbitpos, tree *poffset)
13207 enum machine_mode mode;
13208 int unsignedp, volatilep;
13209 HOST_WIDE_INT bitsize;
13211 if (TREE_CODE (exp) == ADDR_EXPR)
13213 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13214 poffset, &mode, &unsignedp, &volatilep,
13216 core = build_fold_addr_expr (core);
13222 *poffset = NULL_TREE;
13228 /* Returns true if addresses of E1 and E2 differ by a constant, false
13229 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13232 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13235 HOST_WIDE_INT bitpos1, bitpos2;
13236 tree toffset1, toffset2, tdiff, type;
13238 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13239 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13241 if (bitpos1 % BITS_PER_UNIT != 0
13242 || bitpos2 % BITS_PER_UNIT != 0
13243 || !operand_equal_p (core1, core2, 0))
13246 if (toffset1 && toffset2)
13248 type = TREE_TYPE (toffset1);
13249 if (type != TREE_TYPE (toffset2))
13250 toffset2 = fold_convert (type, toffset2);
13252 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13253 if (!cst_and_fits_in_hwi (tdiff))
13256 *diff = int_cst_value (tdiff);
13258 else if (toffset1 || toffset2)
13260 /* If only one of the offsets is non-constant, the difference cannot
13261 be a constant. */
13267 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
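/* For example (illustrative): for

     char a[16];

   ptr_difference_const (&a[10], &a[2], &diff) finds the common core
   &a, constant bit positions 80 and 16, no variable offsets, and
   stores (80 - 16) / BITS_PER_UNIT == 8 in *diff (assuming 8-bit
   units).  */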
13271 /* Simplify the floating point expression EXP when the sign of the
13272 result is not significant. Return NULL_TREE if no simplification
13273 was possible. */
13276 fold_strip_sign_ops (tree exp)
13280 switch (TREE_CODE (exp))
13284 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13285 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13289 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13291 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13292 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13293 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13294 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13295 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13296 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13300 /* Strip sign ops from the argument of "odd" math functions. */
13301 if (negate_mathfn_p (builtin_mathfn_code (exp)))
13303 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13305 return build_function_call_expr (get_callee_fndecl (exp),
13306 build_tree_list (NULL_TREE, arg0));
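/* For example (illustrative): when only the magnitude of the result
   matters, as in fabs (sin (-x)), fold_strip_sign_ops rewrites the
   argument sin (-x) into sin (x), because sin is an "odd" function
   (negate_mathfn_p) and the sign of its result is not significant;
   likewise -x * y is rewritten to x * y when sign-dependent rounding
   is not honored.  */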