/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree, tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
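
/* Illustrative sketch (not from the original source): how OVERFLOW_SUM_SIGN
   behaves on a plain `int' addition, assuming wrapping two's complement
   arithmetic as the surrounding code does.  The helper name is made up for
   illustration only.  */
#if 0
static int
sum_overflows_example (int a, int b)
{
  int sum = a + b;	/* Assume wraparound on overflow.  */
  /* Nonzero iff A and B have the same sign but SUM's sign differs.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}
/* sum_overflows_example (INT_MAX, 1) yields 1 (overflow);
   sum_overflows_example (1, -1) yields 0.  */
#endif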
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
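
/* Illustrative sketch (not from the original source): round-tripping a
   two-word integer through the four half-word representation described
   above.  Values are chosen arbitrarily.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x12345678, low2;
  HOST_WIDE_INT hi = -1, hi2;

  encode (words, low, hi);	/* Each words[i] holds one half-word.  */
  decode (words, &low2, &hi2);	/* Recombine: LOWPART + HIGHPART * BASE.  */
  gcc_assert (low2 == low && hi2 == hi);
}
#endif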
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
  unsigned HOST_WIDE_INT low;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)

  prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));

  if (prec < HOST_BITS_PER_WIDE_INT)
    low &= ~((HOST_WIDE_INT) (-1) << prec);

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
  else if (prec == HOST_BITS_PER_WIDE_INT)
      if ((HOST_WIDE_INT) low < 0)
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	low |= (HOST_WIDE_INT) (-1) << prec;

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
      t = build_int_cst_wide (TREE_TYPE (t), low, high);
	  || (overflowable > 0 && sign_extended_type))
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
      else if (overflowed_const)
	  TREE_CONSTANT_OVERFLOW (t) = 1;
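
/* Illustrative sketch (not from the original source): forcing 254 into the
   range of `signed char'.  254 does not fit in 8 signed bits, so the value
   is sign-extended to -2; because OVERFLOWABLE is 1 and the type is signed,
   both overflow flags are set on the (new) node.  */
#if 0
static void
force_fit_type_example (void)
{
  tree t = build_int_cst (signed_char_type_node, 254);

  t = force_fit_type (t, /*overflowable=*/1, false, false);
  /* TREE_INT_CST_LOW (t) now encodes -2; TREE_OVERFLOW (t) and
     TREE_CONSTANT_OVERFLOW (t) are both 1.  */
}
#endif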
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  unsigned HOST_WIDE_INT l;

  h = h1 + h2 + (l < l1);

    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  return OVERFLOW_SUM_SIGN (h1, h2, h);

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
      return (*hv & h1) < 0;

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
      for (j = 0; j < 4; j++)
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
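
/* Illustrative sketch (not from the original source): basic doubleword
   arithmetic through the add_double/mul_double wrappers used throughout
   this file.  */
#if 0
static void
double_arith_example (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf;

  /* The low word wraps and carries into the high word; no signed
     overflow occurs.  */
  ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);
  gcc_assert (!ovf && lo == 0 && hi == 1);

  /* 10 * 20 fits easily; the overflow flag stays clear.  */
  ovf = mul_double (10, 0, 20, 0, &lo, &hi);
  gcc_assert (!ovf && lo == 200 && hi == 0);
}
#endif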
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
  unsigned HOST_WIDE_INT signmask;

      rshift_double (l1, h1, -count, prec, lv, hv, arith);

  if (SHIFT_COUNT_TRUNCATED)

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
  else if (count >= HOST_BITS_PER_WIDE_INT)
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec >= HOST_BITS_PER_WIDE_INT)
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  unsigned HOST_WIDE_INT signmask;

	       ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))

  if (SHIFT_COUNT_TRUNCATED)

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
  else if (count >= HOST_BITS_PER_WIDE_INT)
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
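
/* Illustrative sketch (not from the original source): a left shift that
   crosses the word boundary, exercising the COUNT >= HOST_BITS_PER_WIDE_INT
   branch above where the low word feeds the high word directly.  */
#if 0
static void
lshift_double_example (void)
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  /* Shift 1 left by HOST_BITS_PER_WIDE_INT + 3 bits at full doubleword
     precision: the result is bit 3 of the high word.  */
  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT + 3,
		 2 * HOST_BITS_PER_WIDE_INT, &lo, &hi, 0);
  gcc_assert (lo == 0 && hi == (HOST_WIDE_INT) 1 << 3);
}
#endif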
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
      /* (minimum integer) / (-1) is the only overflow case.  */
      if (neg_double (lnum, hnum, &lnum, &hnum)
	  && ((HOST_WIDE_INT) lden & hden) == -1)
      neg_double (lden, hden, &lden, &hden);

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      /* This unsigned division rounds toward zero.  */

    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra (5th) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;

      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
	{			/* scale divisor and dividend */
	  for (i = 0; i <= 4 - 1; i++)
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);

	  for (i = 0; i <= 4 - 1; i++)
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the (I - 1)st digit.  */

	  for (j = 0; j <= den_hi_sig; j++)
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);

	      num[num_hi_sig] += carry;

	  /* Store the quotient digit.  */

  decode (quo, lquo, hquo);

  /* If result is negative, make it so.  */
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */

    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,

    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,

    case ROUND_MOD_EXPR:	/* round to closest integer */
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
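
/* Illustrative sketch (not from the original source): how the rounding mode
   affects -7 / 2.  TRUNC rounds toward zero, FLOOR toward negative infinity;
   in every mode the results satisfy num == quo * den + rem.  */
#if 0
static void
div_rounding_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  /* -7 as a doubleword: low word is (unsigned) -7, high word is -1.  */
  div_and_round_double (TRUNC_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  /* Quotient -3, remainder -1.  */

  div_and_round_double (FLOOR_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  /* Quotient -4, remainder 1.  */
}
#endif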
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)

  return build_int_cst_wide (type, quol, quoh);
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

negate_mathfn_p (enum built_in_function code)
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;
/* Check whether we may negate an integer constant T without causing
   overflow.  */

may_negate_without_overflow_p (tree t)
  unsigned HOST_WIDE_INT val;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
      if (TREE_INT_CST_LOW (t) != 0)
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
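
/* Illustrative sketch (not from the original source): the only signed value
   whose negation overflows is the type's minimum (e.g. INT_MIN for `int'),
   whose bit pattern is exactly 1 << (prec - 1), which is the final test
   above.  */
#if 0
static void
may_negate_example (void)
{
  tree ok = build_int_cst (integer_type_node, -1);
  tree min = TYPE_MIN_VALUE (integer_type_node);

  gcc_assert (may_negate_without_overflow_p (ok));
  gcc_assert (!may_negate_without_overflow_p (min));
}
#endif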
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

negate_expr_p (tree t)
  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      if (TYPE_UNSIGNED (type)
	  || (flag_wrapv && ! flag_trapv))

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
      return INTEGRAL_TYPE_P (type)
	     && (TYPE_UNSIGNED (type)
		 || (flag_wrapv && !flag_trapv));

      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

      if (TYPE_UNSIGNED (TREE_TYPE (t)))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tree tem = strip_float_extensions (t);
	    return negate_expr_p (tem);

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

fold_negate_expr (tree t)
  tree type = TREE_TYPE (t);

  switch (TREE_CODE (t))
    /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));

      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)

      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)

	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);

      return TREE_OPERAND (t, 0);

      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));

      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));

      if (TYPE_UNSIGNED (type))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));

      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

negate_expr (tree t)
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
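
/* Illustrative sketch (not from the original source): negate_expr folds the
   negation when that is safe, e.g. -(a - b) becomes b - a for integral
   operands, and otherwise wraps T in a NEGATE_EXPR.  */
#if 0
static tree
negate_expr_example (tree a, tree b)
{
  tree diff = build2 (MINUS_EXPR, integer_type_node, a, b);

  /* Returns the fold of b - a rather than -(a - b), since reordering the
     operands is safe for integer types.  */
  return negate_expr (diff);
}
#endif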
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
	*minus_litp = *litp, *litp = 0;
	*conp = negate_expr (*conp);
	var = negate_expr (var);
  else if (TREE_CONSTANT (in))

	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
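
/* Illustrative sketch (not from the original source): splitting `x - 5'
   under PLUS_EXPR returns VAR = x with the literal 5 stored in *MINUS_LITP
   (since it was subtracted), so callers can recombine the pieces as
   VAR + CON + LIT - MINUS_LIT.  */
#if 0
static void
split_tree_example (tree in)
{
  tree con, lit, minus_lit, var;

  var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, /*negate_p=*/0);
  /* Any of CON, LIT and MINUS_LIT may be null when that part is absent.  */
}
#endif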
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

associate_trees (tree t1, tree t2, enum tree_code code, tree type)

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
      if (code == PLUS_EXPR)
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
      else if (code == MINUS_EXPR)
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

      low = int1l | int2l, hi = int1h | int2h;

      low = int1l ^ int2l, hi = int1h ^ int2h;

      low = int1l & int2l, hi = int1h & int2h;

      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),

      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),

      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);

      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);

      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	  if (code == CEIL_DIV_EXPR)

	  low = int1l / int2l, hi = 0;

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
      if (int2h == 0 && int2l == 1)
	  low = int1l, hi = int1h;
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	  if (code == CEIL_MOD_EXPR)
	  low = int1l % int2l, hi = 0;

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);

	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
	low = int2l, hi = int2h;

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	  TREE_CONSTANT_OVERFLOW (t) = 1;

    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));
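
/* Illustrative sketch (not from the original source): compile-time folding
   of two INTEGER_CSTs.  The overflow flags of the operands propagate into
   the result.  */
#if 0
static void
int_const_binop_example (void)
{
  tree a = build_int_cst (integer_type_node, 6);
  tree b = build_int_cst (integer_type_node, 7);
  tree t = int_const_binop (MULT_EXPR, a, b, /*notrunc=*/0);

  gcc_assert (TREE_CODE (t) == INTEGER_CST
	      && TREE_INT_CST_LOW (t) == 42
	      && !TREE_OVERFLOW (t));
}
#endif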
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
  /* Sanity check for the recursive cases.  */

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
      enum machine_mode mode;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;

      /* The following codes are handled by real_arithmetic.  */

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      else if (REAL_VALUE_ISNAN (d2))

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	| TREE_CONSTANT_OVERFLOW (arg1)
	| TREE_CONSTANT_OVERFLOW (arg2);

  if (TREE_CODE (arg1) == COMPLEX_CST)
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);

	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),

	    = const_binop (PLUS_EXPR,
			   const_binop (MULT_EXPR, r2, r2, notrunc),
			   const_binop (MULT_EXPR, i2, i2, notrunc),
	    = const_binop (PLUS_EXPR,
			   const_binop (MULT_EXPR, r1, r2, notrunc),
			   const_binop (MULT_EXPR, i1, i2, notrunc),
	    = const_binop (MINUS_EXPR,
			   const_binop (MULT_EXPR, i1, r2, notrunc),
			   const_binop (MULT_EXPR, r1, i2, notrunc),

	  if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	    code = TRUNC_DIV_EXPR;

	  real = const_binop (code, t1, magsquared, notrunc);
	  imag = const_binop (code, t2, magsquared, notrunc);

      return build_complex (type, real, imag);
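
/* Illustrative worked example (not from the original source): the complex
   division above uses the textbook formula

     (r1 + i1*I) / (r2 + i2*I)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2).

   For instance (3 + 4i) / (1 + 2i): magsquared = 1*1 + 2*2 = 5,
   real = (3*1 + 4*2) / 5 = 2.2, imag = (4*1 - 3*2) / 5 = -0.4,
   i.e. 2.2 - 0.4i in a real type (TRUNC_DIV_EXPR is used instead for
   integral component types).  */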
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
  return build_int_cst (sizetype_tab[(int) kind], number);

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

size_binop (enum tree_code code, tree arg0, tree arg1)
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
      else if (code == MULT_EXPR && integer_onep (arg0))

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);

  return fold_build2 (code, type, arg0, arg1);
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

size_diffop (tree arg0, tree arg1)
  tree type = TREE_TYPE (arg0);

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
  else if (type == bitsizetype)
    ctype = sbitsizetype;
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
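
/* Illustrative sketch (not from the original source): subtracting two
   sizetype constants.  Because sizetype is unsigned, the difference is
   produced in the corresponding signed type, negating when ARG1 > ARG0.  */
#if 0
static void
size_diffop_example (void)
{
  tree four = size_int (4);
  tree nine = size_int (9);
  tree diff = size_diffop (four, nine);

  /* DIFF is the constant -5 in ssizetype.  */
}
#endif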
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

fold_convert_const_int_from_int (tree type, tree arg1)

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);

      tree ut = TYPE_MAX_VALUE (type);
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);

    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
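
/* Illustrative sketch (not from the original source): the saturating
   semantics described above, modeled on plain C doubles.  NaN maps to zero
   and out-of-range values clamp to the type's extremes.  */
#if 0
#include <math.h>
#include <limits.h>

static int
saturating_double_to_int_example (double r)
{
  if (isnan (r))
    return 0;			/* NaN -> 0, with overflow noted.  */
  if (r < (double) INT_MIN)
    return INT_MIN;		/* Saturate below.  */
  if (r > (double) INT_MAX)
    return INT_MAX;		/* Saturate above.  */
  return (int) r;		/* In range: truncate toward zero.  */
}
#endif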
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

fold_convert_const_real_from_real (tree type, tree arg1)
  REAL_VALUE_TYPE value;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

fold_convert_const (enum tree_code code, tree type, tree arg1)
  if (TREE_TYPE (arg1) == type)

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
  else if (TREE_CODE (type) == REAL_TYPE)
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
/* Construct a vector of zero elements of vector type TYPE.  */

build_zero_vector (tree type)

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

fold_convert (tree type, tree arg)
  tree orig = TREE_TYPE (arg);

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

      if (TREE_CODE (arg) == INTEGER_CST)
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
      else if (TREE_CODE (arg) == REAL_CST)
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)

      switch (TREE_CODE (orig))
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	  return fold_build1 (NOP_EXPR, type, arg);

	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

      switch (TREE_CODE (orig))
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);

      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

maybe_lvalue_p (tree x)
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_RANGE_REF:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:

      /* Assume the worst for front-end tree codes.  */
      if ((int) TREE_CODE (x) >= NUM_TREE_CODES)

/* Return an expr equal to X but certainly not valid as an lvalue.  */

  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */

  if (! maybe_lvalue_p (x))
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

pedantic_non_lvalue (tree x)
  if (pedantic_lvalues)
    return non_lvalue (x);
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

invert_tree_comparison (enum tree_code code, bool honor_nans)
  if (honor_nans && flag_trapping_math)

      return honor_nans ? UNLE_EXPR : LE_EXPR;
      return honor_nans ? UNLT_EXPR : LT_EXPR;
      return honor_nans ? UNGE_EXPR : GE_EXPR;
      return honor_nans ? UNGT_EXPR : GT_EXPR;
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

swap_tree_comparison (enum tree_code code)

    case UNORDERED_EXPR:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
      return COMPCODE_UNLT;
      return COMPCODE_UNEQ;
      return COMPCODE_UNLE;
      return COMPCODE_UNGT;
      return COMPCODE_LTGT;
      return COMPCODE_UNGE;

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
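
/* Illustrative sketch (not from the original source): the encoding packs
   "true on less / equal / greater / unordered" into four bits, so e.g.
   LE == LT|EQ (0011) and the AND of two comparisons is simply the AND of
   their masks: LE (0011) & GE (0110) == EQ (0010).  */
#if 0
static void
compcode_example (void)
{
  gcc_assert ((comparison_to_compcode (LE_EXPR)
	       & comparison_to_compcode (GE_EXPR))
	      == comparison_to_compcode (EQ_EXPR));
}
#endif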
2400 /* Return a tree for the comparison which is the combination of
2401 doing the AND or OR (depending on CODE) of the two operations LCODE
2402 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2403 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2404 if this makes the transformation invalid. */
2407 combine_comparisons (enum tree_code code, enum tree_code lcode,
2408 enum tree_code rcode, tree truth_type,
2409 tree ll_arg, tree lr_arg)
2411 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2412 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2413 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2414 enum comparison_code compcode;
2418 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2419 compcode = lcompcode & rcompcode;
2422 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2423 compcode = lcompcode | rcompcode;
2432 /* Eliminate unordered comparisons, as well as LTGT and ORD
2433 which are not used unless the mode has NaNs. */
2434 compcode &= ~COMPCODE_UNORD;
2435 if (compcode == COMPCODE_LTGT)
2436 compcode = COMPCODE_NE;
2437 else if (compcode == COMPCODE_ORD)
2438 compcode = COMPCODE_TRUE;
2440 else if (flag_trapping_math)
2442 /* Check that the original operation and the optimized ones will trap
2443 under the same condition. */
2444 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2445 && (lcompcode != COMPCODE_EQ)
2446 && (lcompcode != COMPCODE_ORD);
2447 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2448 && (rcompcode != COMPCODE_EQ)
2449 && (rcompcode != COMPCODE_ORD);
2450 bool trap = (compcode & COMPCODE_UNORD) == 0
2451 && (compcode != COMPCODE_EQ)
2452 && (compcode != COMPCODE_ORD);
2454 /* In a short-circuited boolean expression the LHS might be
2455 such that the RHS, if evaluated, will never trap. For
2456 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2457 if neither x nor y is NaN. (This is a mixed blessing: for
2458 example, the expression above will never trap, hence
2459 optimizing it to x < y would be invalid). */
2460 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2461 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2464 /* If the comparison was short-circuited, and only the RHS
2465 trapped, we may now generate a spurious trap. */
2467 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2470 /* If we changed the conditions that cause a trap, we lose. */
2471 if ((ltrap || rtrap) != trap)
2475 if (compcode == COMPCODE_TRUE)
2476 return constant_boolean_node (true, truth_type);
2477 else if (compcode == COMPCODE_FALSE)
2478 return constant_boolean_node (false, truth_type);
2480 return fold_build2 (compcode_to_comparison (compcode),
2481 truth_type, ll_arg, lr_arg);
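/* Illustrative sketch (editor's example, not part of the original
   source): the compcode encoding gives each primitive outcome its own
   bit, so combining two comparisons of the same operands is plain bit
   arithmetic.  Assuming the conventional values COMPCODE_LT == 1,
   COMPCODE_EQ == 2, COMPCODE_GT == 4 and COMPCODE_UNORD == 8:

     (a < b) || (a == b)    ->  COMPCODE_LT | COMPCODE_EQ  ==  COMPCODE_LE
     (a <= b) && (a >= b)   ->  COMPCODE_LE & COMPCODE_GE  ==  COMPCODE_EQ

   so, NaNs aside, "x <= y && x >= y" folds to the single test
   "x == y" via compcode_to_comparison.  */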
2484 /* Return nonzero if CODE is a tree code that represents a truth value. */
2487 truth_value_p (enum tree_code code)
2489 return (TREE_CODE_CLASS (code) == tcc_comparison
2490 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2491 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2492 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2495 /* Return nonzero if two operands (typically of the same tree node)
2496 are necessarily equal. If either argument has side-effects this
2497 function returns zero. FLAGS modifies behavior as follows:
2499 If OEP_ONLY_CONST is set, only return nonzero for constants.
2500 This function tests whether the operands are indistinguishable;
2501 it does not test whether they are equal using C's == operation.
2502 The distinction is important for IEEE floating point, because
2503 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2504 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2506 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2507 even though it may hold multiple values during a function.
2508 This is because a GCC tree node guarantees that nothing else is
2509 executed between the evaluation of its "operands" (which may often
2510 be evaluated in arbitrary order). Hence if the operands themselves
2511 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2512 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2513 unset means assuming isochronic (or instantaneous) tree equivalence.
2514 Unless comparing arbitrary expression trees, such as from different
2515 statements, this flag can usually be left unset.
2517 If OEP_PURE_SAME is set, then pure functions with identical arguments
2518 are considered the same. It is used when the caller has other ways
2519 to ensure that global memory is unchanged in between. */
2522 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2524 /* If either is ERROR_MARK, they aren't equal. */
2525 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2528 /* If both types don't have the same signedness, then we can't consider
2529 them equal. We must check this before the STRIP_NOPS calls
2530 because they may change the signedness of the arguments. */
2531 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2534 /* If both types don't have the same precision, then it is not safe to strip NOPs. */
2536 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2542 /* In case both args are comparisons but with different comparison
2543 code, try to swap the comparison operands of one arg to produce
2544 a match and compare that variant. */
2545 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2546 && COMPARISON_CLASS_P (arg0)
2547 && COMPARISON_CLASS_P (arg1))
2549 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2551 if (TREE_CODE (arg0) == swap_code)
2552 return operand_equal_p (TREE_OPERAND (arg0, 0),
2553 TREE_OPERAND (arg1, 1), flags)
2554 && operand_equal_p (TREE_OPERAND (arg0, 1),
2555 TREE_OPERAND (arg1, 0), flags);
2558 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2559 /* This is needed for conversions and for COMPONENT_REF.
2560 Might as well play it safe and always test this. */
2561 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2562 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2563 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2566 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2567 We don't care about side effects in that case because the SAVE_EXPR
2568 takes care of that for us. In all other cases, two expressions are
2569 equal if they have no side effects. If we have two identical
2570 expressions with side effects that should be treated the same due
2571 to the only side effects being identical SAVE_EXPR's, that will
2572 be detected in the recursive calls below. */
2573 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2574 && (TREE_CODE (arg0) == SAVE_EXPR
2575 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2578 /* Next handle constant cases, those for which we can return 1 even
2579 if ONLY_CONST is set. */
2580 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2581 switch (TREE_CODE (arg0))
2584 return tree_int_cst_equal (arg0, arg1);
2587 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2588 TREE_REAL_CST (arg1));
2594 v1 = TREE_VECTOR_CST_ELTS (arg0);
2595 v2 = TREE_VECTOR_CST_ELTS (arg1);
2598 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2601 v1 = TREE_CHAIN (v1);
2602 v2 = TREE_CHAIN (v2);
2609 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2611 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2615 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2616 && ! memcmp (TREE_STRING_POINTER (arg0),
2617 TREE_STRING_POINTER (arg1),
2618 TREE_STRING_LENGTH (arg0)));
2621 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2627 if (flags & OEP_ONLY_CONST)
2630 /* Define macros to test an operand from arg0 and arg1 for equality and a
2631 variant that allows null and views null as being different from any
2632 non-null value. In the latter case, if either is null, then both
2633 must be; otherwise, do the normal comparison. */
2634 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2635 TREE_OPERAND (arg1, N), flags)
2637 #define OP_SAME_WITH_NULL(N) \
2638 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2639 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2641 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2644 /* Two conversions are equal only if signedness and modes match. */
2645 switch (TREE_CODE (arg0))
2649 case FIX_TRUNC_EXPR:
2650 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2651 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2661 case tcc_comparison:
2663 if (OP_SAME (0) && OP_SAME (1))
2666 /* For commutative ops, allow the other order. */
2667 return (commutative_tree_code (TREE_CODE (arg0))
2668 && operand_equal_p (TREE_OPERAND (arg0, 0),
2669 TREE_OPERAND (arg1, 1), flags)
2670 && operand_equal_p (TREE_OPERAND (arg0, 1),
2671 TREE_OPERAND (arg1, 0), flags));
2674 /* If either of the pointer (or reference) expressions we are
2675 dereferencing contain a side effect, these cannot be equal. */
2676 if (TREE_SIDE_EFFECTS (arg0)
2677 || TREE_SIDE_EFFECTS (arg1))
2680 switch (TREE_CODE (arg0))
2683 case ALIGN_INDIRECT_REF:
2684 case MISALIGNED_INDIRECT_REF:
2690 case ARRAY_RANGE_REF:
2691 /* Operands 2 and 3 may be null. */
2694 && OP_SAME_WITH_NULL (2)
2695 && OP_SAME_WITH_NULL (3));
2698 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2699 may be NULL when we're called to compare MEM_EXPRs. */
2700 return OP_SAME_WITH_NULL (0)
2702 && OP_SAME_WITH_NULL (2);
2705 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2711 case tcc_expression:
2712 switch (TREE_CODE (arg0))
2715 case TRUTH_NOT_EXPR:
2718 case TRUTH_ANDIF_EXPR:
2719 case TRUTH_ORIF_EXPR:
2720 return OP_SAME (0) && OP_SAME (1);
2722 case TRUTH_AND_EXPR:
2724 case TRUTH_XOR_EXPR:
2725 if (OP_SAME (0) && OP_SAME (1))
2728 /* Otherwise take into account this is a commutative operation. */
2729 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2730 TREE_OPERAND (arg1, 1), flags)
2731 && operand_equal_p (TREE_OPERAND (arg0, 1),
2732 TREE_OPERAND (arg1, 0), flags));
2735 /* If the CALL_EXPRs call different functions, then they
2736 clearly cannot be equal. */
2741 unsigned int cef = call_expr_flags (arg0);
2742 if (flags & OEP_PURE_SAME)
2743 cef &= ECF_CONST | ECF_PURE;
2750 /* Now see if all the arguments are the same. operand_equal_p
2751 does not handle TREE_LIST, so we walk the operands here
2752 feeding them to operand_equal_p. */
2753 arg0 = TREE_OPERAND (arg0, 1);
2754 arg1 = TREE_OPERAND (arg1, 1);
2755 while (arg0 && arg1)
2757 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2761 arg0 = TREE_CHAIN (arg0);
2762 arg1 = TREE_CHAIN (arg1);
2765 /* If we get here and both argument lists are exhausted
2766 then the CALL_EXPRs are equal. */
2767 return ! (arg0 || arg1);
2773 case tcc_declaration:
2774 /* Consider __builtin_sqrt equal to sqrt. */
2775 return (TREE_CODE (arg0) == FUNCTION_DECL
2776 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2777 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2778 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2785 #undef OP_SAME_WITH_NULL
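/* Illustrative sketch (editor's example, not part of the original
   source): the "indistinguishable, not C-equal" distinction drawn in
   the comment before operand_equal_p, at the C level:

     double pz = 0.0, nz = -0.0;
     pz == nz                 is true  (C equality ignores the sign),
     1.0 / pz == 1.0 / nz     is false (+inf vs -inf),

   so +0.0 and -0.0 are distinguishable operands and operand_equal_p
   must not identify them; conversely two copies of the same NaN
   expression compare unequal with == yet are the same operand.  */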
2788 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2789 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2791 When in doubt, return 0. */
2794 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2796 int unsignedp1, unsignedpo;
2797 tree primarg0, primarg1, primother;
2798 unsigned int correct_width;
2800 if (operand_equal_p (arg0, arg1, 0))
2803 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2804 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2807 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2808 and see if the inner values are the same. This removes any
2809 signedness comparison, which doesn't matter here. */
2810 primarg0 = arg0, primarg1 = arg1;
2811 STRIP_NOPS (primarg0);
2812 STRIP_NOPS (primarg1);
2813 if (operand_equal_p (primarg0, primarg1, 0))
2816 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2817 actual comparison operand, ARG0.
2819 First throw away any conversions to wider types
2820 already present in the operands. */
2822 primarg1 = get_narrower (arg1, &unsignedp1);
2823 primother = get_narrower (other, &unsignedpo);
2825 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2826 if (unsignedp1 == unsignedpo
2827 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2828 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2830 tree type = TREE_TYPE (arg0);
2832 /* Make sure shorter operand is extended the right way
2833 to match the longer operand. */
2834 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2835 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2837 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2844 /* See if ARG is an expression that is either a comparison or is performing
2845 arithmetic on comparisons. The comparisons must only be comparing
2846 two different values, which will be stored in *CVAL1 and *CVAL2; if
2847 they are nonzero it means that some operands have already been found.
2848 No variables may be used anywhere else in the expression except in the
2849 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2850 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2852 If this is true, return 1. Otherwise, return zero. */
2855 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2857 enum tree_code code = TREE_CODE (arg);
2858 enum tree_code_class class = TREE_CODE_CLASS (code);
2860 /* We can handle some of the tcc_expression cases here. */
2861 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2863 else if (class == tcc_expression
2864 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2865 || code == COMPOUND_EXPR))
2868 else if (class == tcc_expression && code == SAVE_EXPR
2869 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2871 /* If we've already found a CVAL1 or CVAL2, this expression is
2872 too complex to handle. */
2873 if (*cval1 || *cval2)
2883 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2886 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2887 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2888 cval1, cval2, save_p));
2893 case tcc_expression:
2894 if (code == COND_EXPR)
2895 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2896 cval1, cval2, save_p)
2897 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2898 cval1, cval2, save_p)
2899 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2900 cval1, cval2, save_p));
2903 case tcc_comparison:
2904 /* First see if we can handle the first operand, then the second. For
2905 the second operand, we know *CVAL1 can't be zero. It must be that
2906 one side of the comparison is each of the values; test for the
2907 case where this isn't true by failing if the two operands are the same. */
2910 if (operand_equal_p (TREE_OPERAND (arg, 0),
2911 TREE_OPERAND (arg, 1), 0))
2915 *cval1 = TREE_OPERAND (arg, 0);
2916 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2918 else if (*cval2 == 0)
2919 *cval2 = TREE_OPERAND (arg, 0);
2920 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2925 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2927 else if (*cval2 == 0)
2928 *cval2 = TREE_OPERAND (arg, 1);
2929 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2941 /* ARG is a tree that is known to contain just arithmetic operations and
2942 comparisons. Evaluate the operations in the tree substituting NEW0 for
2943 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2947 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2949 tree type = TREE_TYPE (arg);
2950 enum tree_code code = TREE_CODE (arg);
2951 enum tree_code_class class = TREE_CODE_CLASS (code);
2953 /* We can handle some of the tcc_expression cases here. */
2954 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2956 else if (class == tcc_expression
2957 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2963 return fold_build1 (code, type,
2964 eval_subst (TREE_OPERAND (arg, 0),
2965 old0, new0, old1, new1));
2968 return fold_build2 (code, type,
2969 eval_subst (TREE_OPERAND (arg, 0),
2970 old0, new0, old1, new1),
2971 eval_subst (TREE_OPERAND (arg, 1),
2972 old0, new0, old1, new1));
2974 case tcc_expression:
2978 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2981 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2984 return fold_build3 (code, type,
2985 eval_subst (TREE_OPERAND (arg, 0),
2986 old0, new0, old1, new1),
2987 eval_subst (TREE_OPERAND (arg, 1),
2988 old0, new0, old1, new1),
2989 eval_subst (TREE_OPERAND (arg, 2),
2990 old0, new0, old1, new1));
2994 /* Fall through - ??? */
2996 case tcc_comparison:
2998 tree arg0 = TREE_OPERAND (arg, 0);
2999 tree arg1 = TREE_OPERAND (arg, 1);
3001 /* We need to check both for exact equality and tree equality. The
3002 former will be true if the operand has a side-effect. In that
3003 case, we know the operand occurred exactly once. */
3005 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3007 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3010 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3012 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3015 return fold_build2 (code, type, arg0, arg1);
3023 /* Return a tree for the case when the result of an expression is RESULT
3024 converted to TYPE and OMITTED was previously an operand of the expression
3025 but is now not needed (e.g., we folded OMITTED * 0).
3027 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3028 the conversion of RESULT to TYPE. */
3031 omit_one_operand (tree type, tree result, tree omitted)
3033 tree t = fold_convert (type, result);
3035 if (TREE_SIDE_EFFECTS (omitted))
3036 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3038 return non_lvalue (t);
3041 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3044 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3046 tree t = fold_convert (type, result);
3048 if (TREE_SIDE_EFFECTS (omitted))
3049 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3051 return pedantic_non_lvalue (t);
3054 /* Return a tree for the case when the result of an expression is RESULT
3055 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3056 of the expression but are now not needed.
3058 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3059 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3060 evaluated before OMITTED2. Otherwise, if neither has side effects,
3061 just do the conversion of RESULT to TYPE. */
3064 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3066 tree t = fold_convert (type, result);
3068 if (TREE_SIDE_EFFECTS (omitted2))
3069 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3070 if (TREE_SIDE_EFFECTS (omitted1))
3071 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3073 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
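/* Illustrative sketch (editor's example, not part of the original
   source): the COMPOUND_EXPRs built above behave like the C comma
   operator.  Folding "f () * 0" where f has side effects yields the
   equivalent of

     (f (), 0)

   so the call still executes while the value becomes the constant;
   omit_two_operands does the same with two discarded operands,
   evaluating OMITTED1 before OMITTED2.  */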
3077 /* Return a simplified tree node for the truth-negation of ARG. This
3078 never alters ARG itself. We assume that ARG is an operation that
3079 returns a truth value (0 or 1).
3081 FIXME: one would think we would fold the result, but it causes
3082 problems with the dominator optimizer. */
3085 fold_truth_not_expr (tree arg)
3087 tree type = TREE_TYPE (arg);
3088 enum tree_code code = TREE_CODE (arg);
3090 /* If this is a comparison, we can simply invert it, except for
3091 floating-point non-equality comparisons, in which case we just
3092 enclose a TRUTH_NOT_EXPR around what we have. */
3094 if (TREE_CODE_CLASS (code) == tcc_comparison)
3096 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3097 if (FLOAT_TYPE_P (op_type)
3098 && flag_trapping_math
3099 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3100 && code != NE_EXPR && code != EQ_EXPR)
3104 code = invert_tree_comparison (code,
3105 HONOR_NANS (TYPE_MODE (op_type)));
3106 if (code == ERROR_MARK)
3109 return build2 (code, type,
3110 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3117 return constant_boolean_node (integer_zerop (arg), type);
3119 case TRUTH_AND_EXPR:
3120 return build2 (TRUTH_OR_EXPR, type,
3121 invert_truthvalue (TREE_OPERAND (arg, 0)),
3122 invert_truthvalue (TREE_OPERAND (arg, 1)));
3125 return build2 (TRUTH_AND_EXPR, type,
3126 invert_truthvalue (TREE_OPERAND (arg, 0)),
3127 invert_truthvalue (TREE_OPERAND (arg, 1)));
3129 case TRUTH_XOR_EXPR:
3130 /* Here we can invert either operand. We invert the first operand
3131 unless the second operand is a TRUTH_NOT_EXPR in which case our
3132 result is the XOR of the first operand with the inside of the
3133 negation of the second operand. */
3135 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3136 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3137 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3139 return build2 (TRUTH_XOR_EXPR, type,
3140 invert_truthvalue (TREE_OPERAND (arg, 0)),
3141 TREE_OPERAND (arg, 1));
3143 case TRUTH_ANDIF_EXPR:
3144 return build2 (TRUTH_ORIF_EXPR, type,
3145 invert_truthvalue (TREE_OPERAND (arg, 0)),
3146 invert_truthvalue (TREE_OPERAND (arg, 1)));
3148 case TRUTH_ORIF_EXPR:
3149 return build2 (TRUTH_ANDIF_EXPR, type,
3150 invert_truthvalue (TREE_OPERAND (arg, 0)),
3151 invert_truthvalue (TREE_OPERAND (arg, 1)));
3153 case TRUTH_NOT_EXPR:
3154 return TREE_OPERAND (arg, 0);
3158 tree arg1 = TREE_OPERAND (arg, 1);
3159 tree arg2 = TREE_OPERAND (arg, 2);
3160 /* A COND_EXPR may have a throw as one operand, which
3161 then has void type. Just leave void operands as they are. */
3163 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3164 VOID_TYPE_P (TREE_TYPE (arg1))
3165 ? arg1 : invert_truthvalue (arg1),
3166 VOID_TYPE_P (TREE_TYPE (arg2))
3167 ? arg2 : invert_truthvalue (arg2));
3171 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3172 invert_truthvalue (TREE_OPERAND (arg, 1)));
3174 case NON_LVALUE_EXPR:
3175 return invert_truthvalue (TREE_OPERAND (arg, 0));
3178 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3179 return build1 (TRUTH_NOT_EXPR, type, arg);
3183 return build1 (TREE_CODE (arg), type,
3184 invert_truthvalue (TREE_OPERAND (arg, 0)));
3187 if (!integer_onep (TREE_OPERAND (arg, 1)))
3189 return build2 (EQ_EXPR, type, arg,
3190 build_int_cst (type, 0));
3193 return build1 (TRUTH_NOT_EXPR, type, arg);
3195 case CLEANUP_POINT_EXPR:
3196 return build1 (CLEANUP_POINT_EXPR, type,
3197 invert_truthvalue (TREE_OPERAND (arg, 0)));
3206 /* Return a simplified tree node for the truth-negation of ARG. This
3207 never alters ARG itself. We assume that ARG is an operation that
3208 returns a truth value (0 or 1).
3210 FIXME: one would think we would fold the result, but it causes
3211 problems with the dominator optimizer. */
3214 invert_truthvalue (tree arg)
3218 if (TREE_CODE (arg) == ERROR_MARK)
3221 tem = fold_truth_not_expr (arg);
3223 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
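/* Illustrative sketch (editor's example, not part of the original
   source): the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are
   De Morgan's laws, and the COND_EXPR case pushes the negation into
   both arms.  In plain C:

     !(a && b)     ==  (!a || !b)
     !(a || b)     ==  (!a && !b)
     !(c ? a : b)  ==  (c ? !a : !b)

   each identity holding for all truth values of a, b and c.  */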
3228 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3229 operands are another bit-wise operation with a common input. If so,
3230 distribute the bit operations to save an operation and possibly two if
3231 constants are involved. For example, convert
3232 (A | B) & (A | C) into A | (B & C)
3233 Further simplification will occur if B and C are constants.
3235 If this optimization cannot be done, 0 will be returned. */
3238 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3243 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3244 || TREE_CODE (arg0) == code
3245 || (TREE_CODE (arg0) != BIT_AND_EXPR
3246 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3249 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3251 common = TREE_OPERAND (arg0, 0);
3252 left = TREE_OPERAND (arg0, 1);
3253 right = TREE_OPERAND (arg1, 1);
3255 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3257 common = TREE_OPERAND (arg0, 0);
3258 left = TREE_OPERAND (arg0, 1);
3259 right = TREE_OPERAND (arg1, 0);
3261 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3263 common = TREE_OPERAND (arg0, 1);
3264 left = TREE_OPERAND (arg0, 0);
3265 right = TREE_OPERAND (arg1, 1);
3267 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3269 common = TREE_OPERAND (arg0, 1);
3270 left = TREE_OPERAND (arg0, 0);
3271 right = TREE_OPERAND (arg1, 0);
3276 return fold_build2 (TREE_CODE (arg0), type, common,
3277 fold_build2 (code, type, left, right));
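/* Illustrative sketch (editor's example, not part of the original
   source): checking the distribution above with concrete bit
   patterns:

     unsigned a = 0xf0, b = 0x0c, c = 0x03;
     (a | b) & (a | c)   ==  0xfc & 0xf3  ==  0xf0
     a | (b & c)         ==  0xf0 | 0x00  ==  0xf0

   and when B and C are constants, B & C folds at compile time, so the
   rewritten form costs one runtime operation instead of three.  */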
3280 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3281 with code CODE. This optimization is unsafe. */
3283 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3285 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3286 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3288 /* (A / C) +- (B / C) -> (A +- B) / C. */
3290 && operand_equal_p (TREE_OPERAND (arg0, 1),
3291 TREE_OPERAND (arg1, 1), 0))
3292 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3293 fold_build2 (code, type,
3294 TREE_OPERAND (arg0, 0),
3295 TREE_OPERAND (arg1, 0)),
3296 TREE_OPERAND (arg0, 1));
3298 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3299 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3300 TREE_OPERAND (arg1, 0), 0)
3301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3302 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3304 REAL_VALUE_TYPE r0, r1;
3305 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3306 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3308 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3310 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3311 real_arithmetic (&r0, code, &r0, &r1);
3312 return fold_build2 (MULT_EXPR, type,
3313 TREE_OPERAND (arg0, 0),
3314 build_real (type, r0));
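/* Illustrative sketch (editor's example, not part of the original
   source): why the rewrite above is flagged as unsafe.  With
   A == B == DBL_MAX and C == 2.0:

     A / C + B / C     is DBL_MAX   (each quotient is exact),
     (A + B) / C       is +inf      (A + B overflows first),

   so the transformation can change results in strict IEEE mode and is
   presumably only applied under relaxed floating-point rules such as
   -funsafe-math-optimizations.  */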
3320 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3321 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3324 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3331 tree size = TYPE_SIZE (TREE_TYPE (inner));
3332 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3333 || POINTER_TYPE_P (TREE_TYPE (inner)))
3334 && host_integerp (size, 0)
3335 && tree_low_cst (size, 0) == bitsize)
3336 return fold_convert (type, inner);
3339 result = build3 (BIT_FIELD_REF, type, inner,
3340 size_int (bitsize), bitsize_int (bitpos));
3342 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3347 /* Optimize a bit-field compare.
3349 There are two cases: First is a compare against a constant and the
3350 second is a comparison of two items where the fields are at the same
3351 bit position relative to the start of a chunk (byte, halfword, word)
3352 large enough to contain it. In these cases we can avoid the shift
3353 implicit in bitfield extractions.
3355 For constants, we emit a compare of the shifted constant with the
3356 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3357 compared. For two fields at the same position, we do the ANDs with the
3358 similar mask and compare the result of the ANDs.
3360 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3361 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3362 are the left and right operands of the comparison, respectively.
3364 If the optimization described above can be done, we return the resulting
3365 tree. Otherwise we return zero. */
3368 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3371 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3372 tree type = TREE_TYPE (lhs);
3373 tree signed_type, unsigned_type;
3374 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3375 enum machine_mode lmode, rmode, nmode;
3376 int lunsignedp, runsignedp;
3377 int lvolatilep = 0, rvolatilep = 0;
3378 tree linner, rinner = NULL_TREE;
3382 /* Get all the information about the extractions being done. If the bit size
3383 is the same as the size of the underlying object, we aren't doing an
3384 extraction at all and so can do nothing. We also don't want to
3385 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3386 then will no longer be able to replace it. */
3387 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3388 &lunsignedp, &lvolatilep, false);
3389 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3390 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3395 /* If this is not a constant, we can only do something if bit positions,
3396 sizes, and signedness are the same. */
3397 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3398 &runsignedp, &rvolatilep, false);
3400 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3401 || lunsignedp != runsignedp || offset != 0
3402 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3406 /* See if we can find a mode to refer to this field. We should be able to,
3407 but fail if we can't. */
3408 nmode = get_best_mode (lbitsize, lbitpos,
3409 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3410 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3411 TYPE_ALIGN (TREE_TYPE (rinner))),
3412 word_mode, lvolatilep || rvolatilep);
3413 if (nmode == VOIDmode)
3416 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
3418 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3419 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3421 /* Compute the bit position and size for the new reference and our offset
3422 within it. If the new reference is the same size as the original, we
3423 won't optimize anything, so return zero. */
3424 nbitsize = GET_MODE_BITSIZE (nmode);
3425 nbitpos = lbitpos & ~ (nbitsize - 1);
3427 if (nbitsize == lbitsize)
3430 if (BYTES_BIG_ENDIAN)
3431 lbitpos = nbitsize - lbitsize - lbitpos;
3433 /* Make the mask to be used against the extracted field. */
3434 mask = build_int_cst (unsigned_type, -1);
3435 mask = force_fit_type (mask, 0, false, false);
3436 mask = fold_convert (unsigned_type, mask);
3437 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3438 mask = const_binop (RSHIFT_EXPR, mask,
3439 size_int (nbitsize - lbitsize - lbitpos), 0);
3442 /* If not comparing with constant, just rework the comparison and return. */
3444 return fold_build2 (code, compare_type,
3445 fold_build2 (BIT_AND_EXPR, unsigned_type,
3446 make_bit_field_ref (linner,
3451 fold_build2 (BIT_AND_EXPR, unsigned_type,
3452 make_bit_field_ref (rinner,
3458 /* Otherwise, we are handling the constant case. See if the constant is too
3459 big for the field. Warn and return a tree for 0 (false) if so. We do
3460 this not only for its own sake, but to avoid having to test for this
3461 error case below. If we didn't, we might generate wrong code.
3463 For unsigned fields, the constant shifted right by the field length should
3464 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3469 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3470 fold_convert (unsigned_type, rhs),
3471 size_int (lbitsize), 0)))
3473 warning (0, "comparison is always %d due to width of bit-field",
3475 return constant_boolean_node (code == NE_EXPR, compare_type);
3480 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3481 size_int (lbitsize - 1), 0);
3482 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3484 warning (0, "comparison is always %d due to width of bit-field",
3486 return constant_boolean_node (code == NE_EXPR, compare_type);
3490 /* Single-bit compares should always be against zero. */
3491 if (lbitsize == 1 && ! integer_zerop (rhs))
3493 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3494 rhs = build_int_cst (type, 0);
3497 /* Make a new bitfield reference, shift the constant over the
3498 appropriate number of bits and mask it with the computed mask
3499 (in case this was a signed field). If we changed it, make a new one. */
3500 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3503 TREE_SIDE_EFFECTS (lhs) = 1;
3504 TREE_THIS_VOLATILE (lhs) = 1;
3507 rhs = const_binop (BIT_AND_EXPR,
3508 const_binop (LSHIFT_EXPR,
3509 fold_convert (unsigned_type, rhs),
3510 size_int (lbitpos), 0),
3513 return build2 (code, compare_type,
3514 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
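/* Illustrative sketch (editor's example, not part of the original
   source): the shape of the rewrite above for a C bit-field, assuming
   little-endian bit allocation within a 32-bit word:

     struct s { unsigned pad : 5; unsigned f : 3; unsigned rest : 24; };

   Testing "x.f == 6" naively loads the word, shifts right by 5 and
   masks with 7.  The folded form compares the unshifted word against
   the shifted constant instead:

     (word & (7u << 5)) == (6u << 5)

   eliminating the shift of the loaded value.  */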
3518 /* Subroutine for fold_truthop: decode a field reference.
3520 If EXP is a comparison reference, we return the innermost reference.
3522 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3523 set to the starting bit number.
3525 If the innermost field can be completely contained in a mode-sized
3526 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3528 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3529 otherwise it is not changed.
3531 *PUNSIGNEDP is set to the signedness of the field.
3533 *PMASK is set to the mask used. This is either contained in a
3534 BIT_AND_EXPR or derived from the width of the field.
3536 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3538 Return 0 if this is not a component reference or is one that we can't
3539 do anything with. */
3542 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3543 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3544 int *punsignedp, int *pvolatilep,
3545 tree *pmask, tree *pand_mask)
3547 tree outer_type = 0;
3549 tree mask, inner, offset;
3551 unsigned int precision;
3553 /* All the optimizations using this function assume integer fields.
3554 There are problems with FP fields since the type_for_size call
3555 below can fail for, e.g., XFmode. */
3556 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3559 /* We are interested in the bare arrangement of bits, so strip everything
3560 that doesn't affect the machine mode. However, record the type of the
3561 outermost expression if it may matter below. */
3562 if (TREE_CODE (exp) == NOP_EXPR
3563 || TREE_CODE (exp) == CONVERT_EXPR
3564 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3565 outer_type = TREE_TYPE (exp);
3568 if (TREE_CODE (exp) == BIT_AND_EXPR)
3570 and_mask = TREE_OPERAND (exp, 1);
3571 exp = TREE_OPERAND (exp, 0);
3572 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3573 if (TREE_CODE (and_mask) != INTEGER_CST)
3577 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3578 punsignedp, pvolatilep, false);
3579 if ((inner == exp && and_mask == 0)
3580 || *pbitsize < 0 || offset != 0
3581 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3584 /* If the number of bits in the reference is the same as the bitsize of
3585 the outer type, then the outer type gives the signedness. Otherwise
3586 (in case of a small bitfield) the signedness is unchanged. */
3587 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3588 *punsignedp = TYPE_UNSIGNED (outer_type);
3590 /* Compute the mask to access the bitfield. */
3591 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3592 precision = TYPE_PRECISION (unsigned_type);
3594 mask = build_int_cst (unsigned_type, -1);
3595 mask = force_fit_type (mask, 0, false, false);
3597 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3598 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3600 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3602 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3603 fold_convert (unsigned_type, and_mask), mask);
3606 *pand_mask = and_mask;
3610 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bit positions. */
3614 all_ones_mask_p (tree mask, int size)
3616 tree type = TREE_TYPE (mask);
3617 unsigned int precision = TYPE_PRECISION (type);
3620 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3621 tmask = force_fit_type (tmask, 0, false, false);
3624 tree_int_cst_equal (mask,
3625 const_binop (RSHIFT_EXPR,
3626 const_binop (LSHIFT_EXPR, tmask,
3627 size_int (precision - size),
3629 size_int (precision - size), 0));
3632 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3633 represents the sign bit of EXP's type. If EXP represents a sign
3634 or zero extension, also test VAL against the unextended type.
3635 The return value is the (sub)expression whose sign bit is VAL,
3636 or NULL_TREE otherwise. */
3639 sign_bit_p (tree exp, tree val)
3641 unsigned HOST_WIDE_INT mask_lo, lo;
3642 HOST_WIDE_INT mask_hi, hi;
3646 /* Tree EXP must have an integral type. */
3647 t = TREE_TYPE (exp);
3648 if (! INTEGRAL_TYPE_P (t))
3651 /* Tree VAL must be an integer constant. */
3652 if (TREE_CODE (val) != INTEGER_CST
3653 || TREE_CONSTANT_OVERFLOW (val))
3656 width = TYPE_PRECISION (t);
3657 if (width > HOST_BITS_PER_WIDE_INT)
3659 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3662 mask_hi = ((unsigned HOST_WIDE_INT) -1
3663 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3669 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3672 mask_lo = ((unsigned HOST_WIDE_INT) -1
3673 >> (HOST_BITS_PER_WIDE_INT - width));
3676 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3677 treat VAL as if it were unsigned. */
3678 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3679 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3682 /* Handle extension from a narrower type. */
3683 if (TREE_CODE (exp) == NOP_EXPR
3684 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3685 return sign_bit_p (TREE_OPERAND (exp, 0), val);
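/* Illustrative sketch (editor's example, not part of the original
   source): the constants sign_bit_p is matching, for two common
   widths:

     8-bit type:   0x80                  (1 << 7)
     32-bit type:  0x80000000            ((unsigned) 1 << 31)

   Recognizing them lets fold turn a test such as
   "(x & 0x80000000) != 0" on a 32-bit signed x into the cheaper
   sign test "x < 0".  */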
3690 /* Subroutine for fold_truthop: determine if an operand is simple enough
3691 to be evaluated unconditionally. */
3694 simple_operand_p (tree exp)
3696 /* Strip any conversions that don't change the machine mode. */
3699 return (CONSTANT_CLASS_P (exp)
3700 || TREE_CODE (exp) == SSA_NAME
3702 && ! TREE_ADDRESSABLE (exp)
3703 && ! TREE_THIS_VOLATILE (exp)
3704 && ! DECL_NONLOCAL (exp)
3705 /* Don't regard global variables as simple. They may be
3706 allocated in ways unknown to the compiler (shared memory,
3707 #pragma weak, etc). */
3708 && ! TREE_PUBLIC (exp)
3709 && ! DECL_EXTERNAL (exp)
3710 /* Loading a static variable is unduly expensive, but global
3711 registers aren't expensive. */
3712 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3715 /* The following functions are subroutines to fold_range_test and allow it to
3716 try to change a logical combination of comparisons into a range test.
3719 For example, X == 2 || X == 3 || X == 4 || X == 5
3723 is changed to (unsigned) (X - 2) <= 3
3725 We describe each set of comparisons as being either inside or outside
3726 a range, using a variable named like IN_P, and then describe the
3727 range with a lower and upper bound. If one of the bounds is omitted,
3728 it represents either the highest or lowest value of the type.
3730 In the comments below, we represent a range by two numbers in brackets
3731 preceded by a "+" to designate being inside that range, or a "-" to
3732 designate being outside that range, so the condition can be inverted by
3733 flipping the prefix. An omitted bound is represented by a "-". For
3734 example, "- [-, 10]" means being outside the range starting at the lowest
3735 possible value and ending at 10, in other words, being greater than 10.
3736 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3739 We set up things so that the missing bounds are handled in a consistent
3740 manner so neither a missing bound nor "true" and "false" need to be
3741 handled using a special case. */
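/* Illustrative sketch (editor's example, not part of the original
   source): the example above, checked in plain C (assuming 32-bit
   unsigned):

     int slow (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
     int fast (int x) { return (unsigned) (x - 2) <= 3; }

   For x in [2, 5], x - 2 lands in [0, 3].  For any other x the
   unsigned subtraction wraps to a value above 3, e.g. x == 1 gives
   0xffffffff, so both functions agree everywhere.  */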
3743 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3744 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3745 and UPPER1_P are nonzero if the respective argument is an upper bound
3746 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3747 must be specified for a comparison. ARG1 will be converted to ARG0's
3748 type if both are specified. */
3751 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3752 tree arg1, int upper1_p)
3758 /* If neither arg represents infinity, do the normal operation.
3759 Else, if not a comparison, return infinity. Else handle the special
3760 comparison rules. Note that most of the cases below won't occur, but
3761 are handled for consistency. */
3763 if (arg0 != 0 && arg1 != 0)
3765 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3766 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3768 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3771 if (TREE_CODE_CLASS (code) != tcc_comparison)
3774 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3775 for neither. In real maths, we cannot assume open ended ranges are
3776 the same. But, this is computer arithmetic, where numbers are finite.
3777 We can therefore model each missing bound as a value Z,
3778 Z being greater than any representable number. This permits
3779 us to treat unbounded ranges as equal. */
3780 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3781 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3785 result = sgn0 == sgn1;
3788 result = sgn0 != sgn1;
3791 result = sgn0 < sgn1;
3794 result = sgn0 <= sgn1;
3797 result = sgn0 > sgn1;
3800 result = sgn0 >= sgn1;
3806 return constant_boolean_node (result, type);
3809 /* Given EXP, a logical expression, set the range it is testing into
3810 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3811 actually being tested. *PLOW and *PHIGH will be made of the same type
3812 as the returned expression. If EXP is not a comparison, we will most
3813 likely not be returning a useful value and range. */
3816 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3818 enum tree_code code;
3819 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3820 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3822 tree low, high, n_low, n_high;
3824 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3825 and see if we can refine the range. Some of the cases below may not
3826 happen, but it doesn't seem worth worrying about this. We "continue"
3827 the outer loop when we've changed something; otherwise we "break"
3828 the switch, which will "break" the while. */
3831 low = high = build_int_cst (TREE_TYPE (exp), 0);
3835 code = TREE_CODE (exp);
3836 exp_type = TREE_TYPE (exp);
3838 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3840 if (TREE_CODE_LENGTH (code) > 0)
3841 arg0 = TREE_OPERAND (exp, 0);
3842 if (TREE_CODE_CLASS (code) == tcc_comparison
3843 || TREE_CODE_CLASS (code) == tcc_unary
3844 || TREE_CODE_CLASS (code) == tcc_binary)
3845 arg0_type = TREE_TYPE (arg0);
3846 if (TREE_CODE_CLASS (code) == tcc_binary
3847 || TREE_CODE_CLASS (code) == tcc_comparison
3848 || (TREE_CODE_CLASS (code) == tcc_expression
3849 && TREE_CODE_LENGTH (code) > 1))
3850 arg1 = TREE_OPERAND (exp, 1);
3855 case TRUTH_NOT_EXPR:
3856 in_p = ! in_p, exp = arg0;
3859 case EQ_EXPR: case NE_EXPR:
3860 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3861 /* We can only do something if the range is testing for zero
3862 and if the second operand is an integer constant. Note that
3863 saying something is "in" the range we make is done by
3864 complementing IN_P since it will be set in the initial case of
3865 being not equal to zero; "out" is leaving it alone. */
3866 if (low == 0 || high == 0
3867 || ! integer_zerop (low) || ! integer_zerop (high)
3868 || TREE_CODE (arg1) != INTEGER_CST)
3873 case NE_EXPR: /* - [c, c] */
3876 case EQ_EXPR: /* + [c, c] */
3877 in_p = ! in_p, low = high = arg1;
3879 case GT_EXPR: /* - [-, c] */
3880 low = 0, high = arg1;
3882 case GE_EXPR: /* + [c, -] */
3883 in_p = ! in_p, low = arg1, high = 0;
3885 case LT_EXPR: /* - [c, -] */
3886 low = arg1, high = 0;
3888 case LE_EXPR: /* + [-, c] */
3889 in_p = ! in_p, low = 0, high = arg1;
3895 /* If this is an unsigned comparison, we also know that EXP is
3896 greater than or equal to zero. We base the range tests we make
3897 on that fact, so we record it here so we can parse existing
3898 range tests. We test arg0_type since often the return type
3899 of, e.g. EQ_EXPR, is boolean. */
3900 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3902 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3904 build_int_cst (arg0_type, 0),
3908 in_p = n_in_p, low = n_low, high = n_high;
3910 /* If the high bound is missing, but we have a nonzero low
3911 bound, reverse the range so it goes from zero to the low bound minus 1. */
3913 if (high == 0 && low && ! integer_zerop (low))
3916 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3917 integer_one_node, 0);
3918 low = build_int_cst (arg0_type, 0);
3926 /* (-x) IN [a,b] -> x in [-b, -a] */
3927 n_low = range_binop (MINUS_EXPR, exp_type,
3928 build_int_cst (exp_type, 0),
3930 n_high = range_binop (MINUS_EXPR, exp_type,
3931 build_int_cst (exp_type, 0),
3933 low = n_low, high = n_high;
3939 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3940 build_int_cst (exp_type, 1));
3943 case PLUS_EXPR: case MINUS_EXPR:
3944 if (TREE_CODE (arg1) != INTEGER_CST)
3947 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3948 move a constant to the other side. */
3949 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3952 /* If EXP is signed, any overflow in the computation is undefined,
3953 so we don't worry about it so long as our computations on
3954 the bounds don't overflow. For unsigned, overflow is defined
3955 and this is exactly the right thing. */
3956 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3957 arg0_type, low, 0, arg1, 0);
3958 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3959 arg0_type, high, 1, arg1, 0);
3960 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3961 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3964 /* Check for an unsigned range which has wrapped around the maximum
3965 value thus making n_high < n_low, and normalize it. */
3966 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3968 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3969 integer_one_node, 0);
3970 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3971 integer_one_node, 0);
3973 /* If the range is of the form +/- [ x+1, x ], we won't
3974 be able to normalize it. But then, it represents the
3975 whole range or the empty set, so make it +/- [ -, - ]. */
3977 if (tree_int_cst_equal (n_low, low)
3978 && tree_int_cst_equal (n_high, high))
3984 low = n_low, high = n_high;
3989 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3990 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3993 if (! INTEGRAL_TYPE_P (arg0_type)
3994 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3995 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3998 n_low = low, n_high = high;
4001 n_low = fold_convert (arg0_type, n_low);
4004 n_high = fold_convert (arg0_type, n_high);
4007 /* If we're converting arg0 from an unsigned type, to exp,
4008 a signed type, we will be doing the comparison as unsigned.
4009 The tests above have already verified that LOW and HIGH are both positive.
4012 So we have to ensure that we will handle large unsigned
4013 values the same way that the current signed bounds treat negative values. */
4016 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4019 tree equiv_type = lang_hooks.types.type_for_mode
4020 (TYPE_MODE (arg0_type), 1);
4022 /* A range without an upper bound is, naturally, unbounded.
4023 Since convert would have cropped a very large value, use
4024 the max value for the destination type. */
4026 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4027 : TYPE_MAX_VALUE (arg0_type);
4029 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4030 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4031 fold_convert (arg0_type,
4033 build_int_cst (arg0_type, 1));
4035 /* If the low bound is specified, "and" the range with the
4036 range for which the original unsigned value will be positive. */
4040 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4041 1, n_low, n_high, 1,
4042 fold_convert (arg0_type,
4047 in_p = (n_in_p == in_p);
4051 /* Otherwise, "or" the range with the range of the input
4052 that will be interpreted as negative. */
4053 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4054 0, n_low, n_high, 1,
4055 fold_convert (arg0_type,
4060 in_p = (in_p != n_in_p);
4065 low = n_low, high = n_high;
4075 /* If EXP is a constant, we can evaluate whether this is true or false. */
4076 if (TREE_CODE (exp) == INTEGER_CST)
4078 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4080 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4086 *pin_p = in_p, *plow = low, *phigh = high;
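/* Illustrative sketch (editor's example, not part of the original
   source): what make_range produces for two simple inputs, in the
   bracket notation introduced before range_binop:

     x > 10     (signed x)      ->  in_p == 0, range [-, 10]
     x >= 5     (unsigned x)    ->  in_p == 1, range [5, -]

   i.e. "outside [min, 10]" and "inside [5, max]" respectively; the
   results feed merge_ranges below.  */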
4090 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4091 type, TYPE, return an expression to test if EXP is in (or out of, depending
4092 on IN_P) the range. Return 0 if the test couldn't be created. */
4095 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4097 tree etype = TREE_TYPE (exp);
4100 #ifdef HAVE_canonicalize_funcptr_for_compare
4101 /* Disable this optimization for function pointer expressions
4102 on targets that require function pointer canonicalization. */
4103 if (HAVE_canonicalize_funcptr_for_compare
4104 && TREE_CODE (etype) == POINTER_TYPE
4105 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4111 value = build_range_check (type, exp, 1, low, high);
4113 return invert_truthvalue (value);
4118 if (low == 0 && high == 0)
4119 return build_int_cst (type, 1);
4122 return fold_build2 (LE_EXPR, type, exp,
4123 fold_convert (etype, high));
4126 return fold_build2 (GE_EXPR, type, exp,
4127 fold_convert (etype, low));
4129 if (operand_equal_p (low, high, 0))
4130 return fold_build2 (EQ_EXPR, type, exp,
4131 fold_convert (etype, low));
4133 if (integer_zerop (low))
4135 if (! TYPE_UNSIGNED (etype))
4137 etype = lang_hooks.types.unsigned_type (etype);
4138 high = fold_convert (etype, high);
4139 exp = fold_convert (etype, exp);
4141 return build_range_check (type, exp, 1, 0, high);
4144 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4145 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4147 unsigned HOST_WIDE_INT lo;
4151 prec = TYPE_PRECISION (etype);
4152 if (prec <= HOST_BITS_PER_WIDE_INT)
4155 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4159 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4160 lo = (unsigned HOST_WIDE_INT) -1;
4163 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4165 if (TYPE_UNSIGNED (etype))
4167 etype = lang_hooks.types.signed_type (etype);
4168 exp = fold_convert (etype, exp);
4170 return fold_build2 (GT_EXPR, type, exp,
4171 build_int_cst (etype, 0));
4175 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4176 This requires wrap-around arithmetic for the type of the expression. */
4177 switch (TREE_CODE (etype))
4180 /* There is no requirement that LOW be within the range of ETYPE
4181 if the latter is a subtype. It must, however, be within the base
4182 type of ETYPE. So be sure we do the subtraction in that type. */
4183 if (TREE_TYPE (etype))
4184 etype = TREE_TYPE (etype);
4189 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4190 TYPE_UNSIGNED (etype));
4197 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4198 if (TREE_CODE (etype) == INTEGER_TYPE
4199 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4201 tree utype, minv, maxv;
4203 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4204 for the type in question, as we rely on this here. */
4205 utype = lang_hooks.types.unsigned_type (etype);
4206 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4207 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4208 integer_one_node, 1);
4209 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4211 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4218 high = fold_convert (etype, high);
4219 low = fold_convert (etype, low);
4220 exp = fold_convert (etype, exp);
4222 value = const_binop (MINUS_EXPR, high, low, 0);
4224 if (value != 0 && !TREE_OVERFLOW (value))
4225 return build_range_check (type,
4226 fold_build2 (MINUS_EXPR, etype, exp, low),
4227 1, build_int_cst (etype, 0), value);
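/* Illustrative sketch (editor's example, not part of the original
   source): the two range-check shapes built above, as plain C.  For
   an unsigned char c:

     c >= 1 && c <= 127        becomes   (signed char) c > 0

   and in the general case:

     c >= low && c <= high     becomes
     (unsigned) (c - low) <= (unsigned) (high - low)

   where the second form relies on wrap-around: any c below LOW wraps
   past HIGH - LOW and fails the single comparison.  */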
4232 /* Return the predecessor of VAL in its type, handling the infinite case. */
4235 range_predecessor (tree val)
4237 tree type = TREE_TYPE (val);
4239 if (INTEGRAL_TYPE_P (type)
4240 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4243 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4246 /* Return the successor of VAL in its type, handling the infinite case. */
4249 range_successor (tree val)
4251 tree type = TREE_TYPE (val);
4253 if (INTEGRAL_TYPE_P (type)
4254 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4257 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4260 /* Given two ranges, see if we can merge them into one. Return 1 if we
4261 can, 0 if we can't. Set the output range into the specified parameters. */
4264 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4265 tree high0, int in1_p, tree low1, tree high1)
4273 int lowequal = ((low0 == 0 && low1 == 0)
4274 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4275 low0, 0, low1, 0)));
4276 int highequal = ((high0 == 0 && high1 == 0)
4277 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4278 high0, 1, high1, 1)));
4280 /* Make range 0 be the range that starts first, or ends last if they
4281 start at the same value. Swap them if it isn't. */
4282 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4285 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4286 high1, 1, high0, 1))))
4288 temp = in0_p, in0_p = in1_p, in1_p = temp;
4289 tem = low0, low0 = low1, low1 = tem;
4290 tem = high0, high0 = high1, high1 = tem;
4293 /* Now flag two cases, whether the ranges are disjoint or whether the
4294 second range is totally subsumed in the first. Note that the tests
4295 below are simplified by the ones above. */
4296 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4297 high0, 1, low1, 0));
4298 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4299 high1, 1, high0, 1));
4301 /* We now have four cases, depending on whether we are including or
4302 excluding the two ranges. */
4305 /* If they don't overlap, the result is false. If the second range
4306 is a subset it is the result. Otherwise, the range is from the start
4307 of the second to the end of the first. */
4309 in_p = 0, low = high = 0;
4311 in_p = 1, low = low1, high = high1;
4313 in_p = 1, low = low1, high = high0;
4316 else if (in0_p && ! in1_p)
4318 /* If they don't overlap, the result is the first range. If they are
4319 equal, the result is false. If the second range is a subset of the
4320 first, and the ranges begin at the same place, we go from just after
4321 the end of the second range to the end of the first. If the second
4322 range is not a subset of the first, or if it is a subset and both
4323 ranges end at the same place, the range starts at the start of the
4324 first range and ends just before the second range.
4325 Otherwise, we can't describe this as a single range. */
4327 in_p = 1, low = low0, high = high0;
4328 else if (lowequal && highequal)
4329 in_p = 0, low = high = 0;
4330 else if (subset && lowequal)
4332 low = range_successor (high1);
4336 else if (! subset || highequal)
4339 high = range_predecessor (low1);
4346 else if (! in0_p && in1_p)
4348 /* If they don't overlap, the result is the second range. If the second
4349 is a subset of the first, the result is false. Otherwise,
4350 the range starts just after the first range and ends at the
4351 end of the second. */
4353 in_p = 1, low = low1, high = high1;
4354 else if (subset || highequal)
4355 in_p = 0, low = high = 0;
4358 low = range_successor (high0);
4366 /* The case where we are excluding both ranges. Here the complex case
4367 is if they don't overlap. In that case, the only time we have a
4368 range is if they are adjacent. If the second is a subset of the
4369 first, the result is the first. Otherwise, the range to exclude
4370 starts at the beginning of the first range and ends at the end of the second. */
4374 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4375 range_successor (high0),
4377 in_p = 0, low = low0, high = high1;
4380 /* Canonicalize - [min, x] into - [-, x]. */
4381 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4382 switch (TREE_CODE (TREE_TYPE (low0)))
4385 if (TYPE_PRECISION (TREE_TYPE (low0))
4386 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4390 if (tree_int_cst_equal (low0,
4391 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4395 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4396 && integer_zerop (low0))
4403 /* Canonicalize - [x, max] into - [x, -]. */
4404 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4405 switch (TREE_CODE (TREE_TYPE (high1)))
4408 if (TYPE_PRECISION (TREE_TYPE (high1))
4409 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4413 if (tree_int_cst_equal (high1,
4414 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4418 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4419 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4421 integer_one_node, 1)))
4428 /* The ranges might be also adjacent between the maximum and
4429 minimum values of the given type. For
4430 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4431 return + [x + 1, y - 1]. */
4432 if (low0 == 0 && high1 == 0)
4434 low = range_successor (high0);
4435 high = range_predecessor (low1);
4436 if (low == 0 || high == 0)
4446 in_p = 0, low = low0, high = high0;
4448 in_p = 0, low = low0, high = high1;
4451 *pin_p = in_p, *plow = low, *phigh = high;
4452 return 1;
4453 }
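/* Editorial illustration (not part of the original sources): a classic
   use of the range machinery above.  The test "ch >= '0' && ch <= '9'"
   yields two ranges, + ['0', -] and + [-, '9'], which merge into the
   single range + ['0', '9']; build_range_check can then emit one
   unsigned comparison.  A hand-written equivalent, assuming 8-bit
   chars:  */
#if 0
static int is_digit_range (unsigned char c)
{
  /* Same truth value as c >= '0' && c <= '9', using one comparison.  */
  return (unsigned char) (c - '0') <= 9;
}
#endif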
4456 /* Subroutine of fold, looking inside expressions of the form
4457 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4458 of the COND_EXPR. This function is also used to optimize
4459 A op B ? C : A, by reversing the comparison first.
4461 Return a folded expression whose code is not a COND_EXPR
4462 anymore, or NULL_TREE if no folding opportunity is found. */
4464 static tree
4465 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4467 enum tree_code comp_code = TREE_CODE (arg0);
4468 tree arg00 = TREE_OPERAND (arg0, 0);
4469 tree arg01 = TREE_OPERAND (arg0, 1);
4470 tree arg1_type = TREE_TYPE (arg1);
4476 /* If we have A op 0 ? A : -A, consider applying the following
4477 transformations:
4479 A == 0? A : -A same as -A
4480 A != 0? A : -A same as A
4481 A >= 0? A : -A same as abs (A)
4482 A > 0? A : -A same as abs (A)
4483 A <= 0? A : -A same as -abs (A)
4484 A < 0? A : -A same as -abs (A)
4486 None of these transformations work for modes with signed
4487 zeros. If A is +/-0, the first two transformations will
4488 change the sign of the result (from +0 to -0, or vice
4489 versa). The last four will fix the sign of the result,
4490 even though the original expressions could be positive or
4491 negative, depending on the sign of A.
4493 Note that all these transformations are correct if A is
4494 NaN, since the two alternatives (A and -A) are also NaNs. */
4495 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4496 ? real_zerop (arg01)
4497 : integer_zerop (arg01))
4498 && ((TREE_CODE (arg2) == NEGATE_EXPR
4499 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4500 /* In the case that A is of the form X-Y, '-A' (arg2) may
4501 have already been folded to Y-X, check for that. */
4502 || (TREE_CODE (arg1) == MINUS_EXPR
4503 && TREE_CODE (arg2) == MINUS_EXPR
4504 && operand_equal_p (TREE_OPERAND (arg1, 0),
4505 TREE_OPERAND (arg2, 1), 0)
4506 && operand_equal_p (TREE_OPERAND (arg1, 1),
4507 TREE_OPERAND (arg2, 0), 0))))
4512 tem = fold_convert (arg1_type, arg1);
4513 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4516 return pedantic_non_lvalue (fold_convert (type, arg1));
4519 if (flag_trapping_math)
4520 break;
4524 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4525 arg1 = fold_convert (lang_hooks.types.signed_type
4526 (TREE_TYPE (arg1)), arg1);
4527 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4528 return pedantic_non_lvalue (fold_convert (type, tem));
4531 if (flag_trapping_math)
4532 break;
4535 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4536 arg1 = fold_convert (lang_hooks.types.signed_type
4537 (TREE_TYPE (arg1)), arg1);
4538 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4539 return negate_expr (fold_convert (type, tem));
4541 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
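/* Editorial illustration (not from the original sources): with
   -fno-signed-zeros semantics, the table above folds e.g.

     a >= 0 ? a : -a   to   abs (a)
     a <= 0 ? a : -a   to   -abs (a)

   and both remain correct for NaN operands, since a and -a are then
   both NaN.  */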
4545 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4546 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4547 both transformations are correct when A is NaN: A != 0
4548 is then true, and A == 0 is false. */
4550 if (integer_zerop (arg01) && integer_zerop (arg2))
4552 if (comp_code == NE_EXPR)
4553 return pedantic_non_lvalue (fold_convert (type, arg1));
4554 else if (comp_code == EQ_EXPR)
4555 return build_int_cst (type, 0);
4558 /* Try some transformations of A op B ? A : B.
4560 A == B? A : B same as B
4561 A != B? A : B same as A
4562 A >= B? A : B same as max (A, B)
4563 A > B? A : B same as max (B, A)
4564 A <= B? A : B same as min (A, B)
4565 A < B? A : B same as min (B, A)
4567 As above, these transformations don't work in the presence
4568 of signed zeros. For example, if A and B are zeros of
4569 opposite sign, the first two transformations will change
4570 the sign of the result. In the last four, the original
4571 expressions give different results for (A=+0, B=-0) and
4572 (A=-0, B=+0), but the transformed expressions do not.
4574 The first two transformations are correct if either A or B
4575 is a NaN. In the first transformation, the condition will
4576 be false, and B will indeed be chosen. In the case of the
4577 second transformation, the condition A != B will be true,
4578 and A will be chosen.
4580 The conversions to max() and min() are not correct if B is
4581 a number and A is not. The conditions in the original
4582 expressions will be false, so all four give B. The min()
4583 and max() versions would give a NaN instead. */
4584 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4585 /* Avoid these transformations if the COND_EXPR may be used
4586 as an lvalue in the C++ front-end. PR c++/19199. */
4588 || (strcmp (lang_hooks.name, "GNU C++") != 0
4589 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4590 || ! maybe_lvalue_p (arg1)
4591 || ! maybe_lvalue_p (arg2)))
4593 tree comp_op0 = arg00;
4594 tree comp_op1 = arg01;
4595 tree comp_type = TREE_TYPE (comp_op0);
4597 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4598 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4608 return pedantic_non_lvalue (fold_convert (type, arg2));
4610 return pedantic_non_lvalue (fold_convert (type, arg1));
4615 /* In C++ a ?: expression can be an lvalue, so put the
4616 operand which will be used if they are equal first
4617 so that we can convert this back to the
4618 corresponding COND_EXPR. */
4619 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4621 comp_op0 = fold_convert (comp_type, comp_op0);
4622 comp_op1 = fold_convert (comp_type, comp_op1);
4623 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4624 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4625 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4626 return pedantic_non_lvalue (fold_convert (type, tem));
4633 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4635 comp_op0 = fold_convert (comp_type, comp_op0);
4636 comp_op1 = fold_convert (comp_type, comp_op1);
4637 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4638 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4639 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4640 return pedantic_non_lvalue (fold_convert (type, tem));
4644 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4645 return pedantic_non_lvalue (fold_convert (type, arg2));
4648 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4649 return pedantic_non_lvalue (fold_convert (type, arg1));
4652 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
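/* Editorial illustration (not from the original sources): under
   !HONOR_NANS the A op B ? A : B table above rewrites

     a < b ? a : b    as   MIN_EXPR <b, a>

   with b first because b is the operand chosen when a == b, which
   preserves the C++ lvalue round-trip described above.  */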
4657 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4658 we might still be able to simplify this. For example,
4659 if C1 is one less or one more than C2, this might have started
4660 out as a MIN or MAX and been transformed by this function.
4661 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4663 if (INTEGRAL_TYPE_P (type)
4664 && TREE_CODE (arg01) == INTEGER_CST
4665 && TREE_CODE (arg2) == INTEGER_CST)
4669 /* We can replace A with C1 in this case. */
4670 arg1 = fold_convert (type, arg01);
4671 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4674 /* If C1 is C2 + 1, this is min(A, C2). */
4675 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4677 && operand_equal_p (arg01,
4678 const_binop (PLUS_EXPR, arg2,
4679 build_int_cst (type, 1), 0),
4681 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4686 /* If C1 is C2 - 1, this is min(A, C2). */
4687 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4689 && operand_equal_p (arg01,
4690 const_binop (MINUS_EXPR, arg2,
4691 build_int_cst (type, 1), 0),
4693 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4698 /* If C1 is C2 - 1, this is max(A, C2). */
4699 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4701 && operand_equal_p (arg01,
4702 const_binop (MINUS_EXPR, arg2,
4703 build_int_cst (type, 1), 0),
4705 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4710 /* If C1 is C2 + 1, this is max(A, C2). */
4711 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4713 && operand_equal_p (arg01,
4714 const_binop (PLUS_EXPR, arg2,
4715 build_int_cst (type, 1), 0),
4717 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4718 type, arg1, arg2));
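/* Editorial illustration (not from the original sources): these C1/C2
   cases recover tests that started out as MIN or MAX, e.g. for int x:

     x < 6 ? x : 5    becomes   min (x, 5)   (C1 == C2 + 1)
     x > 4 ? x : 5    becomes   max (x, 5)   (C1 == C2 - 1)  */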
4731 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4732 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4733 #endif
4735 /* EXP is some logical combination of boolean tests. See if we can
4736 merge it into some range test. Return the new tree if so. */
4738 static tree
4739 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4741 int or_op = (code == TRUTH_ORIF_EXPR
4742 || code == TRUTH_OR_EXPR);
4743 int in0_p, in1_p, in_p;
4744 tree low0, low1, low, high0, high1, high;
4745 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4746 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4749 /* If this is an OR operation, invert both sides; we will invert
4750 again at the end. */
4751 if (or_op)
4752 in0_p = ! in0_p, in1_p = ! in1_p;
4754 /* If both expressions are the same, if we can merge the ranges, and we
4755 can build the range test, return it or it inverted. If one of the
4756 ranges is always true or always false, consider it to be the same
4757 expression as the other. */
4758 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4759 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4760 in1_p, low1, high1)
4761 && 0 != (tem = (build_range_check (type,
4762 lhs != 0 ? lhs
4763 : rhs != 0 ? rhs : integer_zero_node,
4764 in_p, low, high))))
4765 return or_op ? invert_truthvalue (tem) : tem;
4767 /* On machines where the branch cost is high, if this is a
4768 short-circuited branch and the underlying object on both sides
4769 is the same, make a non-short-circuit operation. */
4770 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4771 && lhs != 0 && rhs != 0
4772 && (code == TRUTH_ANDIF_EXPR
4773 || code == TRUTH_ORIF_EXPR)
4774 && operand_equal_p (lhs, rhs, 0))
4776 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4777 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4778 which cases we can't do this. */
4779 if (simple_operand_p (lhs))
4780 return build2 (code == TRUTH_ANDIF_EXPR
4781 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4782 type, op0, op1);
4784 else if (lang_hooks.decls.global_bindings_p () == 0
4785 && ! CONTAINS_PLACEHOLDER_P (lhs))
4787 tree common = save_expr (lhs);
4789 if (0 != (lhs = build_range_check (type, common,
4790 or_op ? ! in0_p : in0_p,
4791 low0, high0))
4792 && (0 != (rhs = build_range_check (type, common,
4793 or_op ? ! in1_p : in1_p,
4794 low1, high1))))
4795 return build2 (code == TRUTH_ANDIF_EXPR
4796 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4797 type, lhs, rhs);
4804 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4805 bit value. Arrange things so the extra bits will be set to zero if and
4806 only if C is sign-extended to its full width. If MASK is nonzero,
4807 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4809 static tree
4810 unextend (tree c, int p, int unsignedp, tree mask)
4812 tree type = TREE_TYPE (c);
4813 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4816 if (p == modesize || unsignedp)
4817 return c;
4819 /* We work by getting just the sign bit into the low-order bit, then
4820 into the high-order bit, then sign-extend. We then XOR that value
4821 with C. */
4822 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4823 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4825 /* We must use a signed type in order to get an arithmetic right shift.
4826 However, we must also avoid introducing accidental overflows, so that
4827 a subsequent call to integer_zerop will work. Hence we must
4828 do the type conversion here. At this point, the constant is either
4829 zero or one, and the conversion to a signed type can never overflow.
4830 We could get an overflow if this conversion is done anywhere else. */
4831 if (TYPE_UNSIGNED (type))
4832 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4834 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4835 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4837 temp = const_binop (BIT_AND_EXPR, temp,
4838 fold_convert (TREE_TYPE (c), mask), 0);
4839 /* If necessary, convert the type back to match the type of C. */
4840 if (TYPE_UNSIGNED (type))
4841 temp = fold_convert (type, temp);
4843 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4844 }
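/* Editorial illustration (not part of the original sources): the
   shift/XOR sequence above is in essence the classic two's-complement
   sign-extension trick for a P-bit field.  A hand-written equivalent
   (hypothetical helper, 32-bit values assumed for simplicity):  */
#if 0
static int sign_extend_p_bits (unsigned int c, int p)
{
  unsigned int m = 1u << (p - 1);   /* sign bit of the P-bit field */
  return (int) ((c ^ m) - m);       /* high bits become copies of it */
}
#endif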
4846 /* Find ways of folding logical expressions of LHS and RHS:
4847 Try to merge two comparisons to the same innermost item.
4848 Look for range tests like "ch >= '0' && ch <= '9'".
4849 Look for combinations of simple terms on machines with expensive branches
4850 and evaluate the RHS unconditionally.
4852 For example, if we have p->a == 2 && p->b == 4 and we can make an
4853 object large enough to span both A and B, we can do this with a comparison
4854 against the object ANDed with the a mask.
4856 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4857 operations to do this with one comparison.
4859 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4860 function and the one above.
4862 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4863 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4865 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4866 two operands.
4868 We return the simplified tree or 0 if no optimization is possible. */
4870 static tree
4871 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4873 /* If this is the "or" of two comparisons, we can do something if
4874 the comparisons are NE_EXPR. If this is the "and", we can do something
4875 if the comparisons are EQ_EXPR. I.e.,
4876 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4878 WANTED_CODE is this operation code. For single bit fields, we can
4879 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4880 comparison for one-bit fields. */
4882 enum tree_code wanted_code;
4883 enum tree_code lcode, rcode;
4884 tree ll_arg, lr_arg, rl_arg, rr_arg;
4885 tree ll_inner, lr_inner, rl_inner, rr_inner;
4886 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4887 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4888 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4889 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4890 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4891 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4892 enum machine_mode lnmode, rnmode;
4893 tree ll_mask, lr_mask, rl_mask, rr_mask;
4894 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4895 tree l_const, r_const;
4896 tree lntype, rntype, result;
4897 int first_bit, end_bit;
4899 tree orig_lhs = lhs, orig_rhs = rhs;
4900 enum tree_code orig_code = code;
4902 /* Start by getting the comparison codes. Fail if anything is volatile.
4903 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4904 it were surrounded with a NE_EXPR. */
4906 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4907 return 0;
4909 lcode = TREE_CODE (lhs);
4910 rcode = TREE_CODE (rhs);
4912 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4914 lhs = build2 (NE_EXPR, truth_type, lhs,
4915 build_int_cst (TREE_TYPE (lhs), 0));
4919 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4921 rhs = build2 (NE_EXPR, truth_type, rhs,
4922 build_int_cst (TREE_TYPE (rhs), 0));
4926 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4927 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4928 return 0;
4930 ll_arg = TREE_OPERAND (lhs, 0);
4931 lr_arg = TREE_OPERAND (lhs, 1);
4932 rl_arg = TREE_OPERAND (rhs, 0);
4933 rr_arg = TREE_OPERAND (rhs, 1);
4935 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4936 if (simple_operand_p (ll_arg)
4937 && simple_operand_p (lr_arg))
4940 if (operand_equal_p (ll_arg, rl_arg, 0)
4941 && operand_equal_p (lr_arg, rr_arg, 0))
4943 result = combine_comparisons (code, lcode, rcode,
4944 truth_type, ll_arg, lr_arg);
4948 else if (operand_equal_p (ll_arg, rr_arg, 0)
4949 && operand_equal_p (lr_arg, rl_arg, 0))
4951 result = combine_comparisons (code, lcode,
4952 swap_tree_comparison (rcode),
4953 truth_type, ll_arg, lr_arg);
4959 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4960 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4962 /* If the RHS can be evaluated unconditionally and its operands are
4963 simple, it wins to evaluate the RHS unconditionally on machines
4964 with expensive branches. In this case, this isn't a comparison
4965 that can be merged. Avoid doing this if the RHS is a floating-point
4966 comparison since those can trap. */
4968 if (BRANCH_COST >= 2
4969 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4970 && simple_operand_p (rl_arg)
4971 && simple_operand_p (rr_arg))
4973 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4974 if (code == TRUTH_OR_EXPR
4975 && lcode == NE_EXPR && integer_zerop (lr_arg)
4976 && rcode == NE_EXPR && integer_zerop (rr_arg)
4977 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4978 return build2 (NE_EXPR, truth_type,
4979 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4981 build_int_cst (TREE_TYPE (ll_arg), 0));
4983 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4984 if (code == TRUTH_AND_EXPR
4985 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4986 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4987 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4988 return build2 (EQ_EXPR, truth_type,
4989 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4991 build_int_cst (TREE_TYPE (ll_arg), 0));
4993 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4995 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4996 return build2 (code, truth_type, lhs, rhs);
5001 /* See if the comparisons can be merged. Then get all the parameters for
5002 each side. */
5004 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5005 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5009 ll_inner = decode_field_reference (ll_arg,
5010 &ll_bitsize, &ll_bitpos, &ll_mode,
5011 &ll_unsignedp, &volatilep, &ll_mask,
5013 lr_inner = decode_field_reference (lr_arg,
5014 &lr_bitsize, &lr_bitpos, &lr_mode,
5015 &lr_unsignedp, &volatilep, &lr_mask,
5017 rl_inner = decode_field_reference (rl_arg,
5018 &rl_bitsize, &rl_bitpos, &rl_mode,
5019 &rl_unsignedp, &volatilep, &rl_mask,
5021 rr_inner = decode_field_reference (rr_arg,
5022 &rr_bitsize, &rr_bitpos, &rr_mode,
5023 &rr_unsignedp, &volatilep, &rr_mask,
5026 /* The inner operation on the lhs of each comparison must be the same
5027 if we are to be able to do anything. Then see if we have constants.
5028 If not, the same must be true for the rhs operands. */
5030 if (volatilep || ll_inner == 0 || rl_inner == 0
5031 || ! operand_equal_p (ll_inner, rl_inner, 0))
5032 return 0;
5034 if (TREE_CODE (lr_arg) == INTEGER_CST
5035 && TREE_CODE (rr_arg) == INTEGER_CST)
5036 l_const = lr_arg, r_const = rr_arg;
5037 else if (lr_inner == 0 || rr_inner == 0
5038 || ! operand_equal_p (lr_inner, rr_inner, 0))
5041 l_const = r_const = 0;
5043 /* If either comparison code is not correct for our logical operation,
5044 fail. However, we can convert a one-bit comparison against zero into
5045 the opposite comparison against that bit being set in the field. */
5047 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5048 if (lcode != wanted_code)
5050 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5052 /* Make the left operand unsigned, since we are only interested
5053 in the value of one bit. Otherwise we are doing the wrong
5054 thing below. */
5062 /* This is analogous to the code for l_const above. */
5063 if (rcode != wanted_code)
5065 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5074 /* After this point all optimizations will generate bit-field
5075 references, which we might not want. */
5076 if (! lang_hooks.can_use_bit_fields_p ())
5077 return 0;
5079 /* See if we can find a mode that contains both fields being compared on
5080 the left. If we can't, fail. Otherwise, update all constants and masks
5081 to be relative to a field of that size. */
5082 first_bit = MIN (ll_bitpos, rl_bitpos);
5083 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5084 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5085 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5087 if (lnmode == VOIDmode)
5088 return 0;
5090 lnbitsize = GET_MODE_BITSIZE (lnmode);
5091 lnbitpos = first_bit & ~ (lnbitsize - 1);
5092 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5093 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5095 if (BYTES_BIG_ENDIAN)
5097 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5098 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5101 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5102 size_int (xll_bitpos), 0);
5103 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5104 size_int (xrl_bitpos), 0);
5108 l_const = fold_convert (lntype, l_const);
5109 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5110 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5111 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5112 fold_build1 (BIT_NOT_EXPR,
5113 lntype, ll_mask),
5114 0)))
5116 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5118 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5123 r_const = fold_convert (lntype, r_const);
5124 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5125 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5126 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5127 fold_build1 (BIT_NOT_EXPR,
5128 lntype, rl_mask),
5129 0)))
5131 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5133 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5137 /* If the right sides are not constant, do the same for them. Also,
5138 disallow this optimization if a size or signedness mismatch occurs
5139 between the left and right sides. */
5142 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5143 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5144 /* Make sure the two fields on the right
5145 correspond to the left without being swapped. */
5146 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5147 return 0;
5149 first_bit = MIN (lr_bitpos, rr_bitpos);
5150 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5151 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5152 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5154 if (rnmode == VOIDmode)
5155 return 0;
5157 rnbitsize = GET_MODE_BITSIZE (rnmode);
5158 rnbitpos = first_bit & ~ (rnbitsize - 1);
5159 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5160 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5162 if (BYTES_BIG_ENDIAN)
5164 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5165 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5168 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5169 size_int (xlr_bitpos), 0);
5170 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5171 size_int (xrr_bitpos), 0);
5173 /* Make a mask that corresponds to both fields being compared.
5174 Do this for both items being compared. If the operands are the
5175 same size and the bits being compared are in the same position
5176 then we can do this by masking both and comparing the masked
5177 results. */
5178 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5179 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5180 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5182 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5183 ll_unsignedp || rl_unsignedp);
5184 if (! all_ones_mask_p (ll_mask, lnbitsize))
5185 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5187 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5188 lr_unsignedp || rr_unsignedp);
5189 if (! all_ones_mask_p (lr_mask, rnbitsize))
5190 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5192 return build2 (wanted_code, truth_type, lhs, rhs);
5195 /* There is still another way we can do something: If both pairs of
5196 fields being compared are adjacent, we may be able to make a wider
5197 field containing them both.
5199 Note that we still must mask the lhs/rhs expressions. Furthermore,
5200 the mask must be shifted to account for the shift done by
5201 make_bit_field_ref. */
5202 if ((ll_bitsize + ll_bitpos == rl_bitpos
5203 && lr_bitsize + lr_bitpos == rr_bitpos)
5204 || (ll_bitpos == rl_bitpos + rl_bitsize
5205 && lr_bitpos == rr_bitpos + rr_bitsize))
5209 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5210 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5211 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5212 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5214 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5215 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5216 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5217 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5219 /* Convert to the smaller type before masking out unwanted bits. */
5221 if (lntype != rntype)
5223 if (lnbitsize > rnbitsize)
5225 lhs = fold_convert (rntype, lhs);
5226 ll_mask = fold_convert (rntype, ll_mask);
5229 else if (lnbitsize < rnbitsize)
5231 rhs = fold_convert (lntype, rhs);
5232 lr_mask = fold_convert (lntype, lr_mask);
5237 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5238 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5240 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5241 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5243 return build2 (wanted_code, truth_type, lhs, rhs);
5249 /* Handle the case of comparisons with constants. If there is something in
5250 common between the masks, those bits of the constants must be the same.
5251 If not, the condition is always false. Test for this to avoid generating
5252 incorrect code below. */
5253 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5254 if (! integer_zerop (result)
5255 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5256 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5258 if (wanted_code == NE_EXPR)
5260 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5261 return constant_boolean_node (true, truth_type);
5265 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5266 return constant_boolean_node (false, truth_type);
5270 /* Construct the expression we will return. First get the component
5271 reference we will make. Unless the mask is all ones the width of
5272 that field, perform the mask operation. Then compare with the
5273 merged constant. */
5274 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5275 ll_unsignedp || rl_unsignedp);
5277 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5278 if (! all_ones_mask_p (ll_mask, lnbitsize))
5279 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5281 return build2 (wanted_code, truth_type, result,
5282 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
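/* Editorial illustration (not from the original sources): given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 3" can be merged by the code above
   into one load, mask and compare, conceptually

     (*(unsigned char *) p) == ((3 << 4) | 2)

   on a typical little-endian layout; the masks and positions computed
   above account for endianness and for bits outside the fields.  */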
5285 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5286 constant. */
5288 static tree
5289 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5292 enum tree_code op_code;
5293 tree comp_const = op1;
5295 int consts_equal, consts_lt;
5298 STRIP_SIGN_NOPS (arg0);
5300 op_code = TREE_CODE (arg0);
5301 minmax_const = TREE_OPERAND (arg0, 1);
5302 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5303 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5304 inner = TREE_OPERAND (arg0, 0);
5306 /* If something does not permit us to optimize, return the original tree. */
5307 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5308 || TREE_CODE (comp_const) != INTEGER_CST
5309 || TREE_CONSTANT_OVERFLOW (comp_const)
5310 || TREE_CODE (minmax_const) != INTEGER_CST
5311 || TREE_CONSTANT_OVERFLOW (minmax_const))
5314 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5315 and GT_EXPR, doing the rest with recursive calls using logical
5316 simplifications. */
5317 switch (code)
5318 {
5319 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5321 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5322 type, arg0, comp_const);
5323 if (tem)
5324 return invert_truthvalue (tem);
5328 case GE_EXPR:
5329 return
5330 fold_build2 (TRUTH_ORIF_EXPR, type,
5331 optimize_minmax_comparison
5332 (EQ_EXPR, type, arg0, comp_const),
5333 optimize_minmax_comparison
5334 (GT_EXPR, type, arg0, comp_const));
5336 case EQ_EXPR:
5337 if (op_code == MAX_EXPR && consts_equal)
5338 /* MAX (X, 0) == 0 -> X <= 0 */
5339 return fold_build2 (LE_EXPR, type, inner, comp_const);
5341 else if (op_code == MAX_EXPR && consts_lt)
5342 /* MAX (X, 0) == 5 -> X == 5 */
5343 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5345 else if (op_code == MAX_EXPR)
5346 /* MAX (X, 0) == -1 -> false */
5347 return omit_one_operand (type, integer_zero_node, inner);
5349 else if (consts_equal)
5350 /* MIN (X, 0) == 0 -> X >= 0 */
5351 return fold_build2 (GE_EXPR, type, inner, comp_const);
5353 else if (consts_lt)
5354 /* MIN (X, 0) == 5 -> false */
5355 return omit_one_operand (type, integer_zero_node, inner);
5357 else
5358 /* MIN (X, 0) == -1 -> X == -1 */
5359 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5361 case GT_EXPR:
5362 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5363 /* MAX (X, 0) > 0 -> X > 0
5364 MAX (X, 0) > 5 -> X > 5 */
5365 return fold_build2 (GT_EXPR, type, inner, comp_const);
5367 else if (op_code == MAX_EXPR)
5368 /* MAX (X, 0) > -1 -> true */
5369 return omit_one_operand (type, integer_one_node, inner);
5371 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5372 /* MIN (X, 0) > 0 -> false
5373 MIN (X, 0) > 5 -> false */
5374 return omit_one_operand (type, integer_zero_node, inner);
5376 else
5377 /* MIN (X, 0) > -1 -> X > -1 */
5378 return fold_build2 (GT_EXPR, type, inner, comp_const);
5385 /* T is an integer expression that is being multiplied, divided, or taken a
5386 modulus (CODE says which and what kind of divide or modulus) by a
5387 constant C. See if we can eliminate that operation by folding it with
5388 other operations already in T. WIDE_TYPE, if non-null, is a type that
5389 should be used for the computation if wider than our type.
5391 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5392 (X * 2) + (Y * 4). We must, however, be assured that either the original
5393 expression would not overflow or that overflow is undefined for the type
5394 in the language in question.
5396 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5397 the machine has a multiply-accumulate insn or that this is part of an
5398 addressing calculation.
5400 If we return a non-null expression, it is an equivalent form of the
5401 original computation, but need not be in the original type. */
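/* Editorial illustration (not from the original sources): for
   (X * 8 + Y * 16) / 4 the recursion distributes the division over the
   PLUS_EXPR and cancels it against each multiplication:

     (X * 8) / 4   ->  X * 2    (since 8 % 4 == 0)
     (Y * 16) / 4  ->  Y * 4    (since 16 % 4 == 0)

   giving X * 2 + Y * 4, valid only when the original sum cannot
   overflow or overflow is undefined in the source language.  */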
5403 static tree
5404 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5406 /* To avoid exponential search depth, refuse to allow recursion past
5407 three levels. Beyond that (1) it's highly unlikely that we'll find
5408 something interesting and (2) we've probably processed it before
5409 when we built the inner expression. */
5418 ret = extract_muldiv_1 (t, c, code, wide_type);
5424 static tree
5425 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5427 tree type = TREE_TYPE (t);
5428 enum tree_code tcode = TREE_CODE (t);
5429 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5430 > GET_MODE_SIZE (TYPE_MODE (type)))
5431 ? wide_type : type);
5433 int same_p = tcode == code;
5434 tree op0 = NULL_TREE, op1 = NULL_TREE;
5436 /* Don't deal with constants of zero here; they confuse the code below. */
5437 if (integer_zerop (c))
5438 return NULL_TREE;
5440 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5441 op0 = TREE_OPERAND (t, 0);
5443 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5444 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5446 /* Note that we need not handle conditional operations here since fold
5447 already handles those cases. So just do arithmetic here. */
5451 /* For a constant, we can always simplify if we are a multiply
5452 or (for divide and modulus) if it is a multiple of our constant. */
5453 if (code == MULT_EXPR
5454 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5455 return const_binop (code, fold_convert (ctype, t),
5456 fold_convert (ctype, c), 0);
5459 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5460 /* If op0 is an expression ... */
5461 if ((COMPARISON_CLASS_P (op0)
5462 || UNARY_CLASS_P (op0)
5463 || BINARY_CLASS_P (op0)
5464 || EXPRESSION_CLASS_P (op0))
5465 /* ... and is unsigned, and its type is smaller than ctype,
5466 then we cannot pass through as widening. */
5467 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5468 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5469 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5470 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5471 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5472 /* ... or this is a truncation (t is narrower than op0),
5473 then we cannot pass through this narrowing. */
5474 || (GET_MODE_SIZE (TYPE_MODE (type))
5475 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5476 /* ... or signedness changes for division or modulus,
5477 then we cannot pass through this conversion. */
5478 || (code != MULT_EXPR
5479 && (TYPE_UNSIGNED (ctype)
5480 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5483 /* Pass the constant down and see if we can make a simplification. If
5484 we can, replace this expression with the inner simplification for
5485 possible later conversion to our or some other type. */
5486 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5487 && TREE_CODE (t2) == INTEGER_CST
5488 && ! TREE_CONSTANT_OVERFLOW (t2)
5489 && (0 != (t1 = extract_muldiv (op0, t2, code,
5491 ? ctype : NULL_TREE))))
5496 /* If widening the type changes it from signed to unsigned, then we
5497 must avoid building ABS_EXPR itself as unsigned. */
5498 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5500 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5501 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5503 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5504 return fold_convert (ctype, t1);
5510 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5511 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5514 case MIN_EXPR: case MAX_EXPR:
5515 /* If widening the type changes the signedness, then we can't perform
5516 this optimization as that changes the result. */
5517 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5520 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5521 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5522 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5524 if (tree_int_cst_sgn (c) < 0)
5525 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5527 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5528 fold_convert (ctype, t2));
5532 case LSHIFT_EXPR: case RSHIFT_EXPR:
5533 /* If the second operand is constant, this is a multiplication
5534 or floor division, by a power of two, so we can treat it that
5535 way unless the multiplier or divisor overflows. Signed
5536 left-shift overflow is implementation-defined rather than
5537 undefined in C90, so do not convert signed left shift into
5538 multiplication. */
5539 if (TREE_CODE (op1) == INTEGER_CST
5540 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5541 /* const_binop may not detect overflow correctly,
5542 so check for it explicitly here. */
5543 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5544 && TREE_INT_CST_HIGH (op1) == 0
5545 && 0 != (t1 = fold_convert (ctype,
5546 const_binop (LSHIFT_EXPR,
5547 size_one_node,
5548 op1, 0)))
5549 && ! TREE_OVERFLOW (t1))
5550 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5551 ? MULT_EXPR : FLOOR_DIV_EXPR,
5552 ctype, fold_convert (ctype, op0), t1),
5553 c, code, wide_type);
5556 case PLUS_EXPR: case MINUS_EXPR:
5557 /* See if we can eliminate the operation on both sides. If we can, we
5558 can return a new PLUS or MINUS. If we can't, the only remaining
5559 cases where we can do anything are if the second operand is a
5560 constant. */
5561 t1 = extract_muldiv (op0, c, code, wide_type);
5562 t2 = extract_muldiv (op1, c, code, wide_type);
5563 if (t1 != 0 && t2 != 0
5564 && (code == MULT_EXPR
5565 /* If not multiplication, we can only do this if both operands
5566 are divisible by c. */
5567 || (multiple_of_p (ctype, op0, c)
5568 && multiple_of_p (ctype, op1, c))))
5569 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5570 fold_convert (ctype, t2));
5572 /* If this was a subtraction, negate OP1 and set it to be an addition.
5573 This simplifies the logic below. */
5574 if (tcode == MINUS_EXPR)
5575 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5577 if (TREE_CODE (op1) != INTEGER_CST)
5580 /* If either OP1 or C is negative, this optimization is not safe for
5581 some of the division and remainder types while for others we need
5582 to change the code. */
5583 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5585 if (code == CEIL_DIV_EXPR)
5586 code = FLOOR_DIV_EXPR;
5587 else if (code == FLOOR_DIV_EXPR)
5588 code = CEIL_DIV_EXPR;
5589 else if (code != MULT_EXPR
5590 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5594 /* If it's a multiply or a division/modulus operation of a multiple
5595 of our constant, do the operation and verify it doesn't overflow. */
5596 if (code == MULT_EXPR
5597 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5599 op1 = const_binop (code, fold_convert (ctype, op1),
5600 fold_convert (ctype, c), 0);
5601 /* We allow the constant to overflow with wrapping semantics. */
5602 if (op1 == 0
5603 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5604 break;
5609 /* If we have an unsigned type that is not a sizetype, we cannot widen
5610 the operation since it will change the result if the original
5611 computation overflowed. */
5612 if (TYPE_UNSIGNED (ctype)
5613 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5614 && ctype != type)
5615 break;
5617 /* If we were able to eliminate our operation from the first side,
5618 apply our operation to the second side and reform the PLUS. */
5619 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5620 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5622 /* The last case is if this is a multiply. In that case, we can
5623 apply the distributive law to commute the multiply and addition
5624 if the multiplication of the constants doesn't overflow. */
5625 if (code == MULT_EXPR)
5626 return fold_build2 (tcode, ctype,
5627 fold_build2 (code, ctype,
5628 fold_convert (ctype, op0),
5629 fold_convert (ctype, c)),
5635 /* We have a special case here if we are doing something like
5636 (C * 8) % 4 since we know that's zero. */
5637 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5638 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5639 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5640 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5641 return omit_one_operand (type, integer_zero_node, op0);
5643 /* ... fall through ... */
5645 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5646 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5647 /* If we can extract our operation from the LHS, do so and return a
5648 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5649 do something only if the second operand is a constant. */
5650 if (same_p
5651 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5652 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5653 fold_convert (ctype, op1));
5654 else if (tcode == MULT_EXPR && code == MULT_EXPR
5655 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5656 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5657 fold_convert (ctype, t1));
5658 else if (TREE_CODE (op1) != INTEGER_CST)
5661 /* If these are the same operation types, we can associate them
5662 assuming no overflow. */
5663 if (tcode == code
5664 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5665 fold_convert (ctype, c), 0))
5666 && ! TREE_OVERFLOW (t1))
5667 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5669 /* If these operations "cancel" each other, we have the main
5670 optimizations of this pass, which occur when either constant is a
5671 multiple of the other, in which case we replace this with either an
5672 operation of CODE or TCODE.
5674 If we have an unsigned type that is not a sizetype, we cannot do
5675 this since it will change the result if the original computation
5676 overflowed. */
5677 if ((! TYPE_UNSIGNED (ctype)
5678 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5680 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5681 || (tcode == MULT_EXPR
5682 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5683 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5685 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5686 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5687 fold_convert (ctype,
5688 const_binop (TRUNC_DIV_EXPR,
5690 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5691 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5692 fold_convert (ctype,
5693 const_binop (TRUNC_DIV_EXPR,
5705 /* Return a node which has the indicated constant VALUE (either 0 or
5706 1), and is of the indicated TYPE. */
5708 tree
5709 constant_boolean_node (int value, tree type)
5711 if (type == integer_type_node)
5712 return value ? integer_one_node : integer_zero_node;
5713 else if (type == boolean_type_node)
5714 return value ? boolean_true_node : boolean_false_node;
5716 return build_int_cst (type, value);
5720 /* Return true if expr looks like an ARRAY_REF and set base and
5721 offset to the appropriate trees. If there is no offset,
5722 offset is set to NULL_TREE. Base will be canonicalized to
5723 something you can get the element type from using
5724 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5725 in bytes to the base. */
5727 static bool
5728 extract_array_ref (tree expr, tree *base, tree *offset)
5730 /* One canonical form is a PLUS_EXPR with the first
5731 argument being an ADDR_EXPR with a possible NOP_EXPR
5733 if (TREE_CODE (expr) == PLUS_EXPR)
5735 tree op0 = TREE_OPERAND (expr, 0);
5736 tree inner_base, dummy1;
5737 /* Strip NOP_EXPRs here because the C front ends and/or
5738 folders may present us with (int *)&x.a + 4B. */
5740 if (extract_array_ref (op0, &inner_base, &dummy1))
5743 if (dummy1 == NULL_TREE)
5744 *offset = TREE_OPERAND (expr, 1);
5746 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5747 dummy1, TREE_OPERAND (expr, 1));
5751 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5752 which we transform into an ADDR_EXPR with appropriate
5753 offset. For other arguments to the ADDR_EXPR we assume
5754 zero offset and as such do not care about the ADDR_EXPR
5755 type and strip possible nops from it. */
5756 else if (TREE_CODE (expr) == ADDR_EXPR)
5758 tree op0 = TREE_OPERAND (expr, 0);
5759 if (TREE_CODE (op0) == ARRAY_REF)
5761 tree idx = TREE_OPERAND (op0, 1);
5762 *base = TREE_OPERAND (op0, 0);
5763 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5764 array_ref_element_size (op0));
5768 /* Handle array-to-pointer decay as &a. */
5769 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5770 *base = TREE_OPERAND (expr, 0);
5773 *offset = NULL_TREE;
5777 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5778 else if (SSA_VAR_P (expr)
5779 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5782 *offset = NULL_TREE;
5790 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5791 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5792 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5793 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5794 COND is the first argument to CODE; otherwise (as in the example
5795 given here), it is the second argument. TYPE is the type of the
5796 original expression. Return NULL_TREE if no simplification is
5797 possible. */
5799 static tree
5800 fold_binary_op_with_conditional_arg (enum tree_code code,
5801 tree type, tree op0, tree op1,
5802 tree cond, tree arg, int cond_first_p)
5804 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5805 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5806 tree test, true_value, false_value;
5807 tree lhs = NULL_TREE;
5808 tree rhs = NULL_TREE;
5810 /* This transformation is only worthwhile if we don't have to wrap
5811 arg in a SAVE_EXPR, and the operation can be simplified on at least
5812 one of the branches once it's pushed inside the COND_EXPR. */
5813 if (!TREE_CONSTANT (arg))
5814 return NULL_TREE;
5816 if (TREE_CODE (cond) == COND_EXPR)
5818 test = TREE_OPERAND (cond, 0);
5819 true_value = TREE_OPERAND (cond, 1);
5820 false_value = TREE_OPERAND (cond, 2);
5821 /* If this operand throws an exception, then it does not make
5822 sense to try to perform a logical or arithmetic operation
5823 involving it. */
5824 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5825 lhs = true_value;
5826 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5827 rhs = false_value;
5828 }
5829 else
5830 {
5831 tree testtype = TREE_TYPE (cond);
5833 true_value = constant_boolean_node (true, testtype);
5834 false_value = constant_boolean_node (false, testtype);
5837 arg = fold_convert (arg_type, arg);
5838 if (lhs == 0)
5839 {
5840 true_value = fold_convert (cond_type, true_value);
5841 if (cond_first_p)
5842 lhs = fold_build2 (code, type, true_value, arg);
5843 else
5844 lhs = fold_build2 (code, type, arg, true_value);
5845 }
5846 if (rhs == 0)
5847 {
5848 false_value = fold_convert (cond_type, false_value);
5849 if (cond_first_p)
5850 rhs = fold_build2 (code, type, false_value, arg);
5851 else
5852 rhs = fold_build2 (code, type, arg, false_value);
5853 }
5855 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5856 return fold_convert (type, test);
5860 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5862 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5863 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5864 ADDEND is the same as X.
5866 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5867 and finite. The problematic cases are when X is zero, and its mode
5868 has signed zeros. In the case of rounding towards -infinity,
5869 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5870 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5872 bool
5873 fold_real_zero_addition_p (tree type, tree addend, int negate)
5875 if (!real_zerop (addend))
5876 return false;
5878 /* Don't allow the fold with -fsignaling-nans. */
5879 if (HONOR_SNANS (TYPE_MODE (type)))
5880 return false;
5882 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5883 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5884 return true;
5886 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5887 if (TREE_CODE (addend) == REAL_CST
5888 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5889 negate = !negate;
5891 /* The mode has signed zeros, and we have to honor their sign.
5892 In this situation, there is only one case we can return true for.
5893 X - 0 is the same as X unless rounding towards -infinity is
5894 in effect. */
5895 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
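/* Editorial illustration (not from the original sources): why the sign
   of zero matters above.  In the default rounding mode, with x = -0.0:

     x + 0.0  ==  +0.0    so X + 0 must not fold to X,
     x - 0.0  ==  -0.0    so X - 0 may fold to X, except when rounding
                          towards -infinity (there 0.0 - 0.0 == -0.0).  */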
5898 /* Subroutine of fold() that checks comparisons of built-in math
5899 functions against real constants.
5901 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5902 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5903 is the type of the result and ARG0 and ARG1 are the operands of the
5904 comparison. ARG1 must be a TREE_REAL_CST.
5906 The function returns the constant folded tree if a simplification
5907 can be made, and NULL_TREE otherwise. */
5909 static tree
5910 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5911 tree type, tree arg0, tree arg1)
5915 if (BUILTIN_SQRT_P (fcode))
5917 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5918 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5920 c = TREE_REAL_CST (arg1);
5921 if (REAL_VALUE_NEGATIVE (c))
5923 /* sqrt(x) < y is always false, if y is negative. */
5924 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5925 return omit_one_operand (type, integer_zero_node, arg);
5927 /* sqrt(x) > y is always true, if y is negative and we
5928 don't care about NaNs, i.e. negative values of x. */
5929 if (code == NE_EXPR || !HONOR_NANS (mode))
5930 return omit_one_operand (type, integer_one_node, arg);
5932 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5933 return fold_build2 (GE_EXPR, type, arg,
5934 build_real (TREE_TYPE (arg), dconst0));
5936 else if (code == GT_EXPR || code == GE_EXPR)
5940 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5941 real_convert (&c2, mode, &c2);
5943 if (REAL_VALUE_ISINF (c2))
5945 /* sqrt(x) > y is x == +Inf, when y is very large. */
5946 if (HONOR_INFINITIES (mode))
5947 return fold_build2 (EQ_EXPR, type, arg,
5948 build_real (TREE_TYPE (arg), c2));
5950 /* sqrt(x) > y is always false, when y is very large
5951 and we don't care about infinities. */
5952 return omit_one_operand (type, integer_zero_node, arg);
5955 /* sqrt(x) > c is the same as x > c*c. */
5956 return fold_build2 (code, type, arg,
5957 build_real (TREE_TYPE (arg), c2));
5959 else if (code == LT_EXPR || code == LE_EXPR)
5963 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5964 real_convert (&c2, mode, &c2);
5966 if (REAL_VALUE_ISINF (c2))
5968 /* sqrt(x) < y is always true, when y is a very large
5969 value and we don't care about NaNs or Infinities. */
5970 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5971 return omit_one_operand (type, integer_one_node, arg);
5973 /* sqrt(x) < y is x != +Inf when y is very large and we
5974 don't care about NaNs. */
5975 if (! HONOR_NANS (mode))
5976 return fold_build2 (NE_EXPR, type, arg,
5977 build_real (TREE_TYPE (arg), c2));
5979 /* sqrt(x) < y is x >= 0 when y is very large and we
5980 don't care about Infinities. */
5981 if (! HONOR_INFINITIES (mode))
5982 return fold_build2 (GE_EXPR, type, arg,
5983 build_real (TREE_TYPE (arg), dconst0));
5985 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5986 if (lang_hooks.decls.global_bindings_p () != 0
5987 || CONTAINS_PLACEHOLDER_P (arg))
5988 return NULL_TREE;
5990 arg = save_expr (arg);
5991 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5992 fold_build2 (GE_EXPR, type, arg,
5993 build_real (TREE_TYPE (arg),
5994 dconst0)),
5995 fold_build2 (NE_EXPR, type, arg,
5996 build_real (TREE_TYPE (arg),
5997 c2)));
6000 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6001 if (! HONOR_NANS (mode))
6002 return fold_build2 (code, type, arg,
6003 build_real (TREE_TYPE (arg), c2));
6005 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6006 if (lang_hooks.decls.global_bindings_p () == 0
6007 && ! CONTAINS_PLACEHOLDER_P (arg))
6009 arg = save_expr (arg);
6010 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6011 fold_build2 (GE_EXPR, type, arg,
6012 build_real (TREE_TYPE (arg),
6013 dconst0)),
6014 fold_build2 (code, type, arg,
6015 build_real (TREE_TYPE (arg),
6016 c2)));
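/* Editorial illustration (not from the original sources): with finite
   math (no NaNs or infinities to honor), the sqrt rewrites above give

     sqrt (x) > 2.0    ->   x > 4.0
     sqrt (x) < 2.0    ->   x >= 0.0 && x < 4.0

   where the x >= 0.0 guard stands in for "sqrt's argument was not
   negative", since sqrt of a negative value would have been NaN.  */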
6024 /* Subroutine of fold() that optimizes comparisons against Infinities,
6025 either +Inf or -Inf.
6027 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6028 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6029 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6031 The function returns the constant folded tree if a simplification
6032 can be made, and NULL_TREE otherwise. */
6034 static tree
6035 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6037 enum machine_mode mode;
6038 REAL_VALUE_TYPE max;
6042 mode = TYPE_MODE (TREE_TYPE (arg0));
6044 /* For negative infinity swap the sense of the comparison. */
6045 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6046 if (neg)
6047 code = swap_tree_comparison (code);
6052 /* x > +Inf is always false, if we ignore sNaNs. */
6053 if (HONOR_SNANS (mode))
6054 return NULL_TREE;
6055 return omit_one_operand (type, integer_zero_node, arg0);
6058 /* x <= +Inf is always true, if we don't care about NaNs. */
6059 if (! HONOR_NANS (mode))
6060 return omit_one_operand (type, integer_one_node, arg0);
6062 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6063 if (lang_hooks.decls.global_bindings_p () == 0
6064 && ! CONTAINS_PLACEHOLDER_P (arg0))
6066 arg0 = save_expr (arg0);
6067 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6073 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6074 real_maxval (&max, neg, mode);
6075 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6076 arg0, build_real (TREE_TYPE (arg0), max));
6079 /* x < +Inf is always equal to x <= DBL_MAX. */
6080 real_maxval (&max, neg, mode);
6081 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6082 arg0, build_real (TREE_TYPE (arg0), max));
6085 /* x != +Inf is always equal to !(x > DBL_MAX). */
6086 real_maxval (&max, neg, mode);
6087 if (! HONOR_NANS (mode))
6088 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6089 arg0, build_real (TREE_TYPE (arg0), max));
6091 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6092 arg0, build_real (TREE_TYPE (arg0), max));
6093 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
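/* Editorial illustration (not from the original sources): because
   +Inf compares greater than every finite value, the cases above
   reduce, for double x,

     x >= HUGE_VAL   to   x > DBL_MAX
     x != HUGE_VAL   to   !(x > DBL_MAX)   (when NaNs must be honored)

   so no explicit infinity constant is needed in the folded form.  */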
6102 /* Subroutine of fold() that optimizes comparisons of a division by
6103 a nonzero integer constant against an integer constant, i.e.
6104 X / C1 op C2.
6106 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6107 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6108 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6110 The function returns the constant folded tree if a simplification
6111 can be made, and NULL_TREE otherwise. */
6113 static tree
6114 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6116 tree prod, tmp, hi, lo;
6117 tree arg00 = TREE_OPERAND (arg0, 0);
6118 tree arg01 = TREE_OPERAND (arg0, 1);
6119 unsigned HOST_WIDE_INT lpart;
6120 HOST_WIDE_INT hpart;
6121 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6125 /* We have to do this the hard way to detect unsigned overflow.
6126 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6127 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6128 TREE_INT_CST_HIGH (arg01),
6129 TREE_INT_CST_LOW (arg1),
6130 TREE_INT_CST_HIGH (arg1),
6131 &lpart, &hpart, unsigned_p);
6132 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6133 prod = force_fit_type (prod, -1, overflow, false);
6134 neg_overflow = false;
6138 tmp = int_const_binop (MINUS_EXPR, arg01,
6139 build_int_cst (TREE_TYPE (arg01), 1), 0);
6142 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6143 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6144 TREE_INT_CST_HIGH (prod),
6145 TREE_INT_CST_LOW (tmp),
6146 TREE_INT_CST_HIGH (tmp),
6147 &lpart, &hpart, unsigned_p);
6148 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6149 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6150 TREE_CONSTANT_OVERFLOW (prod));
6152 else if (tree_int_cst_sgn (arg01) >= 0)
6154 tmp = int_const_binop (MINUS_EXPR, arg01,
6155 build_int_cst (TREE_TYPE (arg01), 1), 0);
6156 switch (tree_int_cst_sgn (arg1))
6159 neg_overflow = true;
6160 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6165 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6170 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6180 /* A negative divisor reverses the relational operators. */
6181 code = swap_tree_comparison (code);
6183 tmp = int_const_binop (PLUS_EXPR, arg01,
6184 build_int_cst (TREE_TYPE (arg01), 1), 0);
6185 switch (tree_int_cst_sgn (arg1))
6188 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6193 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6198 neg_overflow = true;
6199 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6211 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6212 return omit_one_operand (type, integer_zero_node, arg00);
6213 if (TREE_OVERFLOW (hi))
6214 return fold_build2 (GE_EXPR, type, arg00, lo);
6215 if (TREE_OVERFLOW (lo))
6216 return fold_build2 (LE_EXPR, type, arg00, hi);
6217 return build_range_check (type, arg00, 1, lo, hi);
6220 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6221 return omit_one_operand (type, integer_one_node, arg00);
6222 if (TREE_OVERFLOW (hi))
6223 return fold_build2 (LT_EXPR, type, arg00, lo);
6224 if (TREE_OVERFLOW (lo))
6225 return fold_build2 (GT_EXPR, type, arg00, hi);
6226 return build_range_check (type, arg00, 0, lo, hi);
6229 if (TREE_OVERFLOW (lo))
6231 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6232 return omit_one_operand (type, tmp, arg00);
6234 return fold_build2 (LT_EXPR, type, arg00, lo);
6237 if (TREE_OVERFLOW (hi))
6239 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6240 return omit_one_operand (type, tmp, arg00);
6242 return fold_build2 (LE_EXPR, type, arg00, hi);
6245 if (TREE_OVERFLOW (hi))
6247 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6248 return omit_one_operand (type, tmp, arg00);
6250 return fold_build2 (GT_EXPR, type, arg00, hi);
6253 if (TREE_OVERFLOW (lo))
6255 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6256 return omit_one_operand (type, tmp, arg00);
6258 return fold_build2 (GE_EXPR, type, arg00, lo);
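/* Editorial illustration (not part of the original sources): for
   unsigned x, "x / 4 == 3" holds exactly for x in [12, 15], so the
   range check built above is one subtract-and-compare:  */
#if 0
static int div4_is_3 (unsigned int x)
{
  return x - 12u <= 3u;   /* same truth value as x / 4 == 3 */
}
#endif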
6268 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6269 equality/inequality test, then return a simplified form of the test
6270 using a sign test. Otherwise return NULL. TYPE is the desired
6271 result type. */
6273 static tree
6274 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6277 /* If this is testing a single bit, we can optimize the test. */
6278 if ((code == NE_EXPR || code == EQ_EXPR)
6279 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6280 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6282 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6283 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6284 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6286 if (arg00 != NULL_TREE
6287 /* This is only a win if casting to a signed type is cheap,
6288 i.e. when arg00's type is not a partial mode. */
6289 && TYPE_PRECISION (TREE_TYPE (arg00))
6290 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6292 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6293 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6294 result_type, fold_convert (stype, arg00),
6295 build_int_cst (stype, 0));
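/* E.g., if A has an 8-bit unsigned type, the mask 0x80 is exactly the
   sign bit of the corresponding signed type, so (A & 0x80) != 0 folds
   to (signed char) A < 0 and (A & 0x80) == 0 folds to
   (signed char) A >= 0 (illustrative types; any type whose precision
   fills its mode qualifies).  */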
6302 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6303 equality/inequality test, then return a simplified form of
6304 the test using shifts and logical operations. Otherwise return
6305 NULL. TYPE is the desired result type. */
6308 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6311 /* If this is testing a single bit, we can optimize the test. */
6312 if ((code == NE_EXPR || code == EQ_EXPR)
6313 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6314 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6316 tree inner = TREE_OPERAND (arg0, 0);
6317 tree type = TREE_TYPE (arg0);
6318 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6319 enum machine_mode operand_mode = TYPE_MODE (type);
6321 tree signed_type, unsigned_type, intermediate_type;
6324 /* First, see if we can fold the single bit test into a sign-bit
6326 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6331 /* Otherwise we have (A & C) != 0 where C is a single bit,
6332 convert that into ((A >> C2) & 1), where C2 = log2(C).
6333 Similarly for (A & C) == 0. */
6335 /* If INNER is a right shift of a constant and it plus BITNUM does
6336 not overflow, adjust BITNUM and INNER. */
6337 if (TREE_CODE (inner) == RSHIFT_EXPR
6338 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6339 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6340 && bitnum < TYPE_PRECISION (type)
6341 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6342 bitnum - TYPE_PRECISION (type)))
6344 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6345 inner = TREE_OPERAND (inner, 0);
6348 /* If we are going to be able to omit the AND below, we must do our
6349 operations as unsigned. If we must use the AND, we have a choice.
6350 Normally unsigned is faster, but for some machines signed is. */
6351 #ifdef LOAD_EXTEND_OP
6352 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6353 && !flag_syntax_only) ? 0 : 1;
6358 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6359 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6360 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6361 inner = fold_convert (intermediate_type, inner);
6364 inner = build2 (RSHIFT_EXPR, intermediate_type,
6365 inner, size_int (bitnum));
6367 one = build_int_cst (intermediate_type, 1);
6369 if (code == EQ_EXPR)
6370 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6372 /* Put the AND last so it can combine with more things. */
6373 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6375 /* Make sure to return the proper type. */
6376 inner = fold_convert (result_type, inner);
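  /* E.g., (A & 8) != 0 becomes ((A >> 3) & 1) in the intermediate type
     chosen above, while (A & 8) == 0 first inverts the shifted bit,
     giving (((A >> 3) ^ 1) & 1).  */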
6383 /* Check whether we are allowed to reorder operands arg0 and arg1,
6384 such that the evaluation of arg1 occurs before arg0. */
6387 reorder_operands_p (tree arg0, tree arg1)
6389 if (! flag_evaluation_order)
6391 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6393 return ! TREE_SIDE_EFFECTS (arg0)
6394 && ! TREE_SIDE_EFFECTS (arg1);
6397 /* Test whether it is preferable to swap two operands, ARG0 and
6398 ARG1, for example because ARG0 is an integer constant and ARG1
6399 isn't. If REORDER is true, only recommend swapping if we can
6400 evaluate the operands in reverse order. */
6403 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6405 STRIP_SIGN_NOPS (arg0);
6406 STRIP_SIGN_NOPS (arg1);
6408 if (TREE_CODE (arg1) == INTEGER_CST)
6410 if (TREE_CODE (arg0) == INTEGER_CST)
6413 if (TREE_CODE (arg1) == REAL_CST)
6415 if (TREE_CODE (arg0) == REAL_CST)
6418 if (TREE_CODE (arg1) == COMPLEX_CST)
6420 if (TREE_CODE (arg0) == COMPLEX_CST)
6423 if (TREE_CONSTANT (arg1))
6425 if (TREE_CONSTANT (arg0))
6431 if (reorder && flag_evaluation_order
6432 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6440 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6441 for commutative and comparison operators. Ensuring a canonical
6442 form allows the optimizers to find additional redundancies without
6443 having to explicitly check for both orderings. */
6444 if (TREE_CODE (arg0) == SSA_NAME
6445 && TREE_CODE (arg1) == SSA_NAME
6446 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6452 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6453 ARG0 is extended to a wider type. */
6456 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6458 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6460 tree shorter_type, outer_type;
6464 if (arg0_unw == arg0)
6466 shorter_type = TREE_TYPE (arg0_unw);
6468 #ifdef HAVE_canonicalize_funcptr_for_compare
6469 /* Disable this optimization if we're casting a function pointer
6470 type on targets that require function pointer canonicalization. */
6471 if (HAVE_canonicalize_funcptr_for_compare
6472 && TREE_CODE (shorter_type) == POINTER_TYPE
6473 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6477 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6480 arg1_unw = get_unwidened (arg1, shorter_type);
6482 /* If possible, express the comparison in the shorter mode. */
6483 if ((code == EQ_EXPR || code == NE_EXPR
6484 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6485 && (TREE_TYPE (arg1_unw) == shorter_type
6486 || (TREE_CODE (arg1_unw) == INTEGER_CST
6487 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6488 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6489 && int_fits_type_p (arg1_unw, shorter_type))))
6490 return fold_build2 (code, type, arg0_unw,
6491 fold_convert (shorter_type, arg1_unw));
6493 if (TREE_CODE (arg1_unw) != INTEGER_CST
6494 || TREE_CODE (shorter_type) != INTEGER_TYPE
6495 || !int_fits_type_p (arg1_unw, shorter_type))
6498 /* If we are comparing with an integer that does not fit into the range
6499 of the shorter type, the result is known. */
6500 outer_type = TREE_TYPE (arg1_unw);
6501 min = lower_bound_in_type (outer_type, shorter_type);
6502 max = upper_bound_in_type (outer_type, shorter_type);
6504 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6506 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6513 return omit_one_operand (type, integer_zero_node, arg0);
6518 return omit_one_operand (type, integer_one_node, arg0);
6524 return omit_one_operand (type, integer_one_node, arg0);
6526 return omit_one_operand (type, integer_zero_node, arg0);
6531 return omit_one_operand (type, integer_zero_node, arg0);
6533 return omit_one_operand (type, integer_one_node, arg0);
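/* As an illustration (types and bound chosen here): if X has type
   unsigned char, (int) X > 300 can never hold since the widened value
   is at most 255, so the whole comparison folds to constant zero,
   keeping X only for its side effects via omit_one_operand.  */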
6542 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6543 ARG0 just the signedness is changed. */
6546 fold_sign_changed_comparison (enum tree_code code, tree type,
6547 tree arg0, tree arg1)
6549 tree arg0_inner, tmp;
6550 tree inner_type, outer_type;
6552 if (TREE_CODE (arg0) != NOP_EXPR
6553 && TREE_CODE (arg0) != CONVERT_EXPR)
6556 outer_type = TREE_TYPE (arg0);
6557 arg0_inner = TREE_OPERAND (arg0, 0);
6558 inner_type = TREE_TYPE (arg0_inner);
6560 #ifdef HAVE_canonicalize_funcptr_for_compare
6561 /* Disable this optimization if we're casting a function pointer
6562 type on targets that require function pointer canonicalization. */
6563 if (HAVE_canonicalize_funcptr_for_compare
6564 && TREE_CODE (inner_type) == POINTER_TYPE
6565 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6569 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6572 if (TREE_CODE (arg1) != INTEGER_CST
6573 && !((TREE_CODE (arg1) == NOP_EXPR
6574 || TREE_CODE (arg1) == CONVERT_EXPR)
6575 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6578 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6583 if (TREE_CODE (arg1) == INTEGER_CST)
6585 tmp = build_int_cst_wide (inner_type,
6586 TREE_INT_CST_LOW (arg1),
6587 TREE_INT_CST_HIGH (arg1));
6588 arg1 = force_fit_type (tmp, 0,
6589 TREE_OVERFLOW (arg1),
6590 TREE_CONSTANT_OVERFLOW (arg1));
6593 arg1 = fold_convert (inner_type, arg1);
6595 return fold_build2 (code, type, arg0_inner, arg1);
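/* As an illustration: for a 32-bit int I, (unsigned int) I == 5
   becomes I == 5; only the signedness of the operand type changes and
   the constant is re-expressed in the inner type. Equality tests are
   always eligible, ordering tests only when the signedness agrees.  */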
6598 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6599 the step of the array. Reconstructs s and delta in the case of s * delta
6600 being an integer constant (and thus already folded).
6601 ADDR is the address. OP1 is the multiplicative expression.
6602 If the function succeeds, the new address expression is returned. Otherwise
6603 NULL_TREE is returned. */
6606 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6608 tree s, delta, step;
6609 tree ref = TREE_OPERAND (addr, 0), pref;
6613 /* Canonicalize op1 into a possibly non-constant delta
6614 and an INTEGER_CST s. */
6615 if (TREE_CODE (op1) == MULT_EXPR)
6617 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6622 if (TREE_CODE (arg0) == INTEGER_CST)
6627 else if (TREE_CODE (arg1) == INTEGER_CST)
6635 else if (TREE_CODE (op1) == INTEGER_CST)
6642 /* Act as if we had delta * 1. */
6644 s = integer_one_node;
6647 for (;; ref = TREE_OPERAND (ref, 0))
6649 if (TREE_CODE (ref) == ARRAY_REF)
6651 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6655 step = array_ref_element_size (ref);
6656 if (TREE_CODE (step) != INTEGER_CST)
6661 if (! tree_int_cst_equal (step, s))
6666 /* Check whether delta is a multiple of step. */
6667 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6676 if (!handled_component_p (ref))
6680 /* We found a suitable array reference. So copy everything up to it,
6681 and replace the index. */
6683 pref = TREE_OPERAND (addr, 0);
6684 ret = copy_node (pref);
6689 pref = TREE_OPERAND (pref, 0);
6690 TREE_OPERAND (pos, 0) = copy_node (pref);
6691 pos = TREE_OPERAND (pos, 0);
6694 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6695 fold_convert (itype,
6696 TREE_OPERAND (pos, 1)),
6697 fold_convert (itype, delta));
6699 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
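/* As an illustration (4-byte element size assumed): for int A[],
   &A[I] + J * 4 matches the array step and becomes &A[I + J], and the
   constant form &A[I] + 8 becomes &A[I + 2] because 8 is a multiple
   of the step.  */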
6703 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6704 means A >= Y && A != MAX, but in this case we know that
6705 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6708 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6710 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6712 if (TREE_CODE (bound) == LT_EXPR)
6713 a = TREE_OPERAND (bound, 0);
6714 else if (TREE_CODE (bound) == GT_EXPR)
6715 a = TREE_OPERAND (bound, 1);
6719 typea = TREE_TYPE (a);
6720 if (!INTEGRAL_TYPE_P (typea)
6721 && !POINTER_TYPE_P (typea))
6724 if (TREE_CODE (ineq) == LT_EXPR)
6726 a1 = TREE_OPERAND (ineq, 1);
6727 y = TREE_OPERAND (ineq, 0);
6729 else if (TREE_CODE (ineq) == GT_EXPR)
6731 a1 = TREE_OPERAND (ineq, 0);
6732 y = TREE_OPERAND (ineq, 1);
6737 if (TREE_TYPE (a1) != typea)
6740 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6741 if (!integer_onep (diff))
6744 return fold_build2 (GE_EXPR, type, a, y);
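/* E.g., in I < N && I + 1 > J, the bound I < N guarantees that I + 1
   does not wrap, so the sharp inequality I + 1 > J is safely replaced
   by the non-sharp I >= J.  */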
6747 /* Fold a sum or difference of at least one multiplication.
6748 Returns the folded tree or NULL if no simplification could be made. */
6751 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6753 tree arg00, arg01, arg10, arg11;
6754 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6756 /* (A * C) +- (B * C) -> (A+-B) * C.
6757 (A * C) +- A -> A * (C+-1).
6758 We are most concerned about the case where C is a constant,
6759 but other combinations show up during loop reduction. Since
6760 it is not difficult, try all four possibilities. */
6762 if (TREE_CODE (arg0) == MULT_EXPR)
6764 arg00 = TREE_OPERAND (arg0, 0);
6765 arg01 = TREE_OPERAND (arg0, 1);
6770 arg01 = build_one_cst (type);
6772 if (TREE_CODE (arg1) == MULT_EXPR)
6774 arg10 = TREE_OPERAND (arg1, 0);
6775 arg11 = TREE_OPERAND (arg1, 1);
6780 arg11 = build_one_cst (type);
6784 if (operand_equal_p (arg01, arg11, 0))
6785 same = arg01, alt0 = arg00, alt1 = arg10;
6786 else if (operand_equal_p (arg00, arg10, 0))
6787 same = arg00, alt0 = arg01, alt1 = arg11;
6788 else if (operand_equal_p (arg00, arg11, 0))
6789 same = arg00, alt0 = arg01, alt1 = arg10;
6790 else if (operand_equal_p (arg01, arg10, 0))
6791 same = arg01, alt0 = arg00, alt1 = arg11;
6793 /* No identical multiplicands; see if we can find a common
6794 power-of-two factor in non-power-of-two multiplies. This
6795 can help in multi-dimensional array access. */
6796 else if (host_integerp (arg01, 0)
6797 && host_integerp (arg11, 0))
6799 HOST_WIDE_INT int01, int11, tmp;
6802 int01 = TREE_INT_CST_LOW (arg01);
6803 int11 = TREE_INT_CST_LOW (arg11);
6805 /* Move min of absolute values to int11. */
6806 if ((int01 >= 0 ? int01 : -int01)
6807 < (int11 >= 0 ? int11 : -int11))
6809 tmp = int01, int01 = int11, int11 = tmp;
6810 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6817 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6819 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6820 build_int_cst (TREE_TYPE (arg00),
6825 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6830 return fold_build2 (MULT_EXPR, type,
6831 fold_build2 (code, type,
6832 fold_convert (type, alt0),
6833 fold_convert (type, alt1)),
6834 fold_convert (type, same));
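/* As an illustration (constants chosen here): A * C + B * C becomes
   (A + B) * C via the identical multiplicand, and A * 12 + B * 4,
   which has no common operand, still becomes (A * 3 + B) * 4 because
   the power of two 4 divides 12.  */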
6839 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6840 specified by EXPR into the buffer PTR of length LEN bytes.
6841 Return the number of bytes placed in the buffer, or zero upon failure. */
6845 native_encode_int (tree expr, unsigned char *ptr, int len)
6847 tree type = TREE_TYPE (expr);
6848 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6849 int byte, offset, word, words;
6850 unsigned char value;
6852 if (total_bytes > len)
6854 words = total_bytes / UNITS_PER_WORD;
6856 for (byte = 0; byte < total_bytes; byte++)
6858 int bitpos = byte * BITS_PER_UNIT;
6859 if (bitpos < HOST_BITS_PER_WIDE_INT)
6860 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6862 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6863 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6865 if (total_bytes > UNITS_PER_WORD)
6867 word = byte / UNITS_PER_WORD;
6868 if (WORDS_BIG_ENDIAN)
6869 word = (words - 1) - word;
6870 offset = word * UNITS_PER_WORD;
6871 if (BYTES_BIG_ENDIAN)
6872 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6874 offset += byte % UNITS_PER_WORD;
6877 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6878 ptr[offset] = value;
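  /* E.g., encoding the 16-bit constant 0x0102 stores the bytes
     { 0x02, 0x01 } on a little-endian target and { 0x01, 0x02 } on a
     big-endian one.  */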
6884 /* Subroutine of native_encode_expr. Encode the REAL_CST
6885 specified by EXPR into the buffer PTR of length LEN bytes.
6886 Return the number of bytes placed in the buffer, or zero upon failure. */
6890 native_encode_real (tree expr, unsigned char *ptr, int len)
6892 tree type = TREE_TYPE (expr);
6893 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6894 int byte, offset, word, words;
6895 unsigned char value;
6897 /* There are always 32 bits in each long, no matter the size of
6898 the host's long. We handle floating point representations with up to 192 bits. */
6902 if (total_bytes > len)
6904 words = total_bytes / UNITS_PER_WORD;
6906 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6908 for (byte = 0; byte < total_bytes; byte++)
6910 int bitpos = byte * BITS_PER_UNIT;
6911 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6913 if (total_bytes > UNITS_PER_WORD)
6915 word = byte / UNITS_PER_WORD;
6916 if (FLOAT_WORDS_BIG_ENDIAN)
6917 word = (words - 1) - word;
6918 offset = word * UNITS_PER_WORD;
6919 if (BYTES_BIG_ENDIAN)
6920 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6922 offset += byte % UNITS_PER_WORD;
6925 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6926 ptr[offset] = value;
6931 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6932 specified by EXPR into the buffer PTR of length LEN bytes.
6933 Return the number of bytes placed in the buffer, or zero upon failure. */
6937 native_encode_complex (tree expr, unsigned char *ptr, int len)
6942 part = TREE_REALPART (expr);
6943 rsize = native_encode_expr (part, ptr, len);
6946 part = TREE_IMAGPART (expr);
6947 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6950 return rsize + isize;
6954 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6955 specified by EXPR into the buffer PTR of length LEN bytes.
6956 Return the number of bytes placed in the buffer, or zero upon failure. */
6960 native_encode_vector (tree expr, unsigned char *ptr, int len)
6962 int i, size, offset, count;
6963 tree itype, elem, elements;
6966 elements = TREE_VECTOR_CST_ELTS (expr);
6967 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6968 itype = TREE_TYPE (TREE_TYPE (expr));
6969 size = GET_MODE_SIZE (TYPE_MODE (itype));
6970 for (i = 0; i < count; i++)
6974 elem = TREE_VALUE (elements);
6975 elements = TREE_CHAIN (elements);
6982 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6987 if (offset + size > len)
6989 memset (ptr+offset, 0, size);
6997 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6998 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6999 buffer PTR of length LEN bytes. Return the number of bytes
7000 placed in the buffer, or zero upon failure. */
7003 native_encode_expr (tree expr, unsigned char *ptr, int len)
7005 switch (TREE_CODE (expr))
7008 return native_encode_int (expr, ptr, len);
7011 return native_encode_real (expr, ptr, len);
7014 return native_encode_complex (expr, ptr, len);
7017 return native_encode_vector (expr, ptr, len);
7025 /* Subroutine of native_interpret_expr. Interpret the contents of
7026 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7027 If the buffer cannot be interpreted, return NULL_TREE. */
7030 native_interpret_int (tree type, unsigned char *ptr, int len)
7032 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7033 int byte, offset, word, words;
7034 unsigned char value;
7035 unsigned int HOST_WIDE_INT lo = 0;
7036 HOST_WIDE_INT hi = 0;
7038 if (total_bytes > len)
7040 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7042 words = total_bytes / UNITS_PER_WORD;
7044 for (byte = 0; byte < total_bytes; byte++)
7046 int bitpos = byte * BITS_PER_UNIT;
7047 if (total_bytes > UNITS_PER_WORD)
7049 word = byte / UNITS_PER_WORD;
7050 if (WORDS_BIG_ENDIAN)
7051 word = (words - 1) - word;
7052 offset = word * UNITS_PER_WORD;
7053 if (BYTES_BIG_ENDIAN)
7054 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7056 offset += byte % UNITS_PER_WORD;
7059 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7060 value = ptr[offset];
7062 if (bitpos < HOST_BITS_PER_WIDE_INT)
7063 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7065 hi |= (unsigned HOST_WIDE_INT) value
7066 << (bitpos - HOST_BITS_PER_WIDE_INT);
7069 return force_fit_type (build_int_cst_wide (type, lo, hi),
7074 /* Subroutine of native_interpret_expr. Interpret the contents of
7075 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7076 If the buffer cannot be interpreted, return NULL_TREE. */
7079 native_interpret_real (tree type, unsigned char *ptr, int len)
7081 enum machine_mode mode = TYPE_MODE (type);
7082 int total_bytes = GET_MODE_SIZE (mode);
7083 int byte, offset, word, words;
7084 unsigned char value;
7085 /* There are always 32 bits in each long, no matter the size of
7086 the host's long. We handle floating point representations with up to 192 bits. */
7091 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7092 if (total_bytes > len || total_bytes > 24)
7094 words = total_bytes / UNITS_PER_WORD;
7096 memset (tmp, 0, sizeof (tmp));
7097 for (byte = 0; byte < total_bytes; byte++)
7099 int bitpos = byte * BITS_PER_UNIT;
7100 if (total_bytes > UNITS_PER_WORD)
7102 word = byte / UNITS_PER_WORD;
7103 if (FLOAT_WORDS_BIG_ENDIAN)
7104 word = (words - 1) - word;
7105 offset = word * UNITS_PER_WORD;
7106 if (BYTES_BIG_ENDIAN)
7107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7109 offset += byte % UNITS_PER_WORD;
7112 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7113 value = ptr[offset];
7115 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7118 real_from_target (&r, tmp, mode);
7119 return build_real (type, r);
7123 /* Subroutine of native_interpret_expr. Interpret the contents of
7124 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7125 If the buffer cannot be interpreted, return NULL_TREE. */
7128 native_interpret_complex (tree type, unsigned char *ptr, int len)
7130 tree etype, rpart, ipart;
7133 etype = TREE_TYPE (type);
7134 size = GET_MODE_SIZE (TYPE_MODE (etype));
7137 rpart = native_interpret_expr (etype, ptr, size);
7140 ipart = native_interpret_expr (etype, ptr+size, size);
7143 return build_complex (type, rpart, ipart);
7147 /* Subroutine of native_interpret_expr. Interpret the contents of
7148 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7149 If the buffer cannot be interpreted, return NULL_TREE. */
7152 native_interpret_vector (tree type, unsigned char *ptr, int len)
7154 tree etype, elem, elements;
7157 etype = TREE_TYPE (type);
7158 size = GET_MODE_SIZE (TYPE_MODE (etype));
7159 count = TYPE_VECTOR_SUBPARTS (type);
7160 if (size * count > len)
7163 elements = NULL_TREE;
7164 for (i = count - 1; i >= 0; i--)
7166 elem = native_interpret_expr (etype, ptr+(i*size), size);
7169 elements = tree_cons (NULL_TREE, elem, elements);
7171 return build_vector (type, elements);
7175 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7176 the buffer PTR of length LEN as a constant of type TYPE. For
7177 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7178 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7179 return NULL_TREE. */
7182 native_interpret_expr (tree type, unsigned char *ptr, int len)
7184 switch (TREE_CODE (type))
7189 return native_interpret_int (type, ptr, len);
7192 return native_interpret_real (type, ptr, len);
7195 return native_interpret_complex (type, ptr, len);
7198 return native_interpret_vector (type, ptr, len);
7206 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7207 TYPE at compile-time. If we're unable to perform the conversion
7208 return NULL_TREE. */
7211 fold_view_convert_expr (tree type, tree expr)
7213 /* We support up to 512-bit values (for V8DFmode). */
7214 unsigned char buffer[64];
7217 /* Check that the host and target are sane. */
7218 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7221 len = native_encode_expr (expr, buffer, sizeof (buffer));
7225 return native_interpret_expr (type, buffer, len);
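/* As an illustration: on a target where int and float are both
   32 bits and float is IEEE single precision,
   VIEW_CONVERT_EXPR<int>(1.0f) encodes 1.0f into the buffer and
   reinterprets the bytes, yielding the integer constant 0x3f800000.  */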
7229 /* Fold a unary expression of code CODE and type TYPE with operand
7230 OP0. Return the folded expression if folding is successful.
7231 Otherwise, return NULL_TREE. */
7234 fold_unary (enum tree_code code, tree type, tree op0)
7238 enum tree_code_class kind = TREE_CODE_CLASS (code);
7240 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7241 && TREE_CODE_LENGTH (code) == 1);
7246 if (code == NOP_EXPR || code == CONVERT_EXPR
7247 || code == FLOAT_EXPR || code == ABS_EXPR)
7249 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7251 STRIP_SIGN_NOPS (arg0);
7255 /* Strip any conversions that don't change the mode. This
7256 is safe for every expression, except for a comparison
7257 expression because its signedness is derived from its operands.
7260 Note that this is done as an internal manipulation within
7261 the constant folder, in order to find the simplest
7262 representation of the arguments so that their form can be
7263 studied. In any case, the appropriate type conversions
7264 should be put back in the tree that will get out of the
7270 if (TREE_CODE_CLASS (code) == tcc_unary)
7272 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7273 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7274 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7275 else if (TREE_CODE (arg0) == COND_EXPR)
7277 tree arg01 = TREE_OPERAND (arg0, 1);
7278 tree arg02 = TREE_OPERAND (arg0, 2);
7279 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7280 arg01 = fold_build1 (code, type, arg01);
7281 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7282 arg02 = fold_build1 (code, type, arg02);
7283 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7286 /* If this was a conversion, and all we did was to move into
7287 inside the COND_EXPR, bring it back out. But leave it if
7288 it is a conversion from integer to integer and the
7289 result precision is no wider than a word since such a
7290 conversion is cheap and may be optimized away by combine,
7291 while it couldn't if it were outside the COND_EXPR. Then return
7292 so we don't get into an infinite recursion loop taking the
7293 conversion out and then back in. */
7295 if ((code == NOP_EXPR || code == CONVERT_EXPR
7296 || code == NON_LVALUE_EXPR)
7297 && TREE_CODE (tem) == COND_EXPR
7298 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7299 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7300 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7301 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7302 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7303 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7304 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7306 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7307 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7308 || flag_syntax_only))
7309 tem = build1 (code, type,
7311 TREE_TYPE (TREE_OPERAND
7312 (TREE_OPERAND (tem, 1), 0)),
7313 TREE_OPERAND (tem, 0),
7314 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7315 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7318 else if (COMPARISON_CLASS_P (arg0))
7320 if (TREE_CODE (type) == BOOLEAN_TYPE)
7322 arg0 = copy_node (arg0);
7323 TREE_TYPE (arg0) = type;
7326 else if (TREE_CODE (type) != INTEGER_TYPE)
7327 return fold_build3 (COND_EXPR, type, arg0,
7328 fold_build1 (code, type,
7330 fold_build1 (code, type,
7331 integer_zero_node));
7340 case FIX_TRUNC_EXPR:
7341 if (TREE_TYPE (op0) == type)
7344 /* If we have (type) (a CMP b) and type is an integral type, return
7345 new expression involving the new type. */
7346 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7347 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7348 TREE_OPERAND (op0, 1));
7350 /* Handle cases of two conversions in a row. */
7351 if (TREE_CODE (op0) == NOP_EXPR
7352 || TREE_CODE (op0) == CONVERT_EXPR)
7354 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7355 tree inter_type = TREE_TYPE (op0);
7356 int inside_int = INTEGRAL_TYPE_P (inside_type);
7357 int inside_ptr = POINTER_TYPE_P (inside_type);
7358 int inside_float = FLOAT_TYPE_P (inside_type);
7359 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7360 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7361 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7362 int inter_int = INTEGRAL_TYPE_P (inter_type);
7363 int inter_ptr = POINTER_TYPE_P (inter_type);
7364 int inter_float = FLOAT_TYPE_P (inter_type);
7365 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7366 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7367 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7368 int final_int = INTEGRAL_TYPE_P (type);
7369 int final_ptr = POINTER_TYPE_P (type);
7370 int final_float = FLOAT_TYPE_P (type);
7371 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7372 unsigned int final_prec = TYPE_PRECISION (type);
7373 int final_unsignedp = TYPE_UNSIGNED (type);
7375 /* In addition to the cases of two conversions in a row
7376 handled below, if we are converting something to its own
7377 type via an object of identical or wider precision, neither
7378 conversion is needed. */
7379 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7380 && (((inter_int || inter_ptr) && final_int)
7381 || (inter_float && final_float))
7382 && inter_prec >= final_prec)
7383 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7385 /* Likewise, if the intermediate and final types are either both
7386 float or both integer, we don't need the middle conversion if
7387 it is wider than the final type and doesn't change the signedness
7388 (for integers). Avoid this if the final type is a pointer
7389 since then we sometimes need the inner conversion. Likewise if
7390 the outer has a precision not equal to the size of its mode. */
7391 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7392 || (inter_float && inside_float)
7393 || (inter_vec && inside_vec))
7394 && inter_prec >= inside_prec
7395 && (inter_float || inter_vec
7396 || inter_unsignedp == inside_unsignedp)
7397 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7398 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7400 && (! final_vec || inter_prec == inside_prec))
7401 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7403 /* If we have a sign-extension of a zero-extended value, we can
7404 replace that by a single zero-extension. */
7405 if (inside_int && inter_int && final_int
7406 && inside_prec < inter_prec && inter_prec < final_prec
7407 && inside_unsignedp && !inter_unsignedp)
7408 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7410 /* Two conversions in a row are not needed unless:
7411 - some conversion is floating-point (overstrict for now), or
7412 - some conversion is a vector (overstrict for now), or
7413 - the intermediate type is narrower than both initial and final, or
7415 - the intermediate type and innermost type differ in signedness,
7416 and the outermost type is wider than the intermediate, or
7417 - the initial type is a pointer type and the precisions of the
7418 intermediate and final types differ, or
7419 - the final type is a pointer type and the precisions of the
7420 initial and intermediate types differ.
7421 - the final type is a pointer type and the initial type is not, or
7422 - the initial type is a pointer to an array and the final type is not. */
7424 if (! inside_float && ! inter_float && ! final_float
7425 && ! inside_vec && ! inter_vec && ! final_vec
7426 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7427 && ! (inside_int && inter_int
7428 && inter_unsignedp != inside_unsignedp
7429 && inter_prec < final_prec)
7430 && ((inter_unsignedp && inter_prec > inside_prec)
7431 == (final_unsignedp && final_prec > inter_prec))
7432 && ! (inside_ptr && inter_prec != final_prec)
7433 && ! (final_ptr && inside_prec != inter_prec)
7434 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7435 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7436 && final_ptr == inside_ptr
7438 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7439 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7440 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7443 /* Handle (T *)&A.B.C for A being of type T and B and C
7444 living at offset zero. This occurs frequently in
7445 C++ upcasting and then accessing the base. */
7446 if (TREE_CODE (op0) == ADDR_EXPR
7447 && POINTER_TYPE_P (type)
7448 && handled_component_p (TREE_OPERAND (op0, 0)))
7450 HOST_WIDE_INT bitsize, bitpos;
7452 enum machine_mode mode;
7453 int unsignedp, volatilep;
7454 tree base = TREE_OPERAND (op0, 0);
7455 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7456 &mode, &unsignedp, &volatilep, false);
7457 /* If the reference was to a (constant) zero offset, we can use
7458 the address of the base if it has the same base type
7459 as the result type. */
7460 if (! offset && bitpos == 0
7461 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7462 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7463 return fold_convert (type, build_fold_addr_expr (base));
7466 if (TREE_CODE (op0) == MODIFY_EXPR
7467 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7468 /* Detect assigning a bitfield. */
7469 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7470 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7472 /* Don't leave an assignment inside a conversion
7473 unless assigning a bitfield. */
7474 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7475 /* First do the assignment, then return converted constant. */
7476 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7477 TREE_NO_WARNING (tem) = 1;
7478 TREE_USED (tem) = 1;
7482 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7483 constant (if x has signed type, the sign bit cannot be set
7484 in c). This folds extension into the BIT_AND_EXPR. */
7485 if (INTEGRAL_TYPE_P (type)
7486 && TREE_CODE (type) != BOOLEAN_TYPE
7487 && TREE_CODE (op0) == BIT_AND_EXPR
7488 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7491 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7494 if (TYPE_UNSIGNED (TREE_TYPE (and))
7495 || (TYPE_PRECISION (type)
7496 <= TYPE_PRECISION (TREE_TYPE (and))))
7498 else if (TYPE_PRECISION (TREE_TYPE (and1))
7499 <= HOST_BITS_PER_WIDE_INT
7500 && host_integerp (and1, 1))
7502 unsigned HOST_WIDE_INT cst;
7504 cst = tree_low_cst (and1, 1);
7505 cst &= (HOST_WIDE_INT) -1
7506 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7507 change = (cst == 0);
7508 #ifdef LOAD_EXTEND_OP
7510 && !flag_syntax_only
7511 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7514 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7515 and0 = fold_convert (uns, and0);
7516 and1 = fold_convert (uns, and1);
7522 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7523 TREE_INT_CST_HIGH (and1));
7524 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7525 TREE_CONSTANT_OVERFLOW (and1));
7526 return fold_build2 (BIT_AND_EXPR, type,
7527 fold_convert (type, and0), tem);
7531 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7532 T2 being pointers to types of the same size. */
7533 if (POINTER_TYPE_P (type)
7534 && BINARY_CLASS_P (arg0)
7535 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7536 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7538 tree arg00 = TREE_OPERAND (arg0, 0);
7540 tree t1 = TREE_TYPE (arg00);
7541 tree tt0 = TREE_TYPE (t0);
7542 tree tt1 = TREE_TYPE (t1);
7543 tree s0 = TYPE_SIZE (tt0);
7544 tree s1 = TYPE_SIZE (tt1);
7546 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7547 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7548 TREE_OPERAND (arg0, 1));
7551 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7552 of the same precision, and X is an integer type not narrower than
7553 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7554 if (INTEGRAL_TYPE_P (type)
7555 && TREE_CODE (op0) == BIT_NOT_EXPR
7556 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7557 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7558 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7559 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7561 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7562 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7563 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7564 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7567 tem = fold_convert_const (code, type, arg0);
7568 return tem ? tem : NULL_TREE;
7570 case VIEW_CONVERT_EXPR:
7571 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7572 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7573 return fold_view_convert_expr (type, op0);
7576 tem = fold_negate_expr (arg0);
7578 return fold_convert (type, tem);
7582 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7583 return fold_abs_const (arg0, type);
7584 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7585 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7586 /* Convert fabs((double)float) into (double)fabsf(float). */
7587 else if (TREE_CODE (arg0) == NOP_EXPR
7588 && TREE_CODE (type) == REAL_TYPE)
7590 tree targ0 = strip_float_extensions (arg0);
7592 return fold_convert (type, fold_build1 (ABS_EXPR,
7596 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7597 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7600 /* Strip sign ops from argument. */
7601 if (TREE_CODE (type) == REAL_TYPE)
7603 tem = fold_strip_sign_ops (arg0);
7605 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7610 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7611 return fold_convert (type, arg0);
7612 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7614 tree itype = TREE_TYPE (type);
7615 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7616 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7617 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7619 if (TREE_CODE (arg0) == COMPLEX_CST)
7621 tree itype = TREE_TYPE (type);
7622 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7623 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7624 return build_complex (type, rpart, negate_expr (ipart));
7626 if (TREE_CODE (arg0) == CONJ_EXPR)
7627 return fold_convert (type, TREE_OPERAND (arg0, 0));
7631 if (TREE_CODE (arg0) == INTEGER_CST)
7632 return fold_not_const (arg0, type);
7633 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7634 return TREE_OPERAND (arg0, 0);
7635 /* Convert ~ (-A) to A - 1. */
7636 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7637 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7638 build_int_cst (type, 1));
7639 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7640 else if (INTEGRAL_TYPE_P (type)
7641 && ((TREE_CODE (arg0) == MINUS_EXPR
7642 && integer_onep (TREE_OPERAND (arg0, 1)))
7643 || (TREE_CODE (arg0) == PLUS_EXPR
7644 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7645 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7646 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7647 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7648 && (tem = fold_unary (BIT_NOT_EXPR, type,
7650 TREE_OPERAND (arg0, 0)))))
7651 return fold_build2 (BIT_XOR_EXPR, type, tem,
7652 fold_convert (type, TREE_OPERAND (arg0, 1)));
7653 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7654 && (tem = fold_unary (BIT_NOT_EXPR, type,
7656 TREE_OPERAND (arg0, 1)))))
7657 return fold_build2 (BIT_XOR_EXPR, type,
7658 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7662 case TRUTH_NOT_EXPR:
7663 /* The argument to invert_truthvalue must have Boolean type. */
7664 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7665 arg0 = fold_convert (boolean_type_node, arg0);
7667 /* Note that the operand of this must be an int
7668 and its values must be 0 or 1.
7669 ("true" is a fixed value perhaps depending on the language,
7670 but we don't handle values other than 1 correctly yet.) */
7671 tem = fold_truth_not_expr (arg0);
7674 return fold_convert (type, tem);
7677 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7678 return fold_convert (type, arg0);
7679 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7680 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7681 TREE_OPERAND (arg0, 1));
7682 if (TREE_CODE (arg0) == COMPLEX_CST)
7683 return fold_convert (type, TREE_REALPART (arg0));
7684 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7686 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7687 tem = fold_build2 (TREE_CODE (arg0), itype,
7688 fold_build1 (REALPART_EXPR, itype,
7689 TREE_OPERAND (arg0, 0)),
7690 fold_build1 (REALPART_EXPR, itype,
7691 TREE_OPERAND (arg0, 1)));
7692 return fold_convert (type, tem);
7694 if (TREE_CODE (arg0) == CONJ_EXPR)
7696 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7697 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7698 return fold_convert (type, tem);
7703 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7704 return fold_convert (type, integer_zero_node);
7705 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7706 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7707 TREE_OPERAND (arg0, 0));
7708 if (TREE_CODE (arg0) == COMPLEX_CST)
7709 return fold_convert (type, TREE_IMAGPART (arg0));
7710 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7712 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7713 tem = fold_build2 (TREE_CODE (arg0), itype,
7714 fold_build1 (IMAGPART_EXPR, itype,
7715 TREE_OPERAND (arg0, 0)),
7716 fold_build1 (IMAGPART_EXPR, itype,
7717 TREE_OPERAND (arg0, 1)));
7718 return fold_convert (type, tem);
7720 if (TREE_CODE (arg0) == CONJ_EXPR)
7722 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7723 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7724 return fold_convert (type, negate_expr (tem));
7730 } /* switch (code) */
7733 /* Fold a binary expression of code CODE and type TYPE with operands
7734 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7735 Return the folded expression if folding is successful. Otherwise,
7736 return NULL_TREE. */
7739 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7741 enum tree_code compl_code;
7743 if (code == MIN_EXPR)
7744 compl_code = MAX_EXPR;
7745 else if (code == MAX_EXPR)
7746 compl_code = MIN_EXPR;
7750 /* MIN (MAX (a, b), b) == b. */
7751 if (TREE_CODE (op0) == compl_code
7752 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7753 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7755 /* MIN (MAX (b, a), b) == b. */
7756 if (TREE_CODE (op0) == compl_code
7757 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7758 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7759 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7761 /* MIN (a, MAX (a, b)) == a. */
7762 if (TREE_CODE (op1) == compl_code
7763 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7764 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7765 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7767 /* MIN (a, MAX (b, a)) == a. */
7768 if (TREE_CODE (op1) == compl_code
7769 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7770 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7771 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7776 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7777 by changing CODE to reduce the magnitude of constants involved in
7778 ARG0 of the comparison.
7779 Returns a canonicalized comparison tree if a simplification was
7780 possible, otherwise returns NULL_TREE. */
7783 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7784 tree arg0, tree arg1)
7786 enum tree_code code0 = TREE_CODE (arg0);
7787 tree t, cst0 = NULL_TREE;
7791 /* Match A +- CST code arg1 and CST code arg1. */
7792 if (!(((code0 == MINUS_EXPR
7793 || code0 == PLUS_EXPR)
7794 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7795 || code0 == INTEGER_CST))
7798 /* Identify the constant in arg0 and its sign. */
7799 if (code0 == INTEGER_CST)
7802 cst0 = TREE_OPERAND (arg0, 1);
7803 sgn0 = tree_int_cst_sgn (cst0);
7805 /* Overflowed constants and zero will cause problems. */
7806 if (integer_zerop (cst0)
7807 || TREE_OVERFLOW (cst0))
7810 /* See if we can reduce the magnitude of the constant in
7811 arg0 by changing the comparison code. */
7812 if (code0 == INTEGER_CST)
7814 /* CST <= arg1 -> CST-1 < arg1. */
7815 if (code == LE_EXPR && sgn0 == 1)
7817 /* -CST < arg1 -> -CST-1 <= arg1. */
7818 else if (code == LT_EXPR && sgn0 == -1)
7820 /* CST > arg1 -> CST-1 >= arg1. */
7821 else if (code == GT_EXPR && sgn0 == 1)
7823 /* -CST >= arg1 -> -CST-1 > arg1. */
7824 else if (code == GE_EXPR && sgn0 == -1)
7828 /* arg1 code' CST' might be more canonical. */
7833 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7835 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7837 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7838 else if (code == GT_EXPR
7839 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7841 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7842 else if (code == LE_EXPR
7843 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7845 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7846 else if (code == GE_EXPR
7847 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7853 /* Now build the constant reduced in magnitude. */
7854 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7855 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7856 if (code0 != INTEGER_CST)
7857 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7859 /* If swapping might yield a more canonical form, do so. */
7861 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7863 return fold_build2 (code, type, t, arg1);
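/* E.g., X + 2 <= Y becomes X + 1 < Y, and the sole constant case
   3 <= Y becomes 2 < Y; each step shrinks the magnitude of the
   constant by one and is justified only because signed overflow is
   undefined for the types handled here.  */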
7866 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7867 overflow further. Try to decrease the magnitude of constants involved
7868 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7869 and put sole constants at the second argument position.
7870 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7873 maybe_canonicalize_comparison (enum tree_code code, tree type,
7874 tree arg0, tree arg1)
7878 /* In principle pointers also have undefined overflow behavior,
7879 but that causes problems elsewhere. */
7880 if ((flag_wrapv || flag_trapv)
7881 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7882 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7885 /* Try canonicalization by simplifying arg0. */
7886 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7890 /* Try canonicalization by simplifying arg1 using the swapped
7892 code = swap_tree_comparison (code);
7893 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7896 /* Subroutine of fold_binary. This routine performs all of the
7897 transformations that are common to the equality/inequality
7898 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7899 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7900 fold_binary itself should call fold_binary instead. Fold a comparison with
7901 tree code CODE and type TYPE with operands OP0 and OP1. Return
7902 the folded comparison or NULL_TREE. */
7905 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7907 tree arg0, arg1, tem;
7912 STRIP_SIGN_NOPS (arg0);
7913 STRIP_SIGN_NOPS (arg1);
7915 tem = fold_relational_const (code, type, arg0, arg1);
7916 if (tem != NULL_TREE)
7919 /* If one arg is a real or integer constant, put it last. */
7920 if (tree_swap_operands_p (arg0, arg1, true))
7921 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7923 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7924 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7925 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7926 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7927 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7928 && !(flag_wrapv || flag_trapv))
7929 && (TREE_CODE (arg1) == INTEGER_CST
7930 && !TREE_OVERFLOW (arg1)))
7932 tree const1 = TREE_OPERAND (arg0, 1);
7934 tree variable = TREE_OPERAND (arg0, 0);
7937 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7939 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7940 TREE_TYPE (arg1), const2, const1);
7941 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7942 && (TREE_CODE (lhs) != INTEGER_CST
7943 || !TREE_OVERFLOW (lhs)))
7944 return fold_build2 (code, type, variable, lhs);
7947 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7948 same object, then we can fold this to a comparison of the two offsets in
7949 the signed size type. This is possible because pointer arithmetic is
7950 restricted to remain within an object and overflow on pointer differences
7951 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7952 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7953 && !flag_wrapv && !flag_trapv)
7955 tree base0, offset0, base1, offset1;
7957 if (extract_array_ref (arg0, &base0, &offset0)
7958 && extract_array_ref (arg1, &base1, &offset1)
7959 && operand_equal_p (base0, base1, 0))
7961 tree signed_size_type_node;
7962 signed_size_type_node = signed_type_for (size_type_node);
7964 /* By converting to signed size type we cover middle-end pointer
7965 arithmetic which operates on unsigned pointer types of size
7966 type size and ARRAY_REF offsets which are properly sign or
7967 zero extended from their type in case it is narrower than the size type. */
7969 if (offset0 == NULL_TREE)
7970 offset0 = build_int_cst (signed_size_type_node, 0);
7972 offset0 = fold_convert (signed_size_type_node, offset0);
7973 if (offset1 == NULL_TREE)
7974 offset1 = build_int_cst (signed_size_type_node, 0);
7976 offset1 = fold_convert (signed_size_type_node, offset1);
7978 return fold_build2 (code, type, offset0, offset1);
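/* As an illustration: for two addresses &A[I] and &A[J] into the same
   array A, the pointer comparison &A[I] < &A[J] folds to a comparison
   of the two extracted offsets, both converted to the signed variant
   of the size type.  */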
7982 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
7983 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
7984 the resulting offset is smaller in absolute value than the original one. */
7986 if (!(flag_wrapv || flag_trapv)
7987 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7988 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7989 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7990 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
7991 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
7992 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7993 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
7995 tree const1 = TREE_OPERAND (arg0, 1);
7996 tree const2 = TREE_OPERAND (arg1, 1);
7997 tree variable1 = TREE_OPERAND (arg0, 0);
7998 tree variable2 = TREE_OPERAND (arg1, 0);
8001 /* Put the constant on the side where it doesn't overflow and is
8002 of lower absolute value than before. */
8003 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8004 ? MINUS_EXPR : PLUS_EXPR,
8006 if (!TREE_OVERFLOW (cst)
8007 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8008 return fold_build2 (code, type,
8010 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8013 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8014 ? MINUS_EXPR : PLUS_EXPR,
8016 if (!TREE_OVERFLOW (cst)
8017 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8018 return fold_build2 (code, type,
8019 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8024 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8028 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8030 tree targ0 = strip_float_extensions (arg0);
8031 tree targ1 = strip_float_extensions (arg1);
8032 tree newtype = TREE_TYPE (targ0);
8034 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8035 newtype = TREE_TYPE (targ1);
8037 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8038 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8039 return fold_build2 (code, type, fold_convert (newtype, targ0),
8040 fold_convert (newtype, targ1));
8042 /* (-a) CMP (-b) -> b CMP a */
8043 if (TREE_CODE (arg0) == NEGATE_EXPR
8044 && TREE_CODE (arg1) == NEGATE_EXPR)
8045 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8046 TREE_OPERAND (arg0, 0));
8048 if (TREE_CODE (arg1) == REAL_CST)
8050 REAL_VALUE_TYPE cst;
8051 cst = TREE_REAL_CST (arg1);
8053 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8054 if (TREE_CODE (arg0) == NEGATE_EXPR)
8055 return fold_build2 (swap_tree_comparison (code), type,
8056 TREE_OPERAND (arg0, 0),
8057 build_real (TREE_TYPE (arg1),
8058 REAL_VALUE_NEGATE (cst)));
8060 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8061 /* a CMP (-0) -> a CMP 0 */
8062 if (REAL_VALUE_MINUS_ZERO (cst))
8063 return fold_build2 (code, type, arg0,
8064 build_real (TREE_TYPE (arg1), dconst0));
8066 /* x != NaN is always true, other ops are always false. */
8067 if (REAL_VALUE_ISNAN (cst)
8068 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8070 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8071 return omit_one_operand (type, tem, arg0);
8074 /* Fold comparisons against infinity. */
8075 if (REAL_VALUE_ISINF (cst))
8077 tem = fold_inf_compare (code, type, arg0, arg1);
8078 if (tem != NULL_TREE)
8083 /* If this is a comparison of a real constant with a PLUS_EXPR
8084 or a MINUS_EXPR of a real constant, we can convert it into a
8085 comparison with a revised real constant as long as no overflow
8086 occurs when unsafe_math_optimizations are enabled. */
8087 if (flag_unsafe_math_optimizations
8088 && TREE_CODE (arg1) == REAL_CST
8089 && (TREE_CODE (arg0) == PLUS_EXPR
8090 || TREE_CODE (arg0) == MINUS_EXPR)
8091 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8092 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8093 ? MINUS_EXPR : PLUS_EXPR,
8094 arg1, TREE_OPERAND (arg0, 1), 0))
8095 && ! TREE_CONSTANT_OVERFLOW (tem))
8096 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8098 /* Likewise, we can simplify a comparison of a real constant with
8099 a MINUS_EXPR whose first operand is also a real constant, i.e.
8100 (c1 - x) < c2 becomes x > c1-c2. */
8101 if (flag_unsafe_math_optimizations
8102 && TREE_CODE (arg1) == REAL_CST
8103 && TREE_CODE (arg0) == MINUS_EXPR
8104 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8105 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8107 && ! TREE_CONSTANT_OVERFLOW (tem))
8108 return fold_build2 (swap_tree_comparison (code), type,
8109 TREE_OPERAND (arg0, 1), tem);
8111 /* Fold comparisons against built-in math functions. */
8112 if (TREE_CODE (arg1) == REAL_CST
8113 && flag_unsafe_math_optimizations
8114 && ! flag_errno_math)
8116 enum built_in_function fcode = builtin_mathfn_code (arg0);
8118 if (fcode != END_BUILTINS)
8120 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8121 if (tem != NULL_TREE)
8127 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8128 if (TREE_CONSTANT (arg1)
8129 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8130 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8131 /* This optimization is invalid for ordered comparisons
8132 if CONST+INCR overflows or if foo+incr might overflow.
8133 This optimization is invalid for floating point due to rounding.
8134 For pointer types we assume overflow doesn't happen. */
8135 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8136 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8137 && (code == EQ_EXPR || code == NE_EXPR))))
8139 tree varop, newconst;
8141 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8143 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8144 arg1, TREE_OPERAND (arg0, 1));
8145 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8146 TREE_OPERAND (arg0, 0),
8147 TREE_OPERAND (arg0, 1));
8151 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8152 arg1, TREE_OPERAND (arg0, 1));
8153 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8154 TREE_OPERAND (arg0, 0),
8155 TREE_OPERAND (arg0, 1));
8159 /* If VAROP is a reference to a bitfield, we must mask
8160 the constant by the width of the field. */
8161 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8162 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8163 && host_integerp (DECL_SIZE (TREE_OPERAND
8164 (TREE_OPERAND (varop, 0), 1)), 1))
8166 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8167 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8168 tree folded_compare, shift;
8170 /* First check whether the comparison would come out
8171 always the same. If we don't do that we would
8172 change the meaning with the masking. */
8173 folded_compare = fold_build2 (code, type,
8174 TREE_OPERAND (varop, 0), arg1);
8175 if (TREE_CODE (folded_compare) == INTEGER_CST)
8176 return omit_one_operand (type, folded_compare, varop);
8178 shift = build_int_cst (NULL_TREE,
8179 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8180 shift = fold_convert (TREE_TYPE (varop), shift);
8181 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8183 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8187 return fold_build2 (code, type, varop, newconst);
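/* E.g., FOO++ == 5 is rewritten as ++FOO == 6; the post-increment
   becomes a pre-increment and the constant absorbs the increment,
   which is safe for EQ/NE on integral types and for pointers, where
   overflow is assumed not to happen.  */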
8190 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8191 && (TREE_CODE (arg0) == NOP_EXPR
8192 || TREE_CODE (arg0) == CONVERT_EXPR))
8194 /* If we are widening one operand of an integer comparison,
8195 see if the other operand is similarly being widened. Perhaps we
8196 can do the comparison in the narrower type. */
8197 tem = fold_widened_comparison (code, type, arg0, arg1);
8201 /* Or if we are changing signedness. */
8202 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8207 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8208 constant, we can simplify it. */
8209 if (TREE_CODE (arg1) == INTEGER_CST
8210 && (TREE_CODE (arg0) == MIN_EXPR
8211 || TREE_CODE (arg0) == MAX_EXPR)
8212 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8214 tem = optimize_minmax_comparison (code, type, op0, op1);
8219 /* Simplify comparison of something with itself. (For IEEE
8220 floating-point, we can only do some of these simplifications.) */
8221 if (operand_equal_p (arg0, arg1, 0))
8222 {
8223 switch (code)
8224 {
8225 case EQ_EXPR:
8226 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8227 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8228 return constant_boolean_node (1, type);
8229 break;
8231 case GE_EXPR:
8232 case LE_EXPR:
8233 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8234 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8235 return constant_boolean_node (1, type);
8236 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8238 case NE_EXPR:
8239 /* For NE, we can only do this simplification if integer
8240 or we don't honor IEEE floating point NaNs.  */
8241 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8242 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8243 break;
8244 /* ... fall through ...  */
8245 case GT_EXPR:
8246 case LT_EXPR:
8247 return constant_boolean_node (0, type);
8248 default:
8249 gcc_unreachable ();
8250 }
8251 }
8253 /* If we are comparing an expression that just has comparisons
8254 of two integer values, arithmetic expressions of those comparisons,
8255 and constants, we can simplify it. There are only three cases
8256 to check: the two values can either be equal, the first can be
8257 greater, or the second can be greater. Fold the expression for
8258 those three values. Since each value must be 0 or 1, we have
8259 eight possibilities, each of which corresponds to the constant 0
8260 or 1 or one of the six possible comparisons.
8262 This handles common cases like (a > b) == 0 but also handles
8263 expressions like ((x > y) - (y > x)) > 0, which supposedly
8264 occur in macroized code. */
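/* Annotation, not in the original source: for '((x > y) - (y > x)) > 0'
   the three trial evaluations below yield 1 when x > y, 0 when x == y and
   0 when x < y, i.e. the 3-bit mask 4 (binary 100), so the whole
   expression folds to the single comparison 'x > y'.  */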
8266 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8267 {
8268 tree cval1 = 0, cval2 = 0;
8269 int save_p = 0;
8271 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8272 /* Don't handle degenerate cases here; they should already
8273 have been handled anyway. */
8274 && cval1 != 0 && cval2 != 0
8275 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8276 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8277 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8278 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8279 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8280 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8281 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8282 {
8283 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8284 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8286 /* We can't just pass T to eval_subst in case cval1 or cval2
8287 was the same as ARG1. */
8289 tree high_result
8290 = fold_build2 (code, type,
8291 eval_subst (arg0, cval1, maxval,
8292 cval2, minval),
8293 arg1);
8294 tree equal_result
8295 = fold_build2 (code, type,
8296 eval_subst (arg0, cval1, maxval,
8297 cval2, maxval),
8298 arg1);
8299 tree low_result
8300 = fold_build2 (code, type,
8301 eval_subst (arg0, cval1, minval,
8302 cval2, maxval),
8303 arg1);
8305 /* All three of these results should be 0 or 1. Confirm they are.
8306 Then use those values to select the proper code to use. */
8308 if (TREE_CODE (high_result) == INTEGER_CST
8309 && TREE_CODE (equal_result) == INTEGER_CST
8310 && TREE_CODE (low_result) == INTEGER_CST)
8311 {
8312 /* Make a 3-bit mask with the high-order bit being the
8313 value for `>', the next for `=', and the low for `<'.  */
8314 switch ((integer_onep (high_result) * 4)
8315 + (integer_onep (equal_result) * 2)
8316 + integer_onep (low_result))
8317 {
8318 case 0:
8319 /* Always false.  */
8320 return omit_one_operand (type, integer_zero_node, arg0);
8321 case 1:
8322 code = LT_EXPR;
8323 break;
8324 case 2:
8325 code = EQ_EXPR;
8326 break;
8327 case 3:
8328 code = LE_EXPR;
8329 break;
8330 case 4:
8331 code = GT_EXPR;
8332 break;
8333 case 5:
8334 code = NE_EXPR;
8335 break;
8336 case 6:
8337 code = GE_EXPR;
8338 break;
8339 case 7:
8340 /* Always true.  */
8341 return omit_one_operand (type, integer_one_node, arg0);
8342 }
8344 if (save_p)
8345 return save_expr (build2 (code, type, cval1, cval2));
8346 return fold_build2 (code, type, cval1, cval2);
8347 }
8348 }
8349 }
8351 /* Fold a comparison of the address of COMPONENT_REFs with the same
8352 type and component to a comparison of the address of the base
8353 object. In short, &x->a OP &y->a to x OP y and
8354 &x->a OP &y.a to x OP &y.  */
8355 if (TREE_CODE (arg0) == ADDR_EXPR
8356 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8357 && TREE_CODE (arg1) == ADDR_EXPR
8358 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8359 {
8360 tree cref0 = TREE_OPERAND (arg0, 0);
8361 tree cref1 = TREE_OPERAND (arg1, 0);
8362 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8363 {
8364 tree op0 = TREE_OPERAND (cref0, 0);
8365 tree op1 = TREE_OPERAND (cref1, 0);
8366 return fold_build2 (code, type,
8367 build_fold_addr_expr (op0),
8368 build_fold_addr_expr (op1));
8369 }
8370 }
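/* Annotation, not in the original source: given
   'struct s { int a; } *x, *y;', the comparison '&x->a == &y->a' reduces
   here to 'x == y', since both COMPONENT_REFs name the same FIELD_DECL.  */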
8372 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8373 into a single range test. */
8374 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8375 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8376 && TREE_CODE (arg1) == INTEGER_CST
8377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8378 && !integer_zerop (TREE_OPERAND (arg0, 1))
8379 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8380 && !TREE_OVERFLOW (arg1))
8381 {
8382 tem = fold_div_compare (code, type, arg0, arg1);
8383 if (tem != NULL_TREE)
8384 return tem;
8385 }
8387 /* Fold ~X op ~Y as Y op X. */
8388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8389 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8390 return fold_build2 (code, type,
8391 TREE_OPERAND (arg1, 0),
8392 TREE_OPERAND (arg0, 0));
8394 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8395 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8396 && TREE_CODE (arg1) == INTEGER_CST)
8397 return fold_build2 (swap_tree_comparison (code), type,
8398 TREE_OPERAND (arg0, 0),
8399 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
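/* Annotation, not in the original source: e.g. '~x < 5' becomes
   'x > ~5' (i.e. 'x > -6'), using ~x OP c == x OP' ~c with OP' the
   swapped comparison; both folds rely on bitwise complement being
   order-reversing (~z == -z - 1).  */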
8405 /* Subroutine of fold_binary. Optimize complex multiplications of the
8406 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8407 argument EXPR represents the expression "z" of type TYPE. */
8409 static tree
8410 fold_mult_zconjz (tree type, tree expr)
8411 {
8412 tree itype = TREE_TYPE (type);
8413 tree rpart, ipart, tem;
8415 if (TREE_CODE (expr) == COMPLEX_EXPR)
8416 {
8417 rpart = TREE_OPERAND (expr, 0);
8418 ipart = TREE_OPERAND (expr, 1);
8419 }
8420 else if (TREE_CODE (expr) == COMPLEX_CST)
8421 {
8422 rpart = TREE_REALPART (expr);
8423 ipart = TREE_IMAGPART (expr);
8424 }
8425 else
8426 {
8427 expr = save_expr (expr);
8428 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8429 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8430 }
8432 rpart = save_expr (rpart);
8433 ipart = save_expr (ipart);
8434 tem = fold_build2 (PLUS_EXPR, itype,
8435 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8436 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8437 return fold_build2 (COMPLEX_EXPR, type, tem,
8438 fold_convert (itype, integer_zero_node));
8439 }
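/* Worked example, not in the original source: for z = a + b*i this builds
   (a*a + b*b) + 0*i, matching
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b.  */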
8442 /* Fold a binary expression of code CODE and type TYPE with operands
8443 OP0 and OP1. Return the folded expression if folding is
8444 successful. Otherwise, return NULL_TREE. */
8446 tree
8447 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8448 {
8449 enum tree_code_class kind = TREE_CODE_CLASS (code);
8450 tree arg0, arg1, tem;
8451 tree t1 = NULL_TREE;
8453 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8454 && TREE_CODE_LENGTH (code) == 2
8455 && op0 != NULL_TREE
8456 && op1 != NULL_TREE);
8458 arg0 = op0;
8459 arg1 = op1;
8461 /* Strip any conversions that don't change the mode. This is
8462 safe for every expression, except for a comparison expression
8463 because its signedness is derived from its operands. So, in
8464 the latter case, only strip conversions that don't change the
8465 signedness.
8467 Note that this is done as an internal manipulation within the
8468 constant folder, in order to find the simplest representation
8469 of the arguments so that their form can be studied.  In any
8470 case, the appropriate type conversions should be put back in
8471 the tree that will get out of the constant folder. */
8473 if (kind == tcc_comparison)
8474 {
8475 STRIP_SIGN_NOPS (arg0);
8476 STRIP_SIGN_NOPS (arg1);
8477 }
8478 else
8479 {
8480 STRIP_NOPS (arg0);
8481 STRIP_NOPS (arg1);
8482 }
8484 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8485 constant but we can't do arithmetic on them. */
8486 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8487 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8488 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8489 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8490 {
8491 if (kind == tcc_binary)
8492 tem = const_binop (code, arg0, arg1, 0);
8493 else if (kind == tcc_comparison)
8494 tem = fold_relational_const (code, type, arg0, arg1);
8495 else
8496 tem = NULL_TREE;
8498 if (tem != NULL_TREE)
8499 {
8500 if (TREE_TYPE (tem) != type)
8501 tem = fold_convert (type, tem);
8502 return tem;
8503 }
8504 }
8506 /* If this is a commutative operation, and ARG0 is a constant, move it
8507 to ARG1 to reduce the number of tests below. */
8508 if (commutative_tree_code (code)
8509 && tree_swap_operands_p (arg0, arg1, true))
8510 return fold_build2 (code, type, op1, op0);
8512 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8514 First check for cases where an arithmetic operation is applied to a
8515 compound, conditional, or comparison operation. Push the arithmetic
8516 operation inside the compound or conditional to see if any folding
8517 can then be done. Convert comparison to conditional for this purpose.
8518 This also optimizes non-constant cases that used to be done in
8519 expand_expr.
8521 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8522 one of the operands is a comparison and the other is a comparison, a
8523 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8524 code below would make the expression more complex. Change it to a
8525 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8526 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8528 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8529 || code == EQ_EXPR || code == NE_EXPR)
8530 && ((truth_value_p (TREE_CODE (arg0))
8531 && (truth_value_p (TREE_CODE (arg1))
8532 || (TREE_CODE (arg1) == BIT_AND_EXPR
8533 && integer_onep (TREE_OPERAND (arg1, 1)))))
8534 || (truth_value_p (TREE_CODE (arg1))
8535 && (truth_value_p (TREE_CODE (arg0))
8536 || (TREE_CODE (arg0) == BIT_AND_EXPR
8537 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8538 {
8539 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8540 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8541 : TRUTH_XOR_EXPR,
8542 boolean_type_node,
8543 fold_convert (boolean_type_node, arg0),
8544 fold_convert (boolean_type_node, arg1));
8546 if (code == EQ_EXPR)
8547 tem = invert_truthvalue (tem);
8549 return fold_convert (type, tem);
8550 }
8552 if (TREE_CODE_CLASS (code) == tcc_binary
8553 || TREE_CODE_CLASS (code) == tcc_comparison)
8554 {
8555 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8556 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8557 fold_build2 (code, type,
8558 TREE_OPERAND (arg0, 1), op1));
8559 if (TREE_CODE (arg1) == COMPOUND_EXPR
8560 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8561 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8562 fold_build2 (code, type,
8563 op0, TREE_OPERAND (arg1, 1)));
8565 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8566 {
8567 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8568 arg0, arg1,
8569 /*cond_first_p=*/1);
8570 if (tem != NULL_TREE)
8571 return tem;
8572 }
8574 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8575 {
8576 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8577 arg1, arg0,
8578 /*cond_first_p=*/0);
8579 if (tem != NULL_TREE)
8580 return tem;
8581 }
8582 }
8584 switch (code)
8585 {
8586 case PLUS_EXPR:
8587 /* A + (-B) -> A - B */
8588 if (TREE_CODE (arg1) == NEGATE_EXPR)
8589 return fold_build2 (MINUS_EXPR, type,
8590 fold_convert (type, arg0),
8591 fold_convert (type, TREE_OPERAND (arg1, 0)));
8592 /* (-A) + B -> B - A */
8593 if (TREE_CODE (arg0) == NEGATE_EXPR
8594 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8595 return fold_build2 (MINUS_EXPR, type,
8596 fold_convert (type, arg1),
8597 fold_convert (type, TREE_OPERAND (arg0, 0)));
8598 /* Convert ~A + 1 to -A. */
8599 if (INTEGRAL_TYPE_P (type)
8600 && TREE_CODE (arg0) == BIT_NOT_EXPR
8601 && integer_onep (arg1))
8602 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
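/* Annotation, not in the original source: this is the two's complement
   identity -A == ~A + 1; e.g. for A == 5, ~5 == -6 and -6 + 1 == -5.  */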
8604 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8605 same or one.  */
8606 if ((TREE_CODE (arg0) == MULT_EXPR
8607 || TREE_CODE (arg1) == MULT_EXPR)
8608 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8609 {
8610 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8611 if (tem)
8612 return tem;
8613 }
8615 if (! FLOAT_TYPE_P (type))
8616 {
8617 if (integer_zerop (arg1))
8618 return non_lvalue (fold_convert (type, arg0));
8620 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8621 with a constant, and the two constants have no bits in common,
8622 we should treat this as a BIT_IOR_EXPR since this may produce more
8623 simplifications.  */
8624 if (TREE_CODE (arg0) == BIT_AND_EXPR
8625 && TREE_CODE (arg1) == BIT_AND_EXPR
8626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8627 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8628 && integer_zerop (const_binop (BIT_AND_EXPR,
8629 TREE_OPERAND (arg0, 1),
8630 TREE_OPERAND (arg1, 1), 0)))
8631 {
8632 code = BIT_IOR_EXPR;
8633 goto bit_ior;
8634 }
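/* Annotation, not in the original source: e.g. '(x & 0xF0) + (y & 0x0F)'
   can produce no carry out of any bit position, so it is rewritten as
   '(x & 0xF0) | (y & 0x0F)' and handled by the BIT_IOR_EXPR folds.  */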
8636 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8637 (plus (plus (mult) (mult)) (foo)) so that we can
8638 take advantage of the factoring cases below. */
8639 if (((TREE_CODE (arg0) == PLUS_EXPR
8640 || TREE_CODE (arg0) == MINUS_EXPR)
8641 && TREE_CODE (arg1) == MULT_EXPR)
8642 || ((TREE_CODE (arg1) == PLUS_EXPR
8643 || TREE_CODE (arg1) == MINUS_EXPR)
8644 && TREE_CODE (arg0) == MULT_EXPR))
8645 {
8646 tree parg0, parg1, parg, marg;
8647 enum tree_code pcode;
8649 if (TREE_CODE (arg1) == MULT_EXPR)
8650 parg = arg0, marg = arg1;
8651 else
8652 parg = arg1, marg = arg0;
8653 pcode = TREE_CODE (parg);
8654 parg0 = TREE_OPERAND (parg, 0);
8655 parg1 = TREE_OPERAND (parg, 1);
8656 STRIP_NOPS (parg0);
8657 STRIP_NOPS (parg1);
8659 if (TREE_CODE (parg0) == MULT_EXPR
8660 && TREE_CODE (parg1) != MULT_EXPR)
8661 return fold_build2 (pcode, type,
8662 fold_build2 (PLUS_EXPR, type,
8663 fold_convert (type, parg0),
8664 fold_convert (type, marg)),
8665 fold_convert (type, parg1));
8666 if (TREE_CODE (parg0) != MULT_EXPR
8667 && TREE_CODE (parg1) == MULT_EXPR)
8668 return fold_build2 (PLUS_EXPR, type,
8669 fold_convert (type, parg0),
8670 fold_build2 (pcode, type,
8671 fold_convert (type, marg),
8672 fold_convert (type, parg1)));
8673 }
8676 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8677 of the array.  The loop optimizer sometimes produces expressions
8678 of this form.  */
8679 if (TREE_CODE (arg0) == ADDR_EXPR)
8680 {
8681 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8682 if (tem)
8683 return fold_convert (type, tem);
8684 }
8685 else if (TREE_CODE (arg1) == ADDR_EXPR)
8686 {
8687 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8688 if (tem)
8689 return fold_convert (type, tem);
8690 }
8691 }
8692 else
8693 {
8694 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8695 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8696 return non_lvalue (fold_convert (type, arg0));
8698 /* Likewise if the operands are reversed. */
8699 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8700 return non_lvalue (fold_convert (type, arg1));
8702 /* Convert X + -C into X - C. */
8703 if (TREE_CODE (arg1) == REAL_CST
8704 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8706 tem = fold_negate_const (arg1, type);
8707 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8708 return fold_build2 (MINUS_EXPR, type,
8709 fold_convert (type, arg0),
8710 fold_convert (type, tem));
8713 if (flag_unsafe_math_optimizations
8714 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8715 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8716 && (tem = distribute_real_division (code, type, arg0, arg1)))
8717 return tem;
8719 /* Convert x+x into x*2.0. */
8720 if (operand_equal_p (arg0, arg1, 0)
8721 && SCALAR_FLOAT_TYPE_P (type))
8722 return fold_build2 (MULT_EXPR, type, arg0,
8723 build_real (type, dconst2));
8725 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8726 if (flag_unsafe_math_optimizations
8727 && TREE_CODE (arg1) == PLUS_EXPR
8728 && TREE_CODE (arg0) != MULT_EXPR)
8729 {
8730 tree tree10 = TREE_OPERAND (arg1, 0);
8731 tree tree11 = TREE_OPERAND (arg1, 1);
8732 if (TREE_CODE (tree11) == MULT_EXPR
8733 && TREE_CODE (tree10) == MULT_EXPR)
8734 {
8735 tree tree0;
8736 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8737 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8738 }
8739 }
8740 /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
8741 if (flag_unsafe_math_optimizations
8742 && TREE_CODE (arg0) == PLUS_EXPR
8743 && TREE_CODE (arg1) != MULT_EXPR)
8744 {
8745 tree tree00 = TREE_OPERAND (arg0, 0);
8746 tree tree01 = TREE_OPERAND (arg0, 1);
8747 if (TREE_CODE (tree01) == MULT_EXPR
8748 && TREE_CODE (tree00) == MULT_EXPR)
8749 {
8750 tree tree0;
8751 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8752 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8753 }
8754 }
8755 }
8756 bit_rotate:
8758 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8759 is a rotate of A by C1 bits. */
8760 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8761 is a rotate of A by B bits. */
8762 {
8763 enum tree_code code0, code1;
8764 code0 = TREE_CODE (arg0);
8765 code1 = TREE_CODE (arg1);
8766 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8767 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8768 && operand_equal_p (TREE_OPERAND (arg0, 0),
8769 TREE_OPERAND (arg1, 0), 0)
8770 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8771 {
8772 tree tree01, tree11;
8773 enum tree_code code01, code11;
8775 tree01 = TREE_OPERAND (arg0, 1);
8776 tree11 = TREE_OPERAND (arg1, 1);
8777 STRIP_NOPS (tree01);
8778 STRIP_NOPS (tree11);
8779 code01 = TREE_CODE (tree01);
8780 code11 = TREE_CODE (tree11);
8781 if (code01 == INTEGER_CST
8782 && code11 == INTEGER_CST
8783 && TREE_INT_CST_HIGH (tree01) == 0
8784 && TREE_INT_CST_HIGH (tree11) == 0
8785 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8786 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8787 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8788 code0 == LSHIFT_EXPR ? tree01 : tree11);
8789 else if (code11 == MINUS_EXPR)
8790 {
8791 tree tree110, tree111;
8792 tree110 = TREE_OPERAND (tree11, 0);
8793 tree111 = TREE_OPERAND (tree11, 1);
8794 STRIP_NOPS (tree110);
8795 STRIP_NOPS (tree111);
8796 if (TREE_CODE (tree110) == INTEGER_CST
8797 && 0 == compare_tree_int (tree110,
8798 TYPE_PRECISION
8799 (TREE_TYPE (TREE_OPERAND
8800 (arg0, 0))))
8801 && operand_equal_p (tree01, tree111, 0))
8802 return build2 ((code0 == LSHIFT_EXPR
8803 ? LROTATE_EXPR
8804 : RROTATE_EXPR),
8805 type, TREE_OPERAND (arg0, 0), tree01);
8806 }
8807 else if (code01 == MINUS_EXPR)
8808 {
8809 tree tree010, tree011;
8810 tree010 = TREE_OPERAND (tree01, 0);
8811 tree011 = TREE_OPERAND (tree01, 1);
8812 STRIP_NOPS (tree010);
8813 STRIP_NOPS (tree011);
8814 if (TREE_CODE (tree010) == INTEGER_CST
8815 && 0 == compare_tree_int (tree010,
8816 TYPE_PRECISION
8817 (TREE_TYPE (TREE_OPERAND
8818 (arg0, 0))))
8819 && operand_equal_p (tree11, tree011, 0))
8820 return build2 ((code0 != LSHIFT_EXPR
8821 ? LROTATE_EXPR
8822 : RROTATE_EXPR),
8823 type, TREE_OPERAND (arg0, 0), tree11);
8824 }
8825 }
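/* Annotation, not in the original source: for a 32-bit unsigned x, both
   '(x << 3) + (x >> 29)' (constant counts summing to 32) and
   '(x << n) + (x >> (32 - n))' (variable count) are recognized here and
   rewritten as a single left-rotate of x.  */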
8827 associate:
8829 /* In most languages, we can't associate operations on floats
8830 through parentheses.  Rather than remember where the parentheses
8831 were, we don't associate floats at all, unless the user has
8832 specified -funsafe-math-optimizations.  */
8834 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8835 {
8836 tree var0, con0, lit0, minus_lit0;
8837 tree var1, con1, lit1, minus_lit1;
8839 /* Split both trees into variables, constants, and literals. Then
8840 associate each group together, the constants with literals,
8841 then the result with variables. This increases the chances of
8842 literals being recombined later and of generating relocatable
8843 expressions for the sum of a constant and literal. */
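/* Annotation, not in the original source: e.g. '(x + 3) + (y + 4)'
   splits into variables {x, y} and literals {3, 4} and reassociates to
   '(x + y) + 7', leaving a single literal for later folds to combine.  */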
8844 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8845 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8846 code == MINUS_EXPR);
8848 /* Only do something if we found more than two objects. Otherwise,
8849 nothing has changed and we risk infinite recursion. */
8850 if (2 < ((var0 != 0) + (var1 != 0)
8851 + (con0 != 0) + (con1 != 0)
8852 + (lit0 != 0) + (lit1 != 0)
8853 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8854 {
8855 /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
8856 if (code == MINUS_EXPR)
8857 code = PLUS_EXPR;
8859 var0 = associate_trees (var0, var1, code, type);
8860 con0 = associate_trees (con0, con1, code, type);
8861 lit0 = associate_trees (lit0, lit1, code, type);
8862 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8864 /* Preserve the MINUS_EXPR if the negative part of the literal is
8865 greater than the positive part. Otherwise, the multiplicative
8866 folding code (i.e. extract_muldiv) may be fooled when
8867 unsigned constants are subtracted, as in the following
8868 example: ((X*2 + 4) - 8U)/2. */
8869 if (minus_lit0 && lit0)
8870 {
8871 if (TREE_CODE (lit0) == INTEGER_CST
8872 && TREE_CODE (minus_lit0) == INTEGER_CST
8873 && tree_int_cst_lt (lit0, minus_lit0))
8874 {
8875 minus_lit0 = associate_trees (minus_lit0, lit0,
8876 MINUS_EXPR, type);
8877 lit0 = 0;
8878 }
8879 else
8880 {
8881 lit0 = associate_trees (lit0, minus_lit0,
8882 MINUS_EXPR, type);
8883 minus_lit0 = 0;
8884 }
8885 }
8886 if (minus_lit0)
8887 {
8888 if (con0 == 0)
8889 return fold_convert (type,
8890 associate_trees (var0, minus_lit0,
8891 MINUS_EXPR, type));
8892 else
8893 {
8894 con0 = associate_trees (con0, minus_lit0,
8895 MINUS_EXPR, type);
8896 return fold_convert (type,
8897 associate_trees (var0, con0,
8898 PLUS_EXPR, type));
8899 }
8900 }
8902 con0 = associate_trees (con0, lit0, code, type);
8903 return fold_convert (type, associate_trees (var0, con0,
8904 code, type));
8905 }
8906 }
8908 return NULL_TREE;
8910 case MINUS_EXPR:
8911 /* A - (-B) -> A + B */
8912 if (TREE_CODE (arg1) == NEGATE_EXPR)
8913 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8914 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8915 if (TREE_CODE (arg0) == NEGATE_EXPR
8916 && (FLOAT_TYPE_P (type)
8917 || INTEGRAL_TYPE_P (type))
8918 && negate_expr_p (arg1)
8919 && reorder_operands_p (arg0, arg1))
8920 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8921 TREE_OPERAND (arg0, 0));
8922 /* Convert -A - 1 to ~A. */
8923 if (INTEGRAL_TYPE_P (type)
8924 && TREE_CODE (arg0) == NEGATE_EXPR
8925 && integer_onep (arg1))
8926 return fold_build1 (BIT_NOT_EXPR, type,
8927 fold_convert (type, TREE_OPERAND (arg0, 0)));
8929 /* Convert -1 - A to ~A. */
8930 if (INTEGRAL_TYPE_P (type)
8931 && integer_all_onesp (arg0))
8932 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8934 if (! FLOAT_TYPE_P (type))
8935 {
8936 if (integer_zerop (arg0))
8937 return negate_expr (fold_convert (type, arg1));
8938 if (integer_zerop (arg1))
8939 return non_lvalue (fold_convert (type, arg0));
8941 /* Fold A - (A & B) into ~B & A. */
8942 if (!TREE_SIDE_EFFECTS (arg0)
8943 && TREE_CODE (arg1) == BIT_AND_EXPR)
8944 {
8945 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8946 return fold_build2 (BIT_AND_EXPR, type,
8947 fold_build1 (BIT_NOT_EXPR, type,
8948 TREE_OPERAND (arg1, 0)),
8949 arg0);
8950 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8951 return fold_build2 (BIT_AND_EXPR, type,
8952 fold_build1 (BIT_NOT_EXPR, type,
8953 TREE_OPERAND (arg1, 1)),
8954 arg0);
8955 }
8957 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8958 any power of 2 minus 1. */
8959 if (TREE_CODE (arg0) == BIT_AND_EXPR
8960 && TREE_CODE (arg1) == BIT_AND_EXPR
8961 && operand_equal_p (TREE_OPERAND (arg0, 0),
8962 TREE_OPERAND (arg1, 0), 0))
8963 {
8964 tree mask0 = TREE_OPERAND (arg0, 1);
8965 tree mask1 = TREE_OPERAND (arg1, 1);
8966 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8968 if (operand_equal_p (tem, mask1, 0))
8969 {
8970 tem = fold_build2 (BIT_XOR_EXPR, type,
8971 TREE_OPERAND (arg0, 0), mask1);
8972 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8973 }
8974 }
8975 }
8977 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8978 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8979 return non_lvalue (fold_convert (type, arg0));
8981 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8982 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8983 (-ARG1 + ARG0) reduces to -ARG1. */
8984 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8985 return negate_expr (fold_convert (type, arg1));
8987 /* Fold &x - &x. This can happen from &x.foo - &x.
8988 This is unsafe for certain floats even in non-IEEE formats.
8989 In IEEE, it is unsafe because it gives the wrong answer for NaNs.
8990 Also note that operand_equal_p is always false if an operand
8991 is volatile.  */
8993 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8994 && operand_equal_p (arg0, arg1, 0))
8995 return fold_convert (type, integer_zero_node);
8997 /* A - B -> A + (-B) if B is easily negatable. */
8998 if (negate_expr_p (arg1)
8999 && ((FLOAT_TYPE_P (type)
9000 /* Avoid this transformation if B is a positive REAL_CST. */
9001 && (TREE_CODE (arg1) != REAL_CST
9002 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9003 || INTEGRAL_TYPE_P (type)))
9004 return fold_build2 (PLUS_EXPR, type,
9005 fold_convert (type, arg0),
9006 fold_convert (type, negate_expr (arg1)));
9008 /* Try folding difference of addresses.  */
9009 {
9010 HOST_WIDE_INT diff;
9012 if ((TREE_CODE (arg0) == ADDR_EXPR
9013 || TREE_CODE (arg1) == ADDR_EXPR)
9014 && ptr_difference_const (arg0, arg1, &diff))
9015 return build_int_cst_type (type, diff);
9016 }
9018 /* Fold &a[i] - &a[j] to i-j. */
9019 if (TREE_CODE (arg0) == ADDR_EXPR
9020 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9021 && TREE_CODE (arg1) == ADDR_EXPR
9022 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9023 {
9024 tree aref0 = TREE_OPERAND (arg0, 0);
9025 tree aref1 = TREE_OPERAND (arg1, 0);
9026 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9027 TREE_OPERAND (aref1, 0), 0))
9028 {
9029 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9030 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9031 tree esz = array_ref_element_size (aref0);
9032 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9033 return fold_build2 (MULT_EXPR, type, diff,
9034 fold_convert (type, esz));
9035 }
9036 }
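/* Annotation, not in the original source: the MINUS here is the raw
   address difference, so '&a[i] - &a[j]' becomes '(i - j) * esz' with
   esz the element size in bytes; a C-level pointer subtraction divides
   by the element size again, cancelling the product.  */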
9039 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9040 of the array.  The loop optimizer sometimes produces expressions
9041 of this form.  */
9042 if (TREE_CODE (arg0) == ADDR_EXPR)
9043 {
9044 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9045 if (tem)
9046 return fold_convert (type, tem);
9047 }
9049 if (flag_unsafe_math_optimizations
9050 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9051 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9052 && (tem = distribute_real_division (code, type, arg0, arg1)))
9053 return tem;
9055 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9056 same or one.  */
9057 if ((TREE_CODE (arg0) == MULT_EXPR
9058 || TREE_CODE (arg1) == MULT_EXPR)
9059 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9060 {
9061 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9062 if (tem)
9063 return tem;
9064 }
9066 goto associate;
9068 case MULT_EXPR:
9069 /* (-A) * (-B) -> A * B */
9070 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9071 return fold_build2 (MULT_EXPR, type,
9072 fold_convert (type, TREE_OPERAND (arg0, 0)),
9073 fold_convert (type, negate_expr (arg1)));
9074 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9075 return fold_build2 (MULT_EXPR, type,
9076 fold_convert (type, negate_expr (arg0)),
9077 fold_convert (type, TREE_OPERAND (arg1, 0)));
9079 if (! FLOAT_TYPE_P (type))
9080 {
9081 if (integer_zerop (arg1))
9082 return omit_one_operand (type, arg1, arg0);
9083 if (integer_onep (arg1))
9084 return non_lvalue (fold_convert (type, arg0));
9085 /* Transform x * -1 into -x. */
9086 if (integer_all_onesp (arg1))
9087 return fold_convert (type, negate_expr (arg0));
9088 /* Transform x * -C into -x * C if x is easily negatable. */
9089 if (TREE_CODE (arg1) == INTEGER_CST
9090 && tree_int_cst_sgn (arg1) == -1
9091 && negate_expr_p (arg0)
9092 && (tem = negate_expr (arg1)) != arg1
9093 && !TREE_OVERFLOW (tem))
9094 return fold_build2 (MULT_EXPR, type,
9095 negate_expr (arg0), tem);
9097 /* (a * (1 << b)) is (a << b) */
9098 if (TREE_CODE (arg1) == LSHIFT_EXPR
9099 && integer_onep (TREE_OPERAND (arg1, 0)))
9100 return fold_build2 (LSHIFT_EXPR, type, arg0,
9101 TREE_OPERAND (arg1, 1));
9102 if (TREE_CODE (arg0) == LSHIFT_EXPR
9103 && integer_onep (TREE_OPERAND (arg0, 0)))
9104 return fold_build2 (LSHIFT_EXPR, type, arg1,
9105 TREE_OPERAND (arg0, 1));
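/* Annotation, not in the original source: e.g. 'a * (1 << b)' becomes
   'a << b'; the mirrored test just below handles '(1 << b) * a'.  */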
9107 if (TREE_CODE (arg1) == INTEGER_CST
9108 && 0 != (tem = extract_muldiv (op0,
9109 fold_convert (type, arg1),
9110 code, NULL_TREE)))
9111 return fold_convert (type, tem);
9113 /* Optimize z * conj(z) for integer complex numbers. */
9114 if (TREE_CODE (arg0) == CONJ_EXPR
9115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9116 return fold_mult_zconjz (type, arg1);
9117 if (TREE_CODE (arg1) == CONJ_EXPR
9118 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9119 return fold_mult_zconjz (type, arg0);
9120 }
9121 else
9122 {
9123 /* Maybe fold x * 0 to 0. The expressions aren't the same
9124 when x is NaN, since x * 0 is also NaN. Nor are they the
9125 same in modes with signed zeros, since multiplying a
9126 negative value by 0 gives -0, not +0. */
9127 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9128 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9129 && real_zerop (arg1))
9130 return omit_one_operand (type, arg1, arg0);
9131 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9132 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9133 && real_onep (arg1))
9134 return non_lvalue (fold_convert (type, arg0));
9136 /* Transform x * -1.0 into -x. */
9137 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9138 && real_minus_onep (arg1))
9139 return fold_convert (type, negate_expr (arg0));
9141 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9142 if (flag_unsafe_math_optimizations
9143 && TREE_CODE (arg0) == RDIV_EXPR
9144 && TREE_CODE (arg1) == REAL_CST
9145 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9146 {
9147 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9148 arg1, 0);
9149 if (tem)
9150 return fold_build2 (RDIV_EXPR, type, tem,
9151 TREE_OPERAND (arg0, 1));
9154 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9155 if (operand_equal_p (arg0, arg1, 0))
9156 {
9157 tree tem = fold_strip_sign_ops (arg0);
9158 if (tem != NULL_TREE)
9159 {
9160 tem = fold_convert (type, tem);
9161 return fold_build2 (MULT_EXPR, type, tem, tem);
9162 }
9163 }
9165 /* Optimize z * conj(z) for floating point complex numbers.
9166 Guarded by flag_unsafe_math_optimizations as non-finite
9167 imaginary components don't produce scalar results. */
9168 if (flag_unsafe_math_optimizations
9169 && TREE_CODE (arg0) == CONJ_EXPR
9170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9171 return fold_mult_zconjz (type, arg1);
9172 if (flag_unsafe_math_optimizations
9173 && TREE_CODE (arg1) == CONJ_EXPR
9174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9175 return fold_mult_zconjz (type, arg0);
9177 if (flag_unsafe_math_optimizations)
9178 {
9179 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9180 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9182 /* Optimizations of root(...)*root(...). */
9183 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9184 {
9185 tree rootfn, arg, arglist;
9186 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9187 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9189 /* Optimize sqrt(x)*sqrt(x) as x. */
9190 if (BUILTIN_SQRT_P (fcode0)
9191 && operand_equal_p (arg00, arg10, 0)
9192 && ! HONOR_SNANS (TYPE_MODE (type)))
9193 return arg00;
9195 /* Optimize root(x)*root(y) as root(x*y). */
9196 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9197 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9198 arglist = build_tree_list (NULL_TREE, arg);
9199 return build_function_call_expr (rootfn, arglist);
9202 /* Optimize expN(x)*expN(y) as expN(x+y). */
9203 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9204 {
9205 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9206 tree arg = fold_build2 (PLUS_EXPR, type,
9207 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9208 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9209 tree arglist = build_tree_list (NULL_TREE, arg);
9210 return build_function_call_expr (expfn, arglist);
9211 }
9213 /* Optimizations of pow(...)*pow(...). */
9214 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9215 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9216 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9217 {
9218 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9219 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9220 1)));
9221 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9222 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9223 1)));
9225 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9226 if (operand_equal_p (arg01, arg11, 0))
9227 {
9228 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9229 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9230 tree arglist = tree_cons (NULL_TREE, arg,
9231 build_tree_list (NULL_TREE,
9232 arg01));
9233 return build_function_call_expr (powfn, arglist);
9234 }
9236 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9237 if (operand_equal_p (arg00, arg10, 0))
9238 {
9239 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9240 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9241 tree arglist = tree_cons (NULL_TREE, arg00,
9242 build_tree_list (NULL_TREE,
9243 arg));
9244 return build_function_call_expr (powfn, arglist);
9245 }
9246 }
9248 /* Optimize tan(x)*cos(x) as sin(x). */
9249 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9250 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9251 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9252 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9253 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9254 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9255 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9256 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9257 {
9258 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9260 if (sinfn != NULL_TREE)
9261 return build_function_call_expr (sinfn,
9262 TREE_OPERAND (arg0, 1));
9263 }
9265 /* Optimize x*pow(x,c) as pow(x,c+1). */
9266 if (fcode1 == BUILT_IN_POW
9267 || fcode1 == BUILT_IN_POWF
9268 || fcode1 == BUILT_IN_POWL)
9269 {
9270 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9271 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9272 1)));
9273 if (TREE_CODE (arg11) == REAL_CST
9274 && ! TREE_CONSTANT_OVERFLOW (arg11)
9275 && operand_equal_p (arg0, arg10, 0))
9276 {
9277 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9278 REAL_VALUE_TYPE c;
9279 tree arg, arglist;
9281 c = TREE_REAL_CST (arg11);
9282 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9283 arg = build_real (type, c);
9284 arglist = build_tree_list (NULL_TREE, arg);
9285 arglist = tree_cons (NULL_TREE, arg0, arglist);
9286 return build_function_call_expr (powfn, arglist);
9287 }
9288 }
9290 /* Optimize pow(x,c)*x as pow(x,c+1). */
9291 if (fcode0 == BUILT_IN_POW
9292 || fcode0 == BUILT_IN_POWF
9293 || fcode0 == BUILT_IN_POWL)
9294 {
9295 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9296 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9297 1)));
9298 if (TREE_CODE (arg01) == REAL_CST
9299 && ! TREE_CONSTANT_OVERFLOW (arg01)
9300 && operand_equal_p (arg1, arg00, 0))
9301 {
9302 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9303 REAL_VALUE_TYPE c;
9304 tree arg, arglist;
9306 c = TREE_REAL_CST (arg01);
9307 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9308 arg = build_real (type, c);
9309 arglist = build_tree_list (NULL_TREE, arg);
9310 arglist = tree_cons (NULL_TREE, arg1, arglist);
9311 return build_function_call_expr (powfn, arglist);
9312 }
9313 }
9315 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9316 if (! optimize_size
9317 && operand_equal_p (arg0, arg1, 0))
9318 {
9319 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9321 if (powfn)
9322 {
9323 tree arg = build_real (type, dconst2);
9324 tree arglist = build_tree_list (NULL_TREE, arg);
9325 arglist = tree_cons (NULL_TREE, arg0, arglist);
9326 return build_function_call_expr (powfn, arglist);
9327 }
9328 }
9329 }
9330 }
9331 goto associate;
9333 case BIT_IOR_EXPR:
9334 bit_ior:
9335 if (integer_all_onesp (arg1))
9336 return omit_one_operand (type, arg1, arg0);
9337 if (integer_zerop (arg1))
9338 return non_lvalue (fold_convert (type, arg0));
9339 if (operand_equal_p (arg0, arg1, 0))
9340 return non_lvalue (fold_convert (type, arg0));
9342 /* ~X | X is -1.  */
9343 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9344 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9345 {
9346 t1 = build_int_cst (type, -1);
9347 t1 = force_fit_type (t1, 0, false, false);
9348 return omit_one_operand (type, t1, arg1);
9349 }
9351 /* X | ~X is -1.  */
9352 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9353 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9354 {
9355 t1 = build_int_cst (type, -1);
9356 t1 = force_fit_type (t1, 0, false, false);
9357 return omit_one_operand (type, t1, arg0);
9358 }
9360 /* Canonicalize (X & C1) | C2. */
9361 if (TREE_CODE (arg0) == BIT_AND_EXPR
9362 && TREE_CODE (arg1) == INTEGER_CST
9363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9364 {
9365 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9366 int width = TYPE_PRECISION (type);
9367 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9368 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9369 hi2 = TREE_INT_CST_HIGH (arg1);
9370 lo2 = TREE_INT_CST_LOW (arg1);
9372 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9373 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9374 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9376 if (width > HOST_BITS_PER_WIDE_INT)
9377 {
9378 mhi = (unsigned HOST_WIDE_INT) -1
9379 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9380 mlo = -1;
9381 }
9382 else
9383 {
9384 mhi = 0;
9385 mlo = (unsigned HOST_WIDE_INT) -1
9386 >> (HOST_BITS_PER_WIDE_INT - width);
9387 }
9389 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9390 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9391 return fold_build2 (BIT_IOR_EXPR, type,
9392 TREE_OPERAND (arg0, 0), arg1);
9394 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
9395 hi1 &= mhi;
9396 lo1 &= mlo;
9397 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9398 return fold_build2 (BIT_IOR_EXPR, type,
9399 fold_build2 (BIT_AND_EXPR, type,
9400 TREE_OPERAND (arg0, 0),
9401 build_int_cst_wide (type,
9402 lo1 & ~lo2,
9403 hi1 & ~hi2)),
9404 arg1);
9405 }
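/* Worked example, not in the original source: for '(x & 0x0F) | 0x05',
   C1 & ~C2 == 0x0A differs from C1, so the result is canonicalized to
   '(x & 0x0A) | 0x05'; for '(x & 0xF0) | 0x05', C1 & ~C2 == C1 and none
   of the three rewrites above fires.  */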
9407 /* (X & Y) | Y is (X, Y). */
9408 if (TREE_CODE (arg0) == BIT_AND_EXPR
9409 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9410 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9411 /* (X & Y) | X is (Y, X). */
9412 if (TREE_CODE (arg0) == BIT_AND_EXPR
9413 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9414 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9415 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9416 /* X | (X & Y) is (Y, X). */
9417 if (TREE_CODE (arg1) == BIT_AND_EXPR
9418 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9419 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9420 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9421 /* X | (Y & X) is (Y, X). */
9422 if (TREE_CODE (arg1) == BIT_AND_EXPR
9423 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9424 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9425 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9427 t1 = distribute_bit_expr (code, type, arg0, arg1);
9428 if (t1 != NULL_TREE)
9429 return t1;
9431 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9433 This results in more efficient code for machines without a NAND
9434 instruction. Combine will canonicalize to the first form
9435 which will allow use of NAND instructions provided by the
9436 backend if they exist. */
9437 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9438 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9439 {
9440 return fold_build1 (BIT_NOT_EXPR, type,
9441 build2 (BIT_AND_EXPR, type,
9442 TREE_OPERAND (arg0, 0),
9443 TREE_OPERAND (arg1, 0)));
9444 }
9446 /* See if this can be simplified into a rotate first. If that
9447 is unsuccessful continue in the association code.  */
9448 goto bit_rotate;
9450 case BIT_XOR_EXPR:
9451 if (integer_zerop (arg1))
9452 return non_lvalue (fold_convert (type, arg0));
9453 if (integer_all_onesp (arg1))
9454 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9455 if (operand_equal_p (arg0, arg1, 0))
9456 return omit_one_operand (type, integer_zero_node, arg0);
9458 /* ~X ^ X is -1.  */
9459 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9460 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9461 {
9462 t1 = build_int_cst (type, -1);
9463 t1 = force_fit_type (t1, 0, false, false);
9464 return omit_one_operand (type, t1, arg1);
9465 }
9467 /* X ^ ~X is -1.  */
9468 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9469 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9470 {
9471 t1 = build_int_cst (type, -1);
9472 t1 = force_fit_type (t1, 0, false, false);
9473 return omit_one_operand (type, t1, arg0);
9474 }
9476 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9477 with a constant, and the two constants have no bits in common,
9478 we should treat this as a BIT_IOR_EXPR since this may produce more
9479 simplifications.  */
9480 if (TREE_CODE (arg0) == BIT_AND_EXPR
9481 && TREE_CODE (arg1) == BIT_AND_EXPR
9482 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9483 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9484 && integer_zerop (const_binop (BIT_AND_EXPR,
9485 TREE_OPERAND (arg0, 1),
9486 TREE_OPERAND (arg1, 1), 0)))
9487 {
9488 code = BIT_IOR_EXPR;
9489 goto bit_ior;
9490 }
9492 /* (X | Y) ^ X -> Y & ~X.  */
9493 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9494 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9495 {
9496 tree t2 = TREE_OPERAND (arg0, 1);
9497 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9498 arg1);
9499 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9500 fold_convert (type, t1));
9501 return t1;
9502 }
9504 /* (Y | X) ^ X -> Y & ~X.  */
9505 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9506 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9507 {
9508 tree t2 = TREE_OPERAND (arg0, 0);
9509 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9510 arg1);
9511 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9512 fold_convert (type, t1));
9513 return t1;
9514 }
9516 /* X ^ (X | Y) -> Y & ~X.  */
9517 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9518 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9519 {
9520 tree t2 = TREE_OPERAND (arg1, 1);
9521 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9522 arg0);
9523 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9524 fold_convert (type, t1));
9525 return t1;
9526 }
9528 /* X ^ (Y | X) -> Y & ~X.  */
9529 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9530 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9531 {
9532 tree t2 = TREE_OPERAND (arg1, 0);
9533 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9534 arg0);
9535 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9536 fold_convert (type, t1));
9537 return t1;
9538 }
9540 /* Convert ~X ^ ~Y to X ^ Y. */
9541 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9542 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9543 return fold_build2 (code, type,
9544 fold_convert (type, TREE_OPERAND (arg0, 0)),
9545 fold_convert (type, TREE_OPERAND (arg1, 0)));
9547 /* Convert ~X ^ C to X ^ ~C. */
9548 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9549 && TREE_CODE (arg1) == INTEGER_CST)
9550 return fold_build2 (code, type,
9551 fold_convert (type, TREE_OPERAND (arg0, 0)),
9552 fold_build1 (BIT_NOT_EXPR, type, arg1));
9554 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9555 if (TREE_CODE (arg0) == BIT_AND_EXPR
9556 && integer_onep (TREE_OPERAND (arg0, 1))
9557 && integer_onep (arg1))
9558 return fold_build2 (EQ_EXPR, type, arg0,
9559 build_int_cst (TREE_TYPE (arg0), 0));
9561 /* Fold (X & Y) ^ Y as ~X & Y. */
9562 if (TREE_CODE (arg0) == BIT_AND_EXPR
9563 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9564 {
9565 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9566 return fold_build2 (BIT_AND_EXPR, type,
9567 fold_build1 (BIT_NOT_EXPR, type, tem),
9568 fold_convert (type, arg1));
9569 }
9570 /* Fold (X & Y) ^ X as ~Y & X. */
9571 if (TREE_CODE (arg0) == BIT_AND_EXPR
9572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9573 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9574 {
9575 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9576 return fold_build2 (BIT_AND_EXPR, type,
9577 fold_build1 (BIT_NOT_EXPR, type, tem),
9578 fold_convert (type, arg1));
9579 }
9580 /* Fold X ^ (X & Y) as X & ~Y. */
9581 if (TREE_CODE (arg1) == BIT_AND_EXPR
9582 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9583 {
9584 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9585 return fold_build2 (BIT_AND_EXPR, type,
9586 fold_convert (type, arg0),
9587 fold_build1 (BIT_NOT_EXPR, type, tem));
9588 }
9589 /* Fold X ^ (Y & X) as ~Y & X. */
9590 if (TREE_CODE (arg1) == BIT_AND_EXPR
9591 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9592 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9593 {
9594 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9595 return fold_build2 (BIT_AND_EXPR, type,
9596 fold_build1 (BIT_NOT_EXPR, type, tem),
9597 fold_convert (type, arg0));
9598 }
9600 /* See if this can be simplified into a rotate first. If that
9601 is unsuccessful continue in the association code.  */
9602 goto bit_rotate;
9604 case BIT_AND_EXPR:
9605 if (integer_all_onesp (arg1))
9606 return non_lvalue (fold_convert (type, arg0));
9607 if (integer_zerop (arg1))
9608 return omit_one_operand (type, arg1, arg0);
9609 if (operand_equal_p (arg0, arg1, 0))
9610 return non_lvalue (fold_convert (type, arg0));
9612 /* ~X & X is always zero. */
9613 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9614 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9615 return omit_one_operand (type, integer_zero_node, arg1);
9617 /* X & ~X is always zero. */
9618 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9619 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9620 return omit_one_operand (type, integer_zero_node, arg0);
9622 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9623 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9624 && TREE_CODE (arg1) == INTEGER_CST
9625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9626 return fold_build2 (BIT_IOR_EXPR, type,
9627 fold_build2 (BIT_AND_EXPR, type,
9628 TREE_OPERAND (arg0, 0), arg1),
9629 fold_build2 (BIT_AND_EXPR, type,
9630 TREE_OPERAND (arg0, 1), arg1));
9632 /* (X | Y) & Y is (X, Y). */
9633 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9634 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9635 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9636 /* (X | Y) & X is (Y, X). */
9637 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9638 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9639 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9640 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9641 /* X & (X | Y) is (Y, X). */
9642 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9643 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9644 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9645 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9646 /* X & (Y | X) is (Y, X). */
9647 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9648 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9649 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9650 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9652 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9653 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9654 && integer_onep (TREE_OPERAND (arg0, 1))
9655 && integer_onep (arg1))
9656 {
9657 tem = TREE_OPERAND (arg0, 0);
9658 return fold_build2 (EQ_EXPR, type,
9659 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9660 build_int_cst (TREE_TYPE (tem), 1)),
9661 build_int_cst (TREE_TYPE (tem), 0));
9662 }
9663 /* Fold ~X & 1 as (X & 1) == 0. */
9664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9665 && integer_onep (arg1))
9666 {
9667 tem = TREE_OPERAND (arg0, 0);
9668 return fold_build2 (EQ_EXPR, type,
9669 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9670 build_int_cst (TREE_TYPE (tem), 1)),
9671 build_int_cst (TREE_TYPE (tem), 0));
9672 }
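/* Annotation, not in the original source: both folds rewrite a negated
   low-bit test, e.g. '(x ^ 1) & 1' and '~x & 1' each become the
   comparison '(x & 1) == 0'.  */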
9674 /* Fold (X ^ Y) & Y as ~X & Y. */
9675 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9676 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9677 {
9678 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9679 return fold_build2 (BIT_AND_EXPR, type,
9680 fold_build1 (BIT_NOT_EXPR, type, tem),
9681 fold_convert (type, arg1));
9682 }
9683 /* Fold (X ^ Y) & X as ~Y & X. */
9684 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9685 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9686 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9687 {
9688 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9689 return fold_build2 (BIT_AND_EXPR, type,
9690 fold_build1 (BIT_NOT_EXPR, type, tem),
9691 fold_convert (type, arg1));
9692 }
9693 /* Fold X & (X ^ Y) as X & ~Y. */
9694 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9695 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9696 {
9697 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9698 return fold_build2 (BIT_AND_EXPR, type,
9699 fold_convert (type, arg0),
9700 fold_build1 (BIT_NOT_EXPR, type, tem));
9701 }
9702 /* Fold X & (Y ^ X) as ~Y & X. */
9703 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9704 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9705 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9706 {
9707 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9708 return fold_build2 (BIT_AND_EXPR, type,
9709 fold_build1 (BIT_NOT_EXPR, type, tem),
9710 fold_convert (type, arg0));
9711 }
9713 t1 = distribute_bit_expr (code, type, arg0, arg1);
9714 if (t1 != NULL_TREE)
9715 return t1;
9716 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9717 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9718 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9719 {
9720 unsigned int prec
9721 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9723 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9724 && (~TREE_INT_CST_LOW (arg1)
9725 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9726 return fold_convert (type, TREE_OPERAND (arg0, 0));
9727 }
9729 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9731 This results in more efficient code for machines without a NOR
9732 instruction. Combine will canonicalize to the first form
9733 which will allow use of NOR instructions provided by the
9734 backend if they exist. */
9735 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9736 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9737 {
9738 return fold_build1 (BIT_NOT_EXPR, type,
9739 build2 (BIT_IOR_EXPR, type,
9740 TREE_OPERAND (arg0, 0),
9741 TREE_OPERAND (arg1, 0)));
9742 }
9744 goto associate;
9746 case RDIV_EXPR:
9747 /* Don't touch a floating-point divide by zero unless the mode
9748 of the constant can represent infinity. */
9749 if (TREE_CODE (arg1) == REAL_CST
9750 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9751 && real_zerop (arg1))
9752 return NULL_TREE;
9754 /* Optimize A / A to 1.0 if we don't care about
9755 NaNs or Infinities. Skip the transformation
9756 for non-real operands. */
9757 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9758 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9759 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9760 && operand_equal_p (arg0, arg1, 0))
9761 {
9762 tree r = build_real (TREE_TYPE (arg0), dconst1);
9764 return omit_two_operands (type, r, arg0, arg1);
9765 }
9767 /* The complex version of the above A / A optimization. */
9768 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9769 && operand_equal_p (arg0, arg1, 0))
9770 {
9771 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9772 if (! HONOR_NANS (TYPE_MODE (elem_type))
9773 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9774 {
9775 tree r = build_real (elem_type, dconst1);
9776 /* omit_two_operands will call fold_convert for us.  */
9777 return omit_two_operands (type, r, arg0, arg1);
9778 }
9779 }
9781 /* (-A) / (-B) -> A / B */
9782 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9783 return fold_build2 (RDIV_EXPR, type,
9784 TREE_OPERAND (arg0, 0),
9785 negate_expr (arg1));
9786 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9787 return fold_build2 (RDIV_EXPR, type,
9788 negate_expr (arg0),
9789 TREE_OPERAND (arg1, 0));
9791 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9792 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9793 && real_onep (arg1))
9794 return non_lvalue (fold_convert (type, arg0));
9796 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9797 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9798 && real_minus_onep (arg1))
9799 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9801 /* If ARG1 is a constant, we can convert this to a multiply by the
9802 reciprocal. This does not have the same rounding properties,
9803 so only do this if -funsafe-math-optimizations. We can actually
9804 always safely do it if ARG1 is a power of two, but it's hard to
9805 tell if it is or not in a portable manner. */
9806 if (TREE_CODE (arg1) == REAL_CST)
9807 {
9808 if (flag_unsafe_math_optimizations
9809 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9810 arg1, 0)))
9811 return fold_build2 (MULT_EXPR, type, arg0, tem);
9812 /* Find the reciprocal if optimizing and the result is exact.  */
9813 if (optimize)
9814 {
9815 REAL_VALUE_TYPE r;
9816 r = TREE_REAL_CST (arg1);
9817 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9818 {
9819 tem = build_real (type, r);
9820 return fold_build2 (MULT_EXPR, type,
9821 fold_convert (type, arg0), tem);
9822 }
9823 }
9824 }
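/* Annotation, not in the original source: e.g. 'x / 4.0' is rewritten as
   'x * 0.25' whenever optimizing, because 0.25 is an exactly
   representable reciprocal; 'x / 3.0' is rewritten only under
   -funsafe-math-optimizations, since 1.0/3.0 is inexact.  */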
9825 /* Convert A/B/C to A/(B*C). */
9826 if (flag_unsafe_math_optimizations
9827 && TREE_CODE (arg0) == RDIV_EXPR)
9828 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9829 fold_build2 (MULT_EXPR, type,
9830 TREE_OPERAND (arg0, 1), arg1));
9832 /* Convert A/(B/C) to (A/B)*C. */
9833 if (flag_unsafe_math_optimizations
9834 && TREE_CODE (arg1) == RDIV_EXPR)
9835 return fold_build2 (MULT_EXPR, type,
9836 fold_build2 (RDIV_EXPR, type, arg0,
9837 TREE_OPERAND (arg1, 0)),
9838 TREE_OPERAND (arg1, 1));
9840 /* Convert C1/(X*C2) into (C1/C2)/X. */
9841 if (flag_unsafe_math_optimizations
9842 && TREE_CODE (arg1) == MULT_EXPR
9843 && TREE_CODE (arg0) == REAL_CST
9844 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9845 {
9846 tree tem = const_binop (RDIV_EXPR, arg0,
9847 TREE_OPERAND (arg1, 1), 0);
9848 if (tem)
9849 return fold_build2 (RDIV_EXPR, type, tem,
9850 TREE_OPERAND (arg1, 0));
9851 }
9853 if (flag_unsafe_math_optimizations)
9854 {
9855 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9856 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9858 /* Optimize sin(x)/cos(x) as tan(x). */
9859 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9860 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9861 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9862 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9863 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9864 {
9865 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9867 if (tanfn != NULL_TREE)
9868 return build_function_call_expr (tanfn,
9869 TREE_OPERAND (arg0, 1));
9870 }
9872 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9873 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9874 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9875 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9876 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9877 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9878 {
9879 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9881 if (tanfn != NULL_TREE)
9882 {
9883 tree tmp = TREE_OPERAND (arg0, 1);
9884 tmp = build_function_call_expr (tanfn, tmp);
9885 return fold_build2 (RDIV_EXPR, type,
9886 build_real (type, dconst1), tmp);
9887 }
9888 }
9890 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9891 NaNs or Infinities. */
9892 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9893 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9894 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9895 {
9896 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9897 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9899 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9900 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9901 && operand_equal_p (arg00, arg01, 0))
9902 {
9903 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9905 if (cosfn != NULL_TREE)
9906 return build_function_call_expr (cosfn,
9907 TREE_OPERAND (arg0, 1));
9908 }
9909 }
9911 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9912 NaNs or Infinities. */
9913 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9914 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9915 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9916 {
9917 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9918 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9920 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9921 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9922 && operand_equal_p (arg00, arg01, 0))
9923 {
9924 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9926 if (cosfn != NULL_TREE)
9927 {
9928 tree tmp = TREE_OPERAND (arg0, 1);
9929 tmp = build_function_call_expr (cosfn, tmp);
9930 return fold_build2 (RDIV_EXPR, type,
9931 build_real (type, dconst1),
9932 tmp);
9933 }
9934 }
9935 }
9937 /* Optimize pow(x,c)/x as pow(x,c-1). */
9938 if (fcode0 == BUILT_IN_POW
9939 || fcode0 == BUILT_IN_POWF
9940 || fcode0 == BUILT_IN_POWL)
9941 {
9942 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9943 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9944 if (TREE_CODE (arg01) == REAL_CST
9945 && ! TREE_CONSTANT_OVERFLOW (arg01)
9946 && operand_equal_p (arg1, arg00, 0))
9947 {
9948 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9949 REAL_VALUE_TYPE c;
9950 tree arg, arglist;
9952 c = TREE_REAL_CST (arg01);
9953 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9954 arg = build_real (type, c);
9955 arglist = build_tree_list (NULL_TREE, arg);
9956 arglist = tree_cons (NULL_TREE, arg1, arglist);
9957 return build_function_call_expr (powfn, arglist);
9958 }
9959 }
9961 /* Optimize x/expN(y) into x*expN(-y). */
9962 if (BUILTIN_EXPONENT_P (fcode1))
9963 {
9964 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9965 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9966 tree arglist = build_tree_list (NULL_TREE,
9967 fold_convert (type, arg));
9968 arg1 = build_function_call_expr (expfn, arglist);
9969 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9970 }
9972 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9973 if (fcode1 == BUILT_IN_POW
9974 || fcode1 == BUILT_IN_POWF
9975 || fcode1 == BUILT_IN_POWL)
9976 {
9977 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9978 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9979 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9980 tree neg11 = fold_convert (type, negate_expr (arg11));
9981 tree arglist = tree_cons (NULL_TREE, arg10,
9982 build_tree_list (NULL_TREE, neg11));
9983 arg1 = build_function_call_expr (powfn, arglist);
9984 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9985 }
9986 }
9987 return NULL_TREE;
9989 case TRUNC_DIV_EXPR:
9990 case FLOOR_DIV_EXPR:
9991 /* Simplify A / (B << N) where A and B are positive and B is
9992 a power of 2, to A >> (N + log2(B)). */
9993 if (TREE_CODE (arg1) == LSHIFT_EXPR
9994 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9996 tree sval = TREE_OPERAND (arg1, 0);
9997 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9998 {
9999 tree sh_cnt = TREE_OPERAND (arg1, 1);
10000 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10002 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10003 sh_cnt, build_int_cst (NULL_TREE, pow2));
10004 return fold_build2 (RSHIFT_EXPR, type,
10005 fold_convert (type, arg0), sh_cnt);
10006 }
10007 }
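/* Worked example, not in the original source: for unsigned x,
   'x / (4 << n)' becomes 'x >> (n + 2)', since log2 (4) == 2.  */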
10008 /* Fall thru */
10010 case ROUND_DIV_EXPR:
10011 case CEIL_DIV_EXPR:
10012 case EXACT_DIV_EXPR:
10013 if (integer_onep (arg1))
10014 return non_lvalue (fold_convert (type, arg0));
10015 if (integer_zerop (arg1))
10016 return NULL_TREE;
10017 /* X / -1 is -X. */
10018 if (!TYPE_UNSIGNED (type)
10019 && TREE_CODE (arg1) == INTEGER_CST
10020 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10021 && TREE_INT_CST_HIGH (arg1) == -1)
10022 return fold_convert (type, negate_expr (arg0));
10024 /* Convert -A / -B to A / B when the type is signed and overflow is
10025 undefined.  */
10026 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10027 && TREE_CODE (arg0) == NEGATE_EXPR
10028 && negate_expr_p (arg1))
10029 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10030 negate_expr (arg1));
10031 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10032 && TREE_CODE (arg1) == NEGATE_EXPR
10033 && negate_expr_p (arg0))
10034 return fold_build2 (code, type, negate_expr (arg0),
10035 TREE_OPERAND (arg1, 0));
10037 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10038 operation, EXACT_DIV_EXPR.
10040 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10041 At one time others generated faster code, it's not clear if they do
10042	 after the last round of changes to the DIV code in expmed.c. */
10043 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10044 && multiple_of_p (type, arg0, arg1))
10045 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
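      /* Editor's note (illustrative, not original GCC text): if arg0 is
	 known to be a multiple of 8 and arg1 is 8, a FLOOR_DIV_EXPR is
	 retagged EXACT_DIV_EXPR above; expmed.c may then expand the exact
	 division by a constant with a cheaper sequence (e.g. a multiply
	 by a modular inverse) instead of a general division. */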
10047 if (TREE_CODE (arg1) == INTEGER_CST
10048 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10049 return fold_convert (type, tem);
10053 case CEIL_MOD_EXPR:
10054 case FLOOR_MOD_EXPR:
10055 case ROUND_MOD_EXPR:
10056 case TRUNC_MOD_EXPR:
10057      /* X % 1 is always zero, but be sure to preserve any side
10058	  effects in X. */
10059 if (integer_onep (arg1))
10060 return omit_one_operand (type, integer_zero_node, arg0);
10062      /* For X % 0, return X % 0 unchanged so that we can get the
10063	  proper warnings and errors. */
10064 if (integer_zerop (arg1))
10067 /* 0 % X is always zero, but be sure to preserve any side
10068 effects in X. Place this after checking for X == 0. */
10069 if (integer_zerop (arg0))
10070 return omit_one_operand (type, integer_zero_node, arg1);
10072 /* X % -1 is zero. */
10073 if (!TYPE_UNSIGNED (type)
10074 && TREE_CODE (arg1) == INTEGER_CST
10075 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10076 && TREE_INT_CST_HIGH (arg1) == -1)
10077 return omit_one_operand (type, integer_zero_node, arg0);
10079 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10080 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10081 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10082 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10085 /* Also optimize A % (C << N) where C is a power of 2,
10086 to A & ((C << N) - 1). */
10087 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10088 c = TREE_OPERAND (arg1, 0);
10090 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10092 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10093 build_int_cst (TREE_TYPE (arg1), 1));
10094 return fold_build2 (BIT_AND_EXPR, type,
10095 fold_convert (type, arg0),
10096 fold_convert (type, mask));
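	  /* Illustrative example (editor's sketch, not original GCC text):
	     for unsigned x, x % 8 folds to x & 7 (e.g. 29 % 8 == 5 == (29 & 7)),
	     and the LSHIFT case folds x % (4 << n) to x & ((4 << n) - 1). */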
10100 /* X % -C is the same as X % C. */
10101 if (code == TRUNC_MOD_EXPR
10102 && !TYPE_UNSIGNED (type)
10103 && TREE_CODE (arg1) == INTEGER_CST
10104 && !TREE_CONSTANT_OVERFLOW (arg1)
10105 && TREE_INT_CST_HIGH (arg1) < 0
10107 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10108 && !sign_bit_p (arg1, arg1))
10109 return fold_build2 (code, type, fold_convert (type, arg0),
10110 fold_convert (type, negate_expr (arg1)));
10112 /* X % -Y is the same as X % Y. */
10113 if (code == TRUNC_MOD_EXPR
10114 && !TYPE_UNSIGNED (type)
10115 && TREE_CODE (arg1) == NEGATE_EXPR
10117 return fold_build2 (code, type, fold_convert (type, arg0),
10118 fold_convert (type, TREE_OPERAND (arg1, 0)));
10120 if (TREE_CODE (arg1) == INTEGER_CST
10121 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10122 return fold_convert (type, tem);
10128 if (integer_all_onesp (arg0))
10129 return omit_one_operand (type, arg0, arg1);
10133 /* Optimize -1 >> x for arithmetic right shifts. */
10134 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10135 return omit_one_operand (type, arg0, arg1);
10136 /* ... fall through ... */
10140 if (integer_zerop (arg1))
10141 return non_lvalue (fold_convert (type, arg0));
10142 if (integer_zerop (arg0))
10143 return omit_one_operand (type, arg0, arg1);
10145      /* Since a negative shift count is not well-defined,
10146 don't try to compute it in the compiler. */
10147 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10150 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10151 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10152 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10153 && host_integerp (TREE_OPERAND (arg0, 1), false)
10154 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10156 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10157 + TREE_INT_CST_LOW (arg1));
10159 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10160 being well defined. */
10161 if (low >= TYPE_PRECISION (type))
10163 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10164 low = low % TYPE_PRECISION (type);
10165 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10166 return build_int_cst (type, 0);
10168 low = TYPE_PRECISION (type) - 1;
10171 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10172 build_int_cst (type, low));
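	  /* Illustrative example (editor's sketch, not original GCC text):
	     for a 32-bit type, (x >> 3) >> 2 folds to x >> 5. When the
	     combined count reaches the precision, a rotate count is
	     reduced modulo the precision, left and unsigned right shifts
	     fold to 0, and a signed right shift saturates:
	     (x >> 20) >> 20 ==> x >> 31. */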
10175 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10176 into x & ((unsigned)-1 >> c) for unsigned types. */
10177 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10178 || (TYPE_UNSIGNED (type)
10179 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10180 && host_integerp (arg1, false)
10181 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10182 && host_integerp (TREE_OPERAND (arg0, 1), false)
10183 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10185 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10186 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10192 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10194 lshift = build_int_cst (type, -1);
10195 lshift = int_const_binop (code, lshift, arg1, 0);
10197 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
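	  /* Illustrative example (editor's sketch, not original GCC text):
	     for 32-bit x, (x >> 4) << 4 folds to x & (-1 << 4), i.e.
	     x & 0xfffffff0; for unsigned x, (x << 4) >> 4 folds to
	     x & ((unsigned) -1 >> 4), i.e. x & 0x0fffffff. */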
10201 /* Rewrite an LROTATE_EXPR by a constant into an
10202 RROTATE_EXPR by a new constant. */
10203 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10205 tree tem = build_int_cst (TREE_TYPE (arg1),
10206 GET_MODE_BITSIZE (TYPE_MODE (type)));
10207 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10208 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
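	  /* Illustrative example (editor's sketch, not original GCC text):
	     in a 32-bit mode, a rotate left by 5 becomes a rotate right
	     by 32 - 5 == 27, so later folds only ever have to match the
	     RROTATE_EXPR form. */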
10211 /* If we have a rotate of a bit operation with the rotate count and
10212 the second operand of the bit operation both constant,
10213 permute the two operations. */
10214 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10215 && (TREE_CODE (arg0) == BIT_AND_EXPR
10216 || TREE_CODE (arg0) == BIT_IOR_EXPR
10217 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10218 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10219 return fold_build2 (TREE_CODE (arg0), type,
10220 fold_build2 (code, type,
10221 TREE_OPERAND (arg0, 0), arg1),
10222 fold_build2 (code, type,
10223 TREE_OPERAND (arg0, 1), arg1));
10225      /* Two consecutive rotates adding up to the width of the mode can
10226	  be ignored. */
10227 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10228 && TREE_CODE (arg0) == RROTATE_EXPR
10229 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10230 && TREE_INT_CST_HIGH (arg1) == 0
10231 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10232 && ((TREE_INT_CST_LOW (arg1)
10233 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10234 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10235 return TREE_OPERAND (arg0, 0);
10240 if (operand_equal_p (arg0, arg1, 0))
10241 return omit_one_operand (type, arg0, arg1);
10242 if (INTEGRAL_TYPE_P (type)
10243 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10244 return omit_one_operand (type, arg1, arg0);
10245 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10251 if (operand_equal_p (arg0, arg1, 0))
10252 return omit_one_operand (type, arg0, arg1);
10253 if (INTEGRAL_TYPE_P (type)
10254 && TYPE_MAX_VALUE (type)
10255 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10256 return omit_one_operand (type, arg1, arg0);
10257 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10262 case TRUTH_ANDIF_EXPR:
10263 /* Note that the operands of this must be ints
10264 and their values must be 0 or 1.
10265 ("true" is a fixed value perhaps depending on the language.) */
10266 /* If first arg is constant zero, return it. */
10267 if (integer_zerop (arg0))
10268 return fold_convert (type, arg0);
10269 case TRUTH_AND_EXPR:
10270 /* If either arg is constant true, drop it. */
10271 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10272 return non_lvalue (fold_convert (type, arg1));
10273 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10274 /* Preserve sequence points. */
10275 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10276 return non_lvalue (fold_convert (type, arg0));
10277 /* If second arg is constant zero, result is zero, but first arg
10278 must be evaluated. */
10279 if (integer_zerop (arg1))
10280 return omit_one_operand (type, arg1, arg0);
10281 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10282 case will be handled here. */
10283 if (integer_zerop (arg0))
10284 return omit_one_operand (type, arg0, arg1);
10286 /* !X && X is always false. */
10287 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10288 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10289 return omit_one_operand (type, integer_zero_node, arg1);
10290 /* X && !X is always false. */
10291 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10292 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10293 return omit_one_operand (type, integer_zero_node, arg0);
10295 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10296	 means A >= Y && A != MAX, but in this case we know that
10297	 A < X <= MAX. */
10299 if (!TREE_SIDE_EFFECTS (arg0)
10300 && !TREE_SIDE_EFFECTS (arg1))
10302 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10303 if (tem && !operand_equal_p (tem, arg0, 0))
10304 return fold_build2 (code, type, tem, arg1);
10306 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10307 if (tem && !operand_equal_p (tem, arg1, 0))
10308 return fold_build2 (code, type, arg0, tem);
10312 /* We only do these simplifications if we are optimizing. */
10316 /* Check for things like (A || B) && (A || C). We can convert this
10317 to A || (B && C). Note that either operator can be any of the four
10318 truth and/or operations and the transformation will still be
10319 valid. Also note that we only care about order for the
10320 ANDIF and ORIF operators. If B contains side effects, this
10321 might change the truth-value of A. */
10322 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10323 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10324 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10325 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10326 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10327 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10329 tree a00 = TREE_OPERAND (arg0, 0);
10330 tree a01 = TREE_OPERAND (arg0, 1);
10331 tree a10 = TREE_OPERAND (arg1, 0);
10332 tree a11 = TREE_OPERAND (arg1, 1);
10333 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10334 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10335 && (code == TRUTH_AND_EXPR
10336 || code == TRUTH_OR_EXPR));
10338 if (operand_equal_p (a00, a10, 0))
10339 return fold_build2 (TREE_CODE (arg0), type, a00,
10340 fold_build2 (code, type, a01, a11));
10341 else if (commutative && operand_equal_p (a00, a11, 0))
10342 return fold_build2 (TREE_CODE (arg0), type, a00,
10343 fold_build2 (code, type, a01, a10));
10344 else if (commutative && operand_equal_p (a01, a10, 0))
10345 return fold_build2 (TREE_CODE (arg0), type, a01,
10346 fold_build2 (code, type, a00, a11));
10348	  /* This case is tricky because we must either have commutative
10349 operators or else A10 must not have side-effects. */
10351 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10352 && operand_equal_p (a01, a11, 0))
10353 return fold_build2 (TREE_CODE (arg0), type,
10354 fold_build2 (code, type, a00, a10),
10358 /* See if we can build a range comparison. */
10359 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10362 /* Check for the possibility of merging component references. If our
10363 lhs is another similar operation, try to merge its rhs with our
10364 rhs. Then try to merge our lhs and rhs. */
10365 if (TREE_CODE (arg0) == code
10366 && 0 != (tem = fold_truthop (code, type,
10367 TREE_OPERAND (arg0, 1), arg1)))
10368 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10370 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10375 case TRUTH_ORIF_EXPR:
10376 /* Note that the operands of this must be ints
10377 and their values must be 0 or true.
10378 ("true" is a fixed value perhaps depending on the language.) */
10379 /* If first arg is constant true, return it. */
10380 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10381 return fold_convert (type, arg0);
10382 case TRUTH_OR_EXPR:
10383 /* If either arg is constant zero, drop it. */
10384 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10385 return non_lvalue (fold_convert (type, arg1));
10386 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10387 /* Preserve sequence points. */
10388 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10389 return non_lvalue (fold_convert (type, arg0));
10390 /* If second arg is constant true, result is true, but we must
10391 evaluate first arg. */
10392 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10393 return omit_one_operand (type, arg1, arg0);
10394      /* Likewise for first arg, but note this only occurs here for
10395	  TRUTH_OR_EXPR. */
10396 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10397 return omit_one_operand (type, arg0, arg1);
10399 /* !X || X is always true. */
10400 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10401 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10402 return omit_one_operand (type, integer_one_node, arg1);
10403 /* X || !X is always true. */
10404 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10405 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10406 return omit_one_operand (type, integer_one_node, arg0);
10410 case TRUTH_XOR_EXPR:
10411 /* If the second arg is constant zero, drop it. */
10412 if (integer_zerop (arg1))
10413 return non_lvalue (fold_convert (type, arg0));
10414 /* If the second arg is constant true, this is a logical inversion. */
10415 if (integer_onep (arg1))
10417 /* Only call invert_truthvalue if operand is a truth value. */
10418 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10419 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10421 tem = invert_truthvalue (arg0);
10422 return non_lvalue (fold_convert (type, tem));
10424 /* Identical arguments cancel to zero. */
10425 if (operand_equal_p (arg0, arg1, 0))
10426 return omit_one_operand (type, integer_zero_node, arg0);
10428 /* !X ^ X is always true. */
10429 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10430 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10431 return omit_one_operand (type, integer_one_node, arg1);
10433 /* X ^ !X is always true. */
10434 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10435 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10436 return omit_one_operand (type, integer_one_node, arg0);
10442 tem = fold_comparison (code, type, op0, op1);
10443 if (tem != NULL_TREE)
10446 /* bool_var != 0 becomes bool_var. */
10447 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10448 && code == NE_EXPR)
10449 return non_lvalue (fold_convert (type, arg0));
10451 /* bool_var == 1 becomes bool_var. */
10452 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10453 && code == EQ_EXPR)
10454 return non_lvalue (fold_convert (type, arg0));
10456 /* bool_var != 1 becomes !bool_var. */
10457 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10458 && code == NE_EXPR)
10459 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10461 /* bool_var == 0 becomes !bool_var. */
10462 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10463 && code == EQ_EXPR)
10464 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10466 /* If this is an equality comparison of the address of a non-weak
10467 object against zero, then we know the result. */
10468 if (TREE_CODE (arg0) == ADDR_EXPR
10469 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10470 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10471 && integer_zerop (arg1))
10472 return constant_boolean_node (code != EQ_EXPR, type);
10474 /* If this is an equality comparison of the address of two non-weak,
10475 unaliased symbols neither of which are extern (since we do not
10476 have access to attributes for externs), then we know the result. */
10477 if (TREE_CODE (arg0) == ADDR_EXPR
10478 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10479 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10480 && ! lookup_attribute ("alias",
10481 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10482 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10483 && TREE_CODE (arg1) == ADDR_EXPR
10484 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10485 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10486 && ! lookup_attribute ("alias",
10487 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10488 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10490 /* We know that we're looking at the address of two
10491 non-weak, unaliased, static _DECL nodes.
10493 It is both wasteful and incorrect to call operand_equal_p
10494 to compare the two ADDR_EXPR nodes. It is wasteful in that
10495 all we need to do is test pointer equality for the arguments
10496 to the two ADDR_EXPR nodes. It is incorrect to use
10497 operand_equal_p as that function is NOT equivalent to a
10498 C equality test. It can in fact return false for two
10499	     objects which would test as equal using the C equality
10500	     operator. */
10501 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10502 return constant_boolean_node (equal
10503 ? code == EQ_EXPR : code != EQ_EXPR,
10507 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10508 a MINUS_EXPR of a constant, we can convert it into a comparison with
10509 a revised constant as long as no overflow occurs. */
10510 if (TREE_CODE (arg1) == INTEGER_CST
10511 && (TREE_CODE (arg0) == PLUS_EXPR
10512 || TREE_CODE (arg0) == MINUS_EXPR)
10513 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10514 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10515 ? MINUS_EXPR : PLUS_EXPR,
10516 fold_convert (TREE_TYPE (arg0), arg1),
10517 TREE_OPERAND (arg0, 1), 0))
10518 && ! TREE_CONSTANT_OVERFLOW (tem))
10519 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10521 /* Similarly for a NEGATE_EXPR. */
10522 if (TREE_CODE (arg0) == NEGATE_EXPR
10523 && TREE_CODE (arg1) == INTEGER_CST
10524 && 0 != (tem = negate_expr (arg1))
10525 && TREE_CODE (tem) == INTEGER_CST
10526 && ! TREE_CONSTANT_OVERFLOW (tem))
10527 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10529 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10530 for !=. Don't do this for ordered comparisons due to overflow. */
10531 if (TREE_CODE (arg0) == MINUS_EXPR
10532 && integer_zerop (arg1))
10533 return fold_build2 (code, type,
10534 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10536 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10537 if (TREE_CODE (arg0) == ABS_EXPR
10538 && (integer_zerop (arg1) || real_zerop (arg1)))
10539 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10541 /* If this is an EQ or NE comparison with zero and ARG0 is
10542 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10543 two operations, but the latter can be done in one less insn
10544 on machines that have only two-operand insns or on which a
10545 constant cannot be the first operand. */
10546 if (TREE_CODE (arg0) == BIT_AND_EXPR
10547 && integer_zerop (arg1))
10549 tree arg00 = TREE_OPERAND (arg0, 0);
10550 tree arg01 = TREE_OPERAND (arg0, 1);
10551 if (TREE_CODE (arg00) == LSHIFT_EXPR
10552 && integer_onep (TREE_OPERAND (arg00, 0)))
10554 fold_build2 (code, type,
10555 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10556 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10557 arg01, TREE_OPERAND (arg00, 1)),
10558 fold_convert (TREE_TYPE (arg0),
10559 integer_one_node)),
10561 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10562 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10564 fold_build2 (code, type,
10565 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10566 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10567 arg00, TREE_OPERAND (arg01, 1)),
10568 fold_convert (TREE_TYPE (arg0),
10569 integer_one_node)),
10573 /* If this is an NE or EQ comparison of zero against the result of a
10574 signed MOD operation whose second operand is a power of 2, make
10575 the MOD operation unsigned since it is simpler and equivalent. */
10576 if (integer_zerop (arg1)
10577 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10578 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10579 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10580 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10581 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10582 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10584 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10585 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10586 fold_convert (newtype,
10587 TREE_OPERAND (arg0, 0)),
10588 fold_convert (newtype,
10589 TREE_OPERAND (arg0, 1)));
10591 return fold_build2 (code, type, newmod,
10592 fold_convert (newtype, arg1));
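	  /* Illustrative example (editor's sketch, not original GCC text):
	     for signed int x, x % 4 == 0 only tests the two low bits, so
	     it folds to (unsigned int) x % 4U == 0, which expands without
	     the sign fix-up a signed modulus would need. */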
10595 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10596	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10597	 a single bit. */
10598 if (TREE_CODE (arg0) == BIT_AND_EXPR
10599 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10600 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10602 && integer_pow2p (TREE_OPERAND (arg0, 1))
10603 && integer_zerop (arg1))
10605 tree itype = TREE_TYPE (arg0);
10606 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10607 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10609 /* Check for a valid shift count. */
10610 if (TREE_INT_CST_HIGH (arg001) == 0
10611 && TREE_INT_CST_LOW (arg001) < prec)
10613 tree arg01 = TREE_OPERAND (arg0, 1);
10614 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10615 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10616 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10617 can be rewritten as (X & (C2 << C1)) != 0. */
10618 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10620 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10621 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10622 return fold_build2 (code, type, tem, arg1);
10624 /* Otherwise, for signed (arithmetic) shifts,
10625 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10626 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10627 else if (!TYPE_UNSIGNED (itype))
10628 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10629 arg000, build_int_cst (itype, 0));
10630	      /* Otherwise, for unsigned (logical) shifts,
10631 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10632 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10634 return omit_one_operand (type,
10635 code == EQ_EXPR ? integer_one_node
10636 : integer_zero_node,
10641 /* If this is an NE comparison of zero with an AND of one, remove the
10642 comparison since the AND will give the correct value. */
10643 if (code == NE_EXPR
10644 && integer_zerop (arg1)
10645 && TREE_CODE (arg0) == BIT_AND_EXPR
10646 && integer_onep (TREE_OPERAND (arg0, 1)))
10647 return fold_convert (type, arg0);
10649 /* If we have (A & C) == C where C is a power of 2, convert this into
10650 (A & C) != 0. Similarly for NE_EXPR. */
10651 if (TREE_CODE (arg0) == BIT_AND_EXPR
10652 && integer_pow2p (TREE_OPERAND (arg0, 1))
10653 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10654 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10655 arg0, fold_convert (TREE_TYPE (arg0),
10656 integer_zero_node));
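      /* Illustrative example (editor's sketch, not original GCC text):
	 with C == 8, (a & 8) == 8 becomes (a & 8) != 0 and
	 (a & 8) != 8 becomes (a & 8) == 0; a single-bit mask can only
	 yield 0 or C. */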
10658 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10659 bit, then fold the expression into A < 0 or A >= 0. */
10660 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10664 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10665 Similarly for NE_EXPR. */
10666 if (TREE_CODE (arg0) == BIT_AND_EXPR
10667 && TREE_CODE (arg1) == INTEGER_CST
10668 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10670 tree notc = fold_build1 (BIT_NOT_EXPR,
10671 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10672 TREE_OPERAND (arg0, 1));
10673 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10675 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10676 if (integer_nonzerop (dandnotc))
10677 return omit_one_operand (type, rslt, arg0);
10680 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10681 Similarly for NE_EXPR. */
10682 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10683 && TREE_CODE (arg1) == INTEGER_CST
10684 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10686 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10687 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10688 TREE_OPERAND (arg0, 1), notd);
10689 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10690 if (integer_nonzerop (candnotd))
10691 return omit_one_operand (type, rslt, arg0);
10694 /* If this is a comparison of a field, we may be able to simplify it. */
10695 if (((TREE_CODE (arg0) == COMPONENT_REF
10696 && lang_hooks.can_use_bit_fields_p ())
10697 || TREE_CODE (arg0) == BIT_FIELD_REF)
10698 /* Handle the constant case even without -O
10699 to make sure the warnings are given. */
10700 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10702 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10707 /* Optimize comparisons of strlen vs zero to a compare of the
10708 first character of the string vs zero. To wit,
10709 strlen(ptr) == 0 => *ptr == 0
10710 strlen(ptr) != 0 => *ptr != 0
10711 Other cases should reduce to one of these two (or a constant)
10712 due to the return value of strlen being unsigned. */
10713 if (TREE_CODE (arg0) == CALL_EXPR
10714 && integer_zerop (arg1))
10716 tree fndecl = get_callee_fndecl (arg0);
10720 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10721 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10722 && (arglist = TREE_OPERAND (arg0, 1))
10723 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10724 && ! TREE_CHAIN (arglist))
10726 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10727 return fold_build2 (code, type, iref,
10728 build_int_cst (TREE_TYPE (iref), 0));
10732 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10733 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10734 if (TREE_CODE (arg0) == RSHIFT_EXPR
10735 && integer_zerop (arg1)
10736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10738 tree arg00 = TREE_OPERAND (arg0, 0);
10739 tree arg01 = TREE_OPERAND (arg0, 1);
10740 tree itype = TREE_TYPE (arg00);
10741 if (TREE_INT_CST_HIGH (arg01) == 0
10742 && TREE_INT_CST_LOW (arg01)
10743 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10745 if (TYPE_UNSIGNED (itype))
10747 itype = lang_hooks.types.signed_type (itype);
10748 arg00 = fold_convert (itype, arg00);
10750 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10751 type, arg00, build_int_cst (itype, 0));
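	  /* Illustrative example (editor's sketch, not original GCC text):
	     for 32-bit int x, (x >> 31) != 0 folds to x < 0 and
	     (x >> 31) == 0 to x >= 0, since the shift isolates the sign
	     bit; an unsigned operand is first converted to its signed
	     counterpart. */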
10755 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10756 if (integer_zerop (arg1)
10757 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10758 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10759 TREE_OPERAND (arg0, 1));
10761 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10762 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10763 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10764 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10765 build_int_cst (TREE_TYPE (arg1), 0));
10766 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10767 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10768 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10769 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10770 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10771 build_int_cst (TREE_TYPE (arg1), 0));
10773 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10774 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10775 && TREE_CODE (arg1) == INTEGER_CST
10776 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10777 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10778 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10779 TREE_OPERAND (arg0, 1), arg1));
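      /* Illustrative example (editor's sketch, not original GCC text):
	 (x ^ 5) == 3 folds to x == (5 ^ 3), i.e. x == 6, because XOR by
	 a constant is an involution that can be moved to the constant
	 side of the comparison. */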
10781 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10782 (X & C) == 0 when C is a single bit. */
10783 if (TREE_CODE (arg0) == BIT_AND_EXPR
10784 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10785 && integer_zerop (arg1)
10786 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10788 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10789 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10790 TREE_OPERAND (arg0, 1));
10791 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10795 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10796 constant C is a power of two, i.e. a single bit. */
10797 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10798 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10799 && integer_zerop (arg1)
10800 && integer_pow2p (TREE_OPERAND (arg0, 1))
10801 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10802 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10804 tree arg00 = TREE_OPERAND (arg0, 0);
10805 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10806 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10809 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10810	 when C is a power of two, i.e. a single bit. */
10811 if (TREE_CODE (arg0) == BIT_AND_EXPR
10812 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10813 && integer_zerop (arg1)
10814 && integer_pow2p (TREE_OPERAND (arg0, 1))
10815 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10816 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10818 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10819 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10820 arg000, TREE_OPERAND (arg0, 1));
10821 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10822 tem, build_int_cst (TREE_TYPE (tem), 0));
10825 if (integer_zerop (arg1)
10826 && tree_expr_nonzero_p (arg0))
10828	  tree res = constant_boolean_node (code == NE_EXPR, type);
10829 return omit_one_operand (type, res, arg0);
10832 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10833 if (TREE_CODE (arg0) == NEGATE_EXPR
10834 && TREE_CODE (arg1) == NEGATE_EXPR)
10835 return fold_build2 (code, type,
10836 TREE_OPERAND (arg0, 0),
10837 TREE_OPERAND (arg1, 0));
10839      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10840 if (TREE_CODE (arg0) == BIT_AND_EXPR
10841 && TREE_CODE (arg1) == BIT_AND_EXPR)
10843 tree arg00 = TREE_OPERAND (arg0, 0);
10844 tree arg01 = TREE_OPERAND (arg0, 1);
10845 tree arg10 = TREE_OPERAND (arg1, 0);
10846 tree arg11 = TREE_OPERAND (arg1, 1);
10847 tree itype = TREE_TYPE (arg0);
10849 if (operand_equal_p (arg01, arg11, 0))
10850 return fold_build2 (code, type,
10851 fold_build2 (BIT_AND_EXPR, itype,
10852 fold_build2 (BIT_XOR_EXPR, itype,
10855 build_int_cst (itype, 0));
10857 if (operand_equal_p (arg01, arg10, 0))
10858 return fold_build2 (code, type,
10859 fold_build2 (BIT_AND_EXPR, itype,
10860 fold_build2 (BIT_XOR_EXPR, itype,
10863 build_int_cst (itype, 0));
10865 if (operand_equal_p (arg00, arg11, 0))
10866 return fold_build2 (code, type,
10867 fold_build2 (BIT_AND_EXPR, itype,
10868 fold_build2 (BIT_XOR_EXPR, itype,
10871 build_int_cst (itype, 0));
10873 if (operand_equal_p (arg00, arg10, 0))
10874 return fold_build2 (code, type,
10875 fold_build2 (BIT_AND_EXPR, itype,
10876 fold_build2 (BIT_XOR_EXPR, itype,
10879 build_int_cst (itype, 0));
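	  /* Illustrative example (editor's sketch, not original GCC text):
	     with the shared mask 7, (x & 7) == (y & 7) folds to
	     ((x ^ y) & 7) == 0: the masked values agree exactly when x
	     and y agree on the bits selected by the mask. */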
10888 tem = fold_comparison (code, type, op0, op1);
10889 if (tem != NULL_TREE)
10892 /* Transform comparisons of the form X +- C CMP X. */
10893 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10894 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10895 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10896 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10897 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10898 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10899 && !(flag_wrapv || flag_trapv))))
10901 tree arg01 = TREE_OPERAND (arg0, 1);
10902 enum tree_code code0 = TREE_CODE (arg0);
10905 if (TREE_CODE (arg01) == REAL_CST)
10906 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10908 is_positive = tree_int_cst_sgn (arg01);
10910 /* (X - c) > X becomes false. */
10911 if (code == GT_EXPR
10912 && ((code0 == MINUS_EXPR && is_positive >= 0)
10913 || (code0 == PLUS_EXPR && is_positive <= 0)))
10914 return constant_boolean_node (0, type);
10916 /* Likewise (X + c) < X becomes false. */
10917 if (code == LT_EXPR
10918 && ((code0 == PLUS_EXPR && is_positive >= 0)
10919 || (code0 == MINUS_EXPR && is_positive <= 0)))
10920 return constant_boolean_node (0, type);
10922 /* Convert (X - c) <= X to true. */
10923 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10925 && ((code0 == MINUS_EXPR && is_positive >= 0)
10926 || (code0 == PLUS_EXPR && is_positive <= 0)))
10927 return constant_boolean_node (1, type);
10929 /* Convert (X + c) >= X to true. */
10930 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10932 && ((code0 == PLUS_EXPR && is_positive >= 0)
10933 || (code0 == MINUS_EXPR && is_positive <= 0)))
10934 return constant_boolean_node (1, type);
10936 if (TREE_CODE (arg01) == INTEGER_CST)
10938 /* Convert X + c > X and X - c < X to true for integers. */
10939 if (code == GT_EXPR
10940 && ((code0 == PLUS_EXPR && is_positive > 0)
10941 || (code0 == MINUS_EXPR && is_positive < 0)))
10942 return constant_boolean_node (1, type);
10944 if (code == LT_EXPR
10945 && ((code0 == MINUS_EXPR && is_positive > 0)
10946 || (code0 == PLUS_EXPR && is_positive < 0)))
10947 return constant_boolean_node (1, type);
10949 /* Convert X + c <= X and X - c >= X to false for integers. */
10950 if (code == LE_EXPR
10951 && ((code0 == PLUS_EXPR && is_positive > 0)
10952 || (code0 == MINUS_EXPR && is_positive < 0)))
10953 return constant_boolean_node (0, type);
10955 if (code == GE_EXPR
10956 && ((code0 == MINUS_EXPR && is_positive > 0)
10957 || (code0 == PLUS_EXPR && is_positive < 0)))
10958 return constant_boolean_node (0, type);
10962 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10963 This transformation affects the cases which are handled in later
10964 optimizations involving comparisons with non-negative constants. */
10965 if (TREE_CODE (arg1) == INTEGER_CST
10966 && TREE_CODE (arg0) != INTEGER_CST
10967 && tree_int_cst_sgn (arg1) > 0)
10969 if (code == GE_EXPR)
10971 arg1 = const_binop (MINUS_EXPR, arg1,
10972 build_int_cst (TREE_TYPE (arg1), 1), 0);
10973 return fold_build2 (GT_EXPR, type, arg0,
10974 fold_convert (TREE_TYPE (arg0), arg1));
10976 if (code == LT_EXPR)
10978 arg1 = const_binop (MINUS_EXPR, arg1,
10979 build_int_cst (TREE_TYPE (arg1), 1), 0);
10980 return fold_build2 (LE_EXPR, type, arg0,
10981 fold_convert (TREE_TYPE (arg0), arg1));
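      /* Illustrative example (editor's sketch, not original GCC text):
	 x >= 5 is canonicalized to x > 4 and x < 5 to x <= 4, so the
	 code below that recognizes comparisons against the extreme
	 values of the type only has to match the GT/LE forms. */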
10985 /* Comparisons with the highest or lowest possible integer of
10986 the specified size will have known values. */
10988 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10990 if (TREE_CODE (arg1) == INTEGER_CST
10991 && ! TREE_CONSTANT_OVERFLOW (arg1)
10992 && width <= 2 * HOST_BITS_PER_WIDE_INT
10993 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10994 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10996 HOST_WIDE_INT signed_max_hi;
10997 unsigned HOST_WIDE_INT signed_max_lo;
10998 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11000 if (width <= HOST_BITS_PER_WIDE_INT)
11002 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11007 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11009 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11015 max_lo = signed_max_lo;
11016 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11022 width -= HOST_BITS_PER_WIDE_INT;
11023 signed_max_lo = -1;
11024 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11029 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11031 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11036 max_hi = signed_max_hi;
11037 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11041 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11042 && TREE_INT_CST_LOW (arg1) == max_lo)
11046 return omit_one_operand (type, integer_zero_node, arg0);
11049 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11052 return omit_one_operand (type, integer_one_node, arg0);
11055 return fold_build2 (NE_EXPR, type, arg0, arg1);
11057 /* The GE_EXPR and LT_EXPR cases above are not normally
11058 reached because of previous transformations. */
11063 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11065 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11069 arg1 = const_binop (PLUS_EXPR, arg1,
11070 build_int_cst (TREE_TYPE (arg1), 1), 0);
11071 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11073 arg1 = const_binop (PLUS_EXPR, arg1,
11074 build_int_cst (TREE_TYPE (arg1), 1), 0);
11075 return fold_build2 (NE_EXPR, type, arg0, arg1);
11079 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11081 && TREE_INT_CST_LOW (arg1) == min_lo)
11085 return omit_one_operand (type, integer_zero_node, arg0);
11088 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11091 return omit_one_operand (type, integer_one_node, arg0);
11094 return fold_build2 (NE_EXPR, type, op0, op1);
11099 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11101 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11105 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11106 return fold_build2 (NE_EXPR, type, arg0, arg1);
11108 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11109 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11114 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11115 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11116 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11117 /* signed_type does not work on pointer types. */
11118 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11120 /* The following case also applies to X < signed_max+1
11121	     and X >= signed_max+1 because of previous transformations. */
11122 if (code == LE_EXPR || code == GT_EXPR)
11125 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11126 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11127 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11128 type, fold_convert (st0, arg0),
11129 build_int_cst (st1, 0));
11135 /* If we are comparing an ABS_EXPR with a constant, we can
11136 convert all the cases into explicit comparisons, but they may
11137 well not be faster than doing the ABS and one comparison.
11138 But ABS (X) <= C is a range comparison, which becomes a subtraction
11139 and a comparison, and is probably faster. */
11140 if (code == LE_EXPR
11141 && TREE_CODE (arg1) == INTEGER_CST
11142 && TREE_CODE (arg0) == ABS_EXPR
11143 && ! TREE_SIDE_EFFECTS (arg0)
11144 && (0 != (tem = negate_expr (arg1)))
11145 && TREE_CODE (tem) == INTEGER_CST
11146 && ! TREE_CONSTANT_OVERFLOW (tem))
11147 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11148 build2 (GE_EXPR, type,
11149 TREE_OPERAND (arg0, 0), tem),
11150 build2 (LE_EXPR, type,
11151 TREE_OPERAND (arg0, 0), arg1));
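      /* Illustrative example (editor's sketch, not original GCC text):
	 abs (x) <= 7 folds to x >= -7 && x <= 7, a range test that the
	 range-folding code can in turn reduce to a single unsigned
	 compare such as (unsigned) (x + 7) <= 14. */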
11153 /* Convert ABS_EXPR<x> >= 0 to true. */
11154 if (code == GE_EXPR
11155 && tree_expr_nonnegative_p (arg0)
11156 && (integer_zerop (arg1)
11157 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11158 && real_zerop (arg1))))
11159 return omit_one_operand (type, integer_one_node, arg0);
11161 /* Convert ABS_EXPR<x> < 0 to false. */
11162 if (code == LT_EXPR
11163 && tree_expr_nonnegative_p (arg0)
11164 && (integer_zerop (arg1) || real_zerop (arg1)))
11165 return omit_one_operand (type, integer_zero_node, arg0);
11167 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11168 and similarly for >= into !=. */
11169 if ((code == LT_EXPR || code == GE_EXPR)
11170 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11171 && TREE_CODE (arg1) == LSHIFT_EXPR
11172 && integer_onep (TREE_OPERAND (arg1, 0)))
11173 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11174 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11175 TREE_OPERAND (arg1, 1)),
11176 build_int_cst (TREE_TYPE (arg0), 0));
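      /* Illustrative example (editor's sketch, not original GCC text):
	 for unsigned x, x < (1 << y) folds to (x >> y) == 0 and
	 x >= (1 << y) to (x >> y) != 0, so the power-of-two constant
	 never has to be materialized. */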
11178 if ((code == LT_EXPR || code == GE_EXPR)
11179 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11180 && (TREE_CODE (arg1) == NOP_EXPR
11181 || TREE_CODE (arg1) == CONVERT_EXPR)
11182 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11183 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11185 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11186 fold_convert (TREE_TYPE (arg0),
11187 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11188 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11190 build_int_cst (TREE_TYPE (arg0), 0));
11194 case UNORDERED_EXPR:
11202 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11204 t1 = fold_relational_const (code, type, arg0, arg1);
11205 if (t1 != NULL_TREE)
11209 /* If the first operand is NaN, the result is constant. */
11210 if (TREE_CODE (arg0) == REAL_CST
11211 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11212 && (code != LTGT_EXPR || ! flag_trapping_math))
11214 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11215 ? integer_zero_node
11216 : integer_one_node;
11217 return omit_one_operand (type, t1, arg1);
11220 /* If the second operand is NaN, the result is constant. */
11221 if (TREE_CODE (arg1) == REAL_CST
11222 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11223 && (code != LTGT_EXPR || ! flag_trapping_math))
11225 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11226 ? integer_zero_node
11227 : integer_one_node;
11228 return omit_one_operand (type, t1, arg0);
11231 /* Simplify unordered comparison of something with itself. */
11232 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11233 && operand_equal_p (arg0, arg1, 0))
11234 return constant_boolean_node (1, type);
11236 if (code == LTGT_EXPR
11237 && !flag_trapping_math
11238 && operand_equal_p (arg0, arg1, 0))
11239 return constant_boolean_node (0, type);
11241 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11243 tree targ0 = strip_float_extensions (arg0);
11244 tree targ1 = strip_float_extensions (arg1);
11245 tree newtype = TREE_TYPE (targ0);
11247 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11248 newtype = TREE_TYPE (targ1);
11250 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11251 return fold_build2 (code, type, fold_convert (newtype, targ0),
11252 fold_convert (newtype, targ1));
11257 case COMPOUND_EXPR:
11258 /* When pedantic, a compound expression can be neither an lvalue
11259 nor an integer constant expression. */
11260 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11262	  /* Don't let (0, 0) be a null pointer constant. */
11263 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11264 : fold_convert (type, arg1);
11265 return pedantic_non_lvalue (tem);
11268 if ((TREE_CODE (arg0) == REAL_CST
11269 && TREE_CODE (arg1) == REAL_CST)
11270 || (TREE_CODE (arg0) == INTEGER_CST
11271 && TREE_CODE (arg1) == INTEGER_CST))
11272 return build_complex (type, arg0, arg1);
11276 /* An ASSERT_EXPR should never be passed to fold_binary. */
11277 gcc_unreachable ();
11281 } /* switch (code) */
11284 /* Callback for walk_tree, looking for LABEL_EXPR. Returns *TP if it
11285    is a LABEL_EXPR, and NULL_TREE otherwise. Does not descend into
11286    the sub-tree of a GOTO_EXPR. */
11289 contains_label_1 (tree *tp,
11290 int *walk_subtrees,
11291 void *data ATTRIBUTE_UNUSED)
11293 switch (TREE_CODE (*tp))
11298 *walk_subtrees = 0;
11305 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
11306    accessible from outside the sub-tree. Returns false if no such
11307    addressable label is found. */
11310 contains_label_p (tree st)
11312 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11315 /* Fold a ternary expression of code CODE and type TYPE with operands
11316 OP0, OP1, and OP2. Return the folded expression if folding is
11317 successful. Otherwise, return NULL_TREE. */
11320 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11323 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11324 enum tree_code_class kind = TREE_CODE_CLASS (code);
11326 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11327 && TREE_CODE_LENGTH (code) == 3);
11329 /* Strip any conversions that don't change the mode. This is safe
11330 for every expression, except for a comparison expression because
11331 its signedness is derived from its operands. So, in the latter
11332 case, only strip conversions that don't change the signedness.
11334 Note that this is done as an internal manipulation within the
11335 constant folder, in order to find the simplest representation of
11336      the arguments so that their form can be studied. In any case,
11337 the appropriate type conversions should be put back in the tree
11338 that will get out of the constant folder. */
11353 case COMPONENT_REF:
11354 if (TREE_CODE (arg0) == CONSTRUCTOR
11355 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11357 unsigned HOST_WIDE_INT idx;
11359 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11366 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11367 so all simple results must be passed through pedantic_non_lvalue. */
11368 if (TREE_CODE (arg0) == INTEGER_CST)
11370 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11371 tem = integer_zerop (arg0) ? op2 : op1;
11372 /* Only optimize constant conditions when the selected branch
11373 has the same type as the COND_EXPR. This avoids optimizing
11374 away "c ? x : throw", where the throw has a void type.
11375	     Avoid throwing away an operand which contains a label. */
11376 if ((!TREE_SIDE_EFFECTS (unused_op)
11377 || !contains_label_p (unused_op))
11378 && (! VOID_TYPE_P (TREE_TYPE (tem))
11379 || VOID_TYPE_P (type)))
11380 return pedantic_non_lvalue (tem);
11383 if (operand_equal_p (arg1, op2, 0))
11384 return pedantic_omit_one_operand (type, arg1, arg0);
11386 /* If we have A op B ? A : C, we may be able to convert this to a
11387 simpler expression, depending on the operation and the values
11388 of B and C. Signed zeros prevent all of these transformations,
11389 for reasons given above each one.
11391 Also try swapping the arguments and inverting the conditional. */
11392 if (COMPARISON_CLASS_P (arg0)
11393 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11394 arg1, TREE_OPERAND (arg0, 1))
11395 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11397 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11402 if (COMPARISON_CLASS_P (arg0)
11403 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11405 TREE_OPERAND (arg0, 1))
11406 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11408 tem = fold_truth_not_expr (arg0);
11409 if (tem && COMPARISON_CLASS_P (tem))
11411 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11417 /* If the second operand is simpler than the third, swap them
11418 since that produces better jump optimization results. */
11419 if (truth_value_p (TREE_CODE (arg0))
11420 && tree_swap_operands_p (op1, op2, false))
11422 /* See if this can be inverted. If it can't, possibly because
11423	     it was a floating-point inequality comparison, don't do
11424	     anything. */
11425 tem = fold_truth_not_expr (arg0);
11427 return fold_build3 (code, type, tem, op2, op1);
11430 /* Convert A ? 1 : 0 to simply A. */
11431 if (integer_onep (op1)
11432 && integer_zerop (op2)
11433 /* If we try to convert OP0 to our type, the
11434 call to fold will try to move the conversion inside
11435 a COND, which will recurse. In that case, the COND_EXPR
11436 is probably the best choice, so leave it alone. */
11437 && type == TREE_TYPE (arg0))
11438 return pedantic_non_lvalue (arg0);
11440 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11441 over COND_EXPR in cases such as floating point comparisons. */
11442 if (integer_zerop (op1)
11443 && integer_onep (op2)
11444 && truth_value_p (TREE_CODE (arg0)))
11445 return pedantic_non_lvalue (fold_convert (type,
11446 invert_truthvalue (arg0)));
11448 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11449 if (TREE_CODE (arg0) == LT_EXPR
11450 && integer_zerop (TREE_OPERAND (arg0, 1))
11451 && integer_zerop (op2)
11452 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11454 /* sign_bit_p only checks ARG1 bits within A's precision.
11455 If <sign bit of A> has wider type than A, bits outside
11456 of A's precision in <sign bit of A> need to be checked.
11457	     If they are all 0, this optimization must be done
11458	     in A's unsigned type; if they are all 1, in A's signed type;
11459	     otherwise the transformation can't be done. */
11460 if (TYPE_PRECISION (TREE_TYPE (tem))
11461 < TYPE_PRECISION (TREE_TYPE (arg1))
11462 && TYPE_PRECISION (TREE_TYPE (tem))
11463 < TYPE_PRECISION (type))
11465 unsigned HOST_WIDE_INT mask_lo;
11466 HOST_WIDE_INT mask_hi;
11467 int inner_width, outer_width;
11470 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11471 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11472 if (outer_width > TYPE_PRECISION (type))
11473 outer_width = TYPE_PRECISION (type);
11475 if (outer_width > HOST_BITS_PER_WIDE_INT)
11477 mask_hi = ((unsigned HOST_WIDE_INT) -1
11478 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11484 mask_lo = ((unsigned HOST_WIDE_INT) -1
11485 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11487 if (inner_width > HOST_BITS_PER_WIDE_INT)
11489 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11490 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11494 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11495 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11497 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11498 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11500 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11501 tem = fold_convert (tem_type, tem);
11503 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11504 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11506 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11507 tem = fold_convert (tem_type, tem);
11514 return fold_convert (type,
11515 fold_build2 (BIT_AND_EXPR,
11516 TREE_TYPE (tem), tem,
11517 fold_convert (TREE_TYPE (tem),
11521 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11522 already handled above. */
11523 if (TREE_CODE (arg0) == BIT_AND_EXPR
11524 && integer_onep (TREE_OPERAND (arg0, 1))
11525 && integer_zerop (op2)
11526 && integer_pow2p (arg1))
11528 tree tem = TREE_OPERAND (arg0, 0);
11530 if (TREE_CODE (tem) == RSHIFT_EXPR
11531 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11532 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11533 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11534 return fold_build2 (BIT_AND_EXPR, type,
11535 TREE_OPERAND (tem, 0), arg1);
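	  /* Illustrative example (editor's sketch, not original GCC text):
	     with N == 3, ((a >> 3) & 1) ? 8 : 0 folds to a & 8, since the
	     condition tests exactly the bit that the non-zero arm would
	     produce. */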
11538 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11539 is probably obsolete because the first operand should be a
11540 truth value (that's why we have the two cases above), but let's
11541 leave it in until we can confirm this for all front-ends. */
11542 if (integer_zerop (op2)
11543 && TREE_CODE (arg0) == NE_EXPR
11544 && integer_zerop (TREE_OPERAND (arg0, 1))
11545 && integer_pow2p (arg1)
11546 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11547 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11548 arg1, OEP_ONLY_CONST))
11549 return pedantic_non_lvalue (fold_convert (type,
11550 TREE_OPERAND (arg0, 0)));
11552 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11553 if (integer_zerop (op2)
11554 && truth_value_p (TREE_CODE (arg0))
11555 && truth_value_p (TREE_CODE (arg1)))
11556 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11557 fold_convert (type, arg0),
11560 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11561 if (integer_onep (op2)
11562 && truth_value_p (TREE_CODE (arg0))
11563 && truth_value_p (TREE_CODE (arg1)))
11565 /* Only perform transformation if ARG0 is easily inverted. */
11566 tem = fold_truth_not_expr (arg0);
11568 return fold_build2 (TRUTH_ORIF_EXPR, type,
11569 fold_convert (type, tem),
11573 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11574 if (integer_zerop (arg1)
11575 && truth_value_p (TREE_CODE (arg0))
11576 && truth_value_p (TREE_CODE (op2)))
11578 /* Only perform transformation if ARG0 is easily inverted. */
11579 tem = fold_truth_not_expr (arg0);
11581 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11582 fold_convert (type, tem),
11586 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11587 if (integer_onep (arg1)
11588 && truth_value_p (TREE_CODE (arg0))
11589 && truth_value_p (TREE_CODE (op2)))
11590 return fold_build2 (TRUTH_ORIF_EXPR, type,
11591 fold_convert (type, arg0),
11597 /* Check for a built-in function. */
11598 if (TREE_CODE (op0) == ADDR_EXPR
11599 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11600 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11601 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11604 case BIT_FIELD_REF:
11605 if (TREE_CODE (arg0) == VECTOR_CST
11606 && type == TREE_TYPE (TREE_TYPE (arg0))
11607 && host_integerp (arg1, 1)
11608 && host_integerp (op2, 1))
11610 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11611 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11614 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11615 && (idx % width) == 0
11616 && (idx = idx / width)
11617 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11619 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11620 while (idx-- > 0 && elements)
11621 elements = TREE_CHAIN (elements);
11623 return TREE_VALUE (elements);
11625 return fold_convert (type, integer_zero_node);
11632 } /* switch (code) */
11635 /* Perform constant folding and related simplification of EXPR.
11636 The related simplifications include x*1 => x, x*0 => 0, etc.,
11637 and application of the associative law.
11638 NOP_EXPR conversions may be removed freely (as long as we
11639 are careful not to change the type of the overall expression).
11640 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11641 but we can constant-fold them if they have constant operands. */
11643 #ifdef ENABLE_FOLD_CHECKING
11644 # define fold(x) fold_1 (x)
11645 static tree fold_1 (tree);
11651 const tree t = expr;
11652 enum tree_code code = TREE_CODE (t);
11653 enum tree_code_class kind = TREE_CODE_CLASS (code);
11656 /* Return right away if a constant. */
11657 if (kind == tcc_constant)
11660 if (IS_EXPR_CODE_CLASS (kind))
11662 tree type = TREE_TYPE (t);
11663 tree op0, op1, op2;
11665 switch (TREE_CODE_LENGTH (code))
11668 op0 = TREE_OPERAND (t, 0);
11669 tem = fold_unary (code, type, op0);
11670 return tem ? tem : expr;
11672 op0 = TREE_OPERAND (t, 0);
11673 op1 = TREE_OPERAND (t, 1);
11674 tem = fold_binary (code, type, op0, op1);
11675 return tem ? tem : expr;
11677 op0 = TREE_OPERAND (t, 0);
11678 op1 = TREE_OPERAND (t, 1);
11679 op2 = TREE_OPERAND (t, 2);
11680 tem = fold_ternary (code, type, op0, op1, op2);
11681 return tem ? tem : expr;
11690 return fold (DECL_INITIAL (t));
11694 } /* switch (code) */
11697 #ifdef ENABLE_FOLD_CHECKING
11700 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11701 static void fold_check_failed (tree, tree);
11702 void print_fold_checksum (tree);
11704 /* When --enable-checking=fold, compute a digest of expr before
11705    and after the actual fold call, to verify that fold did not
11706    accidentally change the original expr. */
11712 struct md5_ctx ctx;
11713 unsigned char checksum_before[16], checksum_after[16];
11716 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11717 md5_init_ctx (&ctx);
11718 fold_checksum_tree (expr, &ctx, ht);
11719 md5_finish_ctx (&ctx, checksum_before);
11722 ret = fold_1 (expr);
11724 md5_init_ctx (&ctx);
11725 fold_checksum_tree (expr, &ctx, ht);
11726 md5_finish_ctx (&ctx, checksum_after);
11729 if (memcmp (checksum_before, checksum_after, 16))
11730 fold_check_failed (expr, ret);
11736 print_fold_checksum (tree expr)
11738 struct md5_ctx ctx;
11739 unsigned char checksum[16], cnt;
11742 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11743 md5_init_ctx (&ctx);
11744 fold_checksum_tree (expr, &ctx, ht);
11745 md5_finish_ctx (&ctx, checksum);
11747 for (cnt = 0; cnt < 16; ++cnt)
11748 fprintf (stderr, "%02x", checksum[cnt]);
11749 putc ('\n', stderr);
11753 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11755 internal_error ("fold check: original tree changed by fold");
11759 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11762 enum tree_code code;
11763 struct tree_function_decl buf;
11768 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11769 <= sizeof (struct tree_function_decl))
11770 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11773 slot = htab_find_slot (ht, expr, INSERT);
11777 code = TREE_CODE (expr);
11778 if (TREE_CODE_CLASS (code) == tcc_declaration
11779 && DECL_ASSEMBLER_NAME_SET_P (expr))
11781 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11782 memcpy ((char *) &buf, expr, tree_size (expr));
11783 expr = (tree) &buf;
11784 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11786 else if (TREE_CODE_CLASS (code) == tcc_type
11787 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11788 || TYPE_CACHED_VALUES_P (expr)
11789 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11791 /* Allow these fields to be modified. */
11792 memcpy ((char *) &buf, expr, tree_size (expr));
11793 expr = (tree) &buf;
11794 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11795 TYPE_POINTER_TO (expr) = NULL;
11796 TYPE_REFERENCE_TO (expr) = NULL;
11797 if (TYPE_CACHED_VALUES_P (expr))
11799 TYPE_CACHED_VALUES_P (expr) = 0;
11800 TYPE_CACHED_VALUES (expr) = NULL;
11803 md5_process_bytes (expr, tree_size (expr), ctx);
11804 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11805 if (TREE_CODE_CLASS (code) != tcc_type
11806 && TREE_CODE_CLASS (code) != tcc_declaration
11807 && code != TREE_LIST)
11808 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11809 switch (TREE_CODE_CLASS (code))
11815 md5_process_bytes (TREE_STRING_POINTER (expr),
11816 TREE_STRING_LENGTH (expr), ctx);
11819 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11820 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11823 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11829 case tcc_exceptional:
11833 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11834 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11835 expr = TREE_CHAIN (expr);
11836 goto recursive_label;
11839 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11840 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11846 case tcc_expression:
11847 case tcc_reference:
11848 case tcc_comparison:
11851 case tcc_statement:
11852 len = TREE_CODE_LENGTH (code);
11853 for (i = 0; i < len; ++i)
11854 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11856 case tcc_declaration:
11857 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11858 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11859 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11861 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11862 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11863 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11864 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11865 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11867 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11868 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11870 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11872 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11873 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11874 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11878 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11879 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11880 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11881 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11882 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11883 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11884 if (INTEGRAL_TYPE_P (expr)
11885 || SCALAR_FLOAT_TYPE_P (expr))
11887 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11888 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11890 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11891 if (TREE_CODE (expr) == RECORD_TYPE
11892 || TREE_CODE (expr) == UNION_TYPE
11893 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11894 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11895 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11904 /* Fold a unary tree expression with code CODE of type TYPE with an
11905 operand OP0. Return a folded expression if successful. Otherwise,
11906    return a tree expression with code CODE of type TYPE with an operand OP0.  */
11910 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11913 #ifdef ENABLE_FOLD_CHECKING
11914 unsigned char checksum_before[16], checksum_after[16];
11915 struct md5_ctx ctx;
11918 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11919 md5_init_ctx (&ctx);
11920 fold_checksum_tree (op0, &ctx, ht);
11921 md5_finish_ctx (&ctx, checksum_before);
11925 tem = fold_unary (code, type, op0);
11927 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11929 #ifdef ENABLE_FOLD_CHECKING
11930 md5_init_ctx (&ctx);
11931 fold_checksum_tree (op0, &ctx, ht);
11932 md5_finish_ctx (&ctx, checksum_after);
11935 if (memcmp (checksum_before, checksum_after, 16))
11936 fold_check_failed (op0, tem);
11941 /* Fold a binary tree expression with code CODE of type TYPE with
11942 operands OP0 and OP1. Return a folded expression if successful.
11943 Otherwise, return a tree expression with code CODE of type TYPE
11944 with operands OP0 and OP1. */
11947 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11951 #ifdef ENABLE_FOLD_CHECKING
11952 unsigned char checksum_before_op0[16],
11953 checksum_before_op1[16],
11954 checksum_after_op0[16],
11955 checksum_after_op1[16];
11956 struct md5_ctx ctx;
11959 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11960 md5_init_ctx (&ctx);
11961 fold_checksum_tree (op0, &ctx, ht);
11962 md5_finish_ctx (&ctx, checksum_before_op0);
11965 md5_init_ctx (&ctx);
11966 fold_checksum_tree (op1, &ctx, ht);
11967 md5_finish_ctx (&ctx, checksum_before_op1);
11971 tem = fold_binary (code, type, op0, op1);
11973 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11975 #ifdef ENABLE_FOLD_CHECKING
11976 md5_init_ctx (&ctx);
11977 fold_checksum_tree (op0, &ctx, ht);
11978 md5_finish_ctx (&ctx, checksum_after_op0);
11981 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11982 fold_check_failed (op0, tem);
11984 md5_init_ctx (&ctx);
11985 fold_checksum_tree (op1, &ctx, ht);
11986 md5_finish_ctx (&ctx, checksum_after_op1);
11989 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11990 fold_check_failed (op1, tem);
11995 /* Fold a ternary tree expression with code CODE of type TYPE with
11996 operands OP0, OP1, and OP2. Return a folded expression if
11997 successful. Otherwise, return a tree expression with code CODE of
11998 type TYPE with operands OP0, OP1, and OP2. */
12001 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12005 #ifdef ENABLE_FOLD_CHECKING
12006 unsigned char checksum_before_op0[16],
12007 checksum_before_op1[16],
12008 checksum_before_op2[16],
12009 checksum_after_op0[16],
12010 checksum_after_op1[16],
12011 checksum_after_op2[16];
12012 struct md5_ctx ctx;
12015 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12016 md5_init_ctx (&ctx);
12017 fold_checksum_tree (op0, &ctx, ht);
12018 md5_finish_ctx (&ctx, checksum_before_op0);
12021 md5_init_ctx (&ctx);
12022 fold_checksum_tree (op1, &ctx, ht);
12023 md5_finish_ctx (&ctx, checksum_before_op1);
12026 md5_init_ctx (&ctx);
12027 fold_checksum_tree (op2, &ctx, ht);
12028 md5_finish_ctx (&ctx, checksum_before_op2);
12032 tem = fold_ternary (code, type, op0, op1, op2);
12034 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12036 #ifdef ENABLE_FOLD_CHECKING
12037 md5_init_ctx (&ctx);
12038 fold_checksum_tree (op0, &ctx, ht);
12039 md5_finish_ctx (&ctx, checksum_after_op0);
12042 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12043 fold_check_failed (op0, tem);
12045 md5_init_ctx (&ctx);
12046 fold_checksum_tree (op1, &ctx, ht);
12047 md5_finish_ctx (&ctx, checksum_after_op1);
12050 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12051 fold_check_failed (op1, tem);
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (op2, &ctx, ht);
12055 md5_finish_ctx (&ctx, checksum_after_op2);
12058 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12059 fold_check_failed (op2, tem);
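/* Illustrative sketch of the fold_buildN entry points (the helper
   name example_fold_build is hypothetical): fold_build2 tries
   fold_binary first and only builds a new node when no simplification
   applies, so x + 0 for an integral x should come back as x, modulo
   overflow flags.  */

static tree
example_fold_build (tree x)
{
  tree type = TREE_TYPE (x);
  tree sum = fold_build2 (PLUS_EXPR, type, x, build_int_cst (type, 0));

  /* Likewise fold_build1: -(-x) should fold back to x.  */
  return fold_build1 (NEGATE_EXPR, type,
		      fold_build1 (NEGATE_EXPR, type, sum));
}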
12064 /* Perform constant folding and related simplification of initializer
12065 expression EXPR. These behave identically to "fold_buildN" but ignore
12066 potential run-time traps and exceptions that fold must preserve. */
12068 #define START_FOLD_INIT \
12069 int saved_signaling_nans = flag_signaling_nans;\
12070 int saved_trapping_math = flag_trapping_math;\
12071 int saved_rounding_math = flag_rounding_math;\
12072 int saved_trapv = flag_trapv;\
12073 int saved_folding_initializer = folding_initializer;\
12074 flag_signaling_nans = 0;\
12075 flag_trapping_math = 0;\
12076 flag_rounding_math = 0;\
12078 folding_initializer = 1;
12080 #define END_FOLD_INIT \
12081 flag_signaling_nans = saved_signaling_nans;\
12082 flag_trapping_math = saved_trapping_math;\
12083 flag_rounding_math = saved_rounding_math;\
12084 flag_trapv = saved_trapv;\
12085 folding_initializer = saved_folding_initializer;
12088 fold_build1_initializer (enum tree_code code, tree type, tree op)
12093 result = fold_build1 (code, type, op);
12100 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12105 result = fold_build2 (code, type, op0, op1);
12112 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12118 result = fold_build3 (code, type, op0, op1, op2);
12124 #undef START_FOLD_INIT
12125 #undef END_FOLD_INIT
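/* Illustrative sketch (example_fold_static_init is hypothetical):
   when folding a static initializer, the wrappers above temporarily
   clear flag_rounding_math and friends, so a division such as 1.0/3.0
   may be folded here even though plain fold_build2 would have to
   preserve the run-time rounding behavior.  */

static tree
example_fold_static_init (void)
{
  tree one = build_real (double_type_node, dconst1);
  tree three = fold_convert (double_type_node,
			     build_int_cst (integer_type_node, 3));

  return fold_build2_initializer (RDIV_EXPR, double_type_node, one, three);
}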
12127 /* Determine if the first argument is a multiple of the second argument.
12128    Return 0 if it is not, or if we cannot easily determine it to be.
12130 An example of the sort of thing we care about (at this point; this routine
12131 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12132 fold cases do now) is discovering that
12134 	SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12136    is a multiple of
12138 	SAVE_EXPR (J * 8)
12140    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12142 This code also handles discovering that
12144 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12146 is a multiple of 8 so we don't have to worry about dealing with a
12147 possible remainder.
12149 Note that we *look* inside a SAVE_EXPR only to determine how it was
12150 calculated; it is not safe for fold to do much of anything else with the
12151 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12152 at run time. For example, the latter example above *cannot* be implemented
12153 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12154 evaluation time of the original SAVE_EXPR is not necessarily the same at
12155 the time the new expression is evaluated. The only optimization of this
12156 sort that would be valid is changing
12158 	SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12160    divided by 8 to
12162 	SAVE_EXPR (I) * SAVE_EXPR (J)
12164 (where the same SAVE_EXPR (J) is used in the original and the
12165 transformed version). */
12168 multiple_of_p (tree type, tree top, tree bottom)
12170 if (operand_equal_p (top, bottom, 0))
12173 if (TREE_CODE (type) != INTEGER_TYPE)
12176 switch (TREE_CODE (top))
12179 /* Bitwise and provides a power of two multiple. If the mask is
12180 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12181 if (!integer_pow2p (bottom))
12186 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12187 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12191 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12192 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12195 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12199 op1 = TREE_OPERAND (top, 1);
12200 /* const_binop may not detect overflow correctly,
12201 so check for it explicitly here. */
12202 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12203 > TREE_INT_CST_LOW (op1)
12204 && TREE_INT_CST_HIGH (op1) == 0
12205 && 0 != (t1 = fold_convert (type,
12206 const_binop (LSHIFT_EXPR,
12209 && ! TREE_OVERFLOW (t1))
12210 return multiple_of_p (type, t1, bottom);
12215 /* Can't handle conversions from non-integral or wider integral type. */
12216 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12217 || (TYPE_PRECISION (type)
12218 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12221       /* ... fall through ...  */
12224 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12227 if (TREE_CODE (bottom) != INTEGER_CST
12228 || (TYPE_UNSIGNED (type)
12229 && (tree_int_cst_sgn (top) < 0
12230 || tree_int_cst_sgn (bottom) < 0)))
12232       return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12233 					 top, bottom, 0));
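/* Illustrative sketch of a multiple_of_p query (example_multiple_of
   is hypothetical): J * 8 is a multiple of 4 whatever the value of J,
   which is exactly the kind of fact the comment above describes.  */

static int
example_multiple_of (tree j)
{
  tree type = integer_type_node;
  tree top = fold_build2 (MULT_EXPR, type, j, build_int_cst (type, 8));

  return multiple_of_p (type, top, build_int_cst (type, 4));
}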
12240 /* Return true if `t' is known to be non-negative. */
12243 tree_expr_nonnegative_p (tree t)
12245 if (t == error_mark_node)
12248 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12251 switch (TREE_CODE (t))
12254 /* Query VRP to see if it has recorded any information about
12255 the range of this object. */
12256 return ssa_name_nonnegative_p (t);
12259 /* We can't return 1 if flag_wrapv is set because
12260 ABS_EXPR<INT_MIN> = INT_MIN. */
12261 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12266 return tree_int_cst_sgn (t) >= 0;
12269 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12272 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12273 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12274 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12276 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12277 both unsigned and at least 2 bits shorter than the result. */
12278 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12279 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12280 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12282 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12283 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12284 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12285 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12287 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12288 TYPE_PRECISION (inner2)) + 1;
12289 return prec < TYPE_PRECISION (TREE_TYPE (t));
12295 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12297 /* x * x for floating point x is always non-negative. */
12298 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12300 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12301 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12304 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12305 both unsigned and their total bits is shorter than the result. */
12306 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12307 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12308 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12310 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12311 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12312 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12313 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12314 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12315 < TYPE_PRECISION (TREE_TYPE (t));
12321 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12322 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12328 case TRUNC_DIV_EXPR:
12329 case CEIL_DIV_EXPR:
12330 case FLOOR_DIV_EXPR:
12331 case ROUND_DIV_EXPR:
12332 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12333 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12335 case TRUNC_MOD_EXPR:
12336 case CEIL_MOD_EXPR:
12337 case FLOOR_MOD_EXPR:
12338 case ROUND_MOD_EXPR:
12340 case NON_LVALUE_EXPR:
12342 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12344 case COMPOUND_EXPR:
12346 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12349 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12352 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12353 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12357 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12358 tree outer_type = TREE_TYPE (t);
12360 if (TREE_CODE (outer_type) == REAL_TYPE)
12362 if (TREE_CODE (inner_type) == REAL_TYPE)
12363 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12364 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12366 if (TYPE_UNSIGNED (inner_type))
12368 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12371 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12373 if (TREE_CODE (inner_type) == REAL_TYPE)
12374 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12375 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12376 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12377 && TYPE_UNSIGNED (inner_type);
12384 tree temp = TARGET_EXPR_SLOT (t);
12385 t = TARGET_EXPR_INITIAL (t);
12387 /* If the initializer is non-void, then it's a normal expression
12388 that will be assigned to the slot. */
12389 if (!VOID_TYPE_P (t))
12390 return tree_expr_nonnegative_p (t);
12392 /* Otherwise, the initializer sets the slot in some way. One common
12393 way is an assignment statement at the end of the initializer. */
12396 if (TREE_CODE (t) == BIND_EXPR)
12397 t = expr_last (BIND_EXPR_BODY (t));
12398 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12399 || TREE_CODE (t) == TRY_CATCH_EXPR)
12400 t = expr_last (TREE_OPERAND (t, 0));
12401 else if (TREE_CODE (t) == STATEMENT_LIST)
12406 if (TREE_CODE (t) == MODIFY_EXPR
12407 && TREE_OPERAND (t, 0) == temp)
12408 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12415 tree fndecl = get_callee_fndecl (t);
12416 tree arglist = TREE_OPERAND (t, 1);
12417 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12418 switch (DECL_FUNCTION_CODE (fndecl))
12420 CASE_FLT_FN (BUILT_IN_ACOS):
12421 CASE_FLT_FN (BUILT_IN_ACOSH):
12422 CASE_FLT_FN (BUILT_IN_CABS):
12423 CASE_FLT_FN (BUILT_IN_COSH):
12424 CASE_FLT_FN (BUILT_IN_ERFC):
12425 CASE_FLT_FN (BUILT_IN_EXP):
12426 CASE_FLT_FN (BUILT_IN_EXP10):
12427 CASE_FLT_FN (BUILT_IN_EXP2):
12428 CASE_FLT_FN (BUILT_IN_FABS):
12429 CASE_FLT_FN (BUILT_IN_FDIM):
12430 CASE_FLT_FN (BUILT_IN_HYPOT):
12431 CASE_FLT_FN (BUILT_IN_POW10):
12432 CASE_INT_FN (BUILT_IN_FFS):
12433 CASE_INT_FN (BUILT_IN_PARITY):
12434 CASE_INT_FN (BUILT_IN_POPCOUNT):
12435 case BUILT_IN_BSWAP32:
12436 case BUILT_IN_BSWAP64:
12440 CASE_FLT_FN (BUILT_IN_SQRT):
12441 /* sqrt(-0.0) is -0.0. */
12442 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12444 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12446 CASE_FLT_FN (BUILT_IN_ASINH):
12447 CASE_FLT_FN (BUILT_IN_ATAN):
12448 CASE_FLT_FN (BUILT_IN_ATANH):
12449 CASE_FLT_FN (BUILT_IN_CBRT):
12450 CASE_FLT_FN (BUILT_IN_CEIL):
12451 CASE_FLT_FN (BUILT_IN_ERF):
12452 CASE_FLT_FN (BUILT_IN_EXPM1):
12453 CASE_FLT_FN (BUILT_IN_FLOOR):
12454 CASE_FLT_FN (BUILT_IN_FMOD):
12455 CASE_FLT_FN (BUILT_IN_FREXP):
12456 CASE_FLT_FN (BUILT_IN_LCEIL):
12457 CASE_FLT_FN (BUILT_IN_LDEXP):
12458 CASE_FLT_FN (BUILT_IN_LFLOOR):
12459 CASE_FLT_FN (BUILT_IN_LLCEIL):
12460 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12461 CASE_FLT_FN (BUILT_IN_LLRINT):
12462 CASE_FLT_FN (BUILT_IN_LLROUND):
12463 CASE_FLT_FN (BUILT_IN_LRINT):
12464 CASE_FLT_FN (BUILT_IN_LROUND):
12465 CASE_FLT_FN (BUILT_IN_MODF):
12466 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12467 CASE_FLT_FN (BUILT_IN_RINT):
12468 CASE_FLT_FN (BUILT_IN_ROUND):
12469 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12470 CASE_FLT_FN (BUILT_IN_SINH):
12471 CASE_FLT_FN (BUILT_IN_TANH):
12472 CASE_FLT_FN (BUILT_IN_TRUNC):
12473 /* True if the 1st argument is nonnegative. */
12474 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12476 CASE_FLT_FN (BUILT_IN_FMAX):
12477 /* True if the 1st OR 2nd arguments are nonnegative. */
12478 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12479 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12481 CASE_FLT_FN (BUILT_IN_FMIN):
12482 /* True if the 1st AND 2nd arguments are nonnegative. */
12483 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12484 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12486 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12487 /* True if the 2nd argument is nonnegative. */
12488 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12490 CASE_FLT_FN (BUILT_IN_POWI):
12491 /* True if the 1st argument is nonnegative or the second
12492 argument is an even integer. */
12493 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12495 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12496 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12499 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12501 CASE_FLT_FN (BUILT_IN_POW):
12502 /* True if the 1st argument is nonnegative or the second
12503 argument is an even integer valued real. */
12504 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12509 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12510 n = real_to_integer (&c);
12513 REAL_VALUE_TYPE cint;
12514 real_from_integer (&cint, VOIDmode, n,
12515 n < 0 ? -1 : 0, 0);
12516 if (real_identical (&c, &cint))
12520 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12527 /* ... fall through ... */
12530 if (truth_value_p (TREE_CODE (t)))
12531 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12535 /* We don't know sign of `t', so be conservative and return false. */
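/* Illustrative sketch (example_simplify_abs is hypothetical): a
   typical consumer of tree_expr_nonnegative_p, dropping an ABS_EXPR
   when its operand is provably non-negative.  */

static tree
example_simplify_abs (tree x)
{
  if (tree_expr_nonnegative_p (x))
    return x;

  return fold_build1 (ABS_EXPR, TREE_TYPE (x), x);
}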
12539 /* Return true when T is an address and is known to be nonzero.
12540 For floating point we further ensure that T is not denormal.
12541 Similar logic is present in nonzero_address in rtlanal.h. */
12544 tree_expr_nonzero_p (tree t)
12546 tree type = TREE_TYPE (t);
12548 /* Doing something useful for floating point would need more work. */
12549 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12552 switch (TREE_CODE (t))
12555 /* Query VRP to see if it has recorded any information about
12556 the range of this object. */
12557 return ssa_name_nonzero_p (t);
12560 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12563 /* We used to test for !integer_zerop here. This does not work correctly
12564 if TREE_CONSTANT_OVERFLOW (t). */
12565 return (TREE_INT_CST_LOW (t) != 0
12566 || TREE_INT_CST_HIGH (t) != 0);
12569 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12571 	  /* In the presence of negative values it is hard
12572 	     to say anything.  */
12573 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12574 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12576 	  /* One of the operands must be positive and the other non-negative.  */
12577 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12578 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12583 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12585 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12586 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12592 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12593 tree outer_type = TREE_TYPE (t);
12595 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12596 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12602 tree base = get_base_address (TREE_OPERAND (t, 0));
12607 /* Weak declarations may link to NULL. */
12608 if (VAR_OR_FUNCTION_DECL_P (base))
12609 return !DECL_WEAK (base);
12611 /* Constants are never weak. */
12612 if (CONSTANT_CLASS_P (base))
12619 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12620 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12623 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12624 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12627 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12629 /* When both operands are nonzero, then MAX must be too. */
12630 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12633 /* MAX where operand 0 is positive is positive. */
12634 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12636 /* MAX where operand 1 is positive is positive. */
12637 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12638 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12642 case COMPOUND_EXPR:
12645 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12648 case NON_LVALUE_EXPR:
12649 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12652 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12653 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12656 return alloca_call_p (t);
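/* Illustrative sketch (example_fold_ptr_ne_null is hypothetical):
   tree_expr_nonzero_p lets a comparison like &var != 0 be decided at
   compile time, except for weak declarations as noted above.  */

static tree
example_fold_ptr_ne_null (tree ptr)
{
  if (tree_expr_nonzero_p (ptr))
    return constant_boolean_node (1, boolean_type_node);

  return fold_build2 (NE_EXPR, boolean_type_node, ptr,
		      build_int_cst (TREE_TYPE (ptr), 0));
}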
12664 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12665 attempt to fold the expression to a constant without modifying TYPE,
12668 If the expression could be simplified to a constant, then return
12669    the constant.  If the expression cannot be simplified to a
12670    constant, then return NULL_TREE.
12673 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12675 tree tem = fold_binary (code, type, op0, op1);
12676 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12679 /* Given the components of a unary expression CODE, TYPE and OP0,
12680 attempt to fold the expression to a constant without modifying
12683 If the expression could be simplified to a constant, then return
12684    the constant.  If the expression cannot be simplified to a
12685    constant, then return NULL_TREE.
12688 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12690 tree tem = fold_unary (code, type, op0);
12691 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
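/* Illustrative sketch (example_const_add is hypothetical): the
   *_to_constant wrappers return the folded constant, or NULL_TREE
   when the operands do not reduce to one.  */

static tree
example_const_add (void)
{
  tree a = build_int_cst (integer_type_node, 40);
  tree b = build_int_cst (integer_type_node, 2);

  /* Yields the INTEGER_CST 42.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
}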
12694 /* If EXP represents referencing an element in a constant string
12695 (either via pointer arithmetic or array indexing), return the
12696 tree representing the value accessed, otherwise return NULL. */
12699 fold_read_from_constant_string (tree exp)
12701 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12703 tree exp1 = TREE_OPERAND (exp, 0);
12707 if (TREE_CODE (exp) == INDIRECT_REF)
12708 string = string_constant (exp1, &index);
12711 tree low_bound = array_ref_low_bound (exp);
12712 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12714       /* Optimize the special case of a zero lower bound.
12716 	 We convert the low_bound to sizetype to avoid some problems
12717 	 with constant folding.  (E.g. suppose the lower bound is 1,
12718 	 and its mode is QI.  Without the conversion, (ARRAY
12719 	 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12720 	 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
12721 if (! integer_zerop (low_bound))
12722 index = size_diffop (index, fold_convert (sizetype, low_bound));
12728 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12729 && TREE_CODE (string) == STRING_CST
12730 && TREE_CODE (index) == INTEGER_CST
12731 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12732 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12734 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12735 return fold_convert (TREE_TYPE (exp),
12736 build_int_cst (NULL_TREE,
12737 (TREE_STRING_POINTER (string)
12738 [TREE_INT_CST_LOW (index)])));
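/* Illustrative sketch (example_read_string_elt is hypothetical):
   indexing a STRING_CST with a constant index should fold to the
   character value, here 'b'.  The manual type setup below is only a
   rough approximation of what a front end would have built.  */

static tree
example_read_string_elt (void)
{
  tree str = build_string (4, "abc");
  tree index_type = build_index_type (size_int (3));
  tree ref;

  TREE_TYPE (str) = build_array_type (char_type_node, index_type);
  ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		NULL_TREE, NULL_TREE);

  return fold_read_from_constant_string (ref);
}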
12743 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12744 an integer constant or real constant.
12746 TYPE is the type of the result. */
12749 fold_negate_const (tree arg0, tree type)
12751 tree t = NULL_TREE;
12753 switch (TREE_CODE (arg0))
12757 unsigned HOST_WIDE_INT low;
12758 HOST_WIDE_INT high;
12759 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12760 TREE_INT_CST_HIGH (arg0),
12762 t = build_int_cst_wide (type, low, high);
12763 t = force_fit_type (t, 1,
12764 (overflow | TREE_OVERFLOW (arg0))
12765 && !TYPE_UNSIGNED (type),
12766 TREE_CONSTANT_OVERFLOW (arg0));
12771 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12775 gcc_unreachable ();
12781 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12782 an integer constant or real constant.
12784 TYPE is the type of the result. */
12787 fold_abs_const (tree arg0, tree type)
12789 tree t = NULL_TREE;
12791 switch (TREE_CODE (arg0))
12794 /* If the value is unsigned, then the absolute value is
12795 the same as the ordinary value. */
12796 if (TYPE_UNSIGNED (type))
12798 /* Similarly, if the value is non-negative. */
12799 else if (INT_CST_LT (integer_minus_one_node, arg0))
12801 /* If the value is negative, then the absolute value is
12805 unsigned HOST_WIDE_INT low;
12806 HOST_WIDE_INT high;
12807 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12808 TREE_INT_CST_HIGH (arg0),
12810 t = build_int_cst_wide (type, low, high);
12811 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12812 TREE_CONSTANT_OVERFLOW (arg0));
12817 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12818 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12824 gcc_unreachable ();
12830 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12831 constant. TYPE is the type of the result. */
12834 fold_not_const (tree arg0, tree type)
12836 tree t = NULL_TREE;
12838 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12840 t = build_int_cst_wide (type,
12841 ~ TREE_INT_CST_LOW (arg0),
12842 ~ TREE_INT_CST_HIGH (arg0));
12843 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12844 TREE_CONSTANT_OVERFLOW (arg0));
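/* Illustrative sketch (example_fold_not is hypothetical): on a two's
   complement representation, ~5 folds to -6, i.e. ~x == -x - 1.  */

static tree
example_fold_not (void)
{
  tree five = build_int_cst (integer_type_node, 5);

  return fold_not_const (five, integer_type_node);
}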
12849 /* Given CODE, a relational operator, the target type, TYPE and two
12850 constant operands OP0 and OP1, return the result of the
12851 relational operation. If the result is not a compile time
12852 constant, then return NULL_TREE. */
12855 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12857 int result, invert;
12859 /* From here on, the only cases we handle are when the result is
12860 known to be a constant. */
12862 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12864 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12865 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12867 /* Handle the cases where either operand is a NaN. */
12868 if (real_isnan (c0) || real_isnan (c1))
12878 case UNORDERED_EXPR:
12892 if (flag_trapping_math)
12898 gcc_unreachable ();
12901 return constant_boolean_node (result, type);
12904 return constant_boolean_node (real_compare (code, c0, c1), type);
12907 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12909 To compute GT, swap the arguments and do LT.
12910 To compute GE, do LT and invert the result.
12911 To compute LE, swap the arguments, do LT and invert the result.
12912 To compute NE, do EQ and invert the result.
12914 Therefore, the code below must handle only EQ and LT. */
12916 if (code == LE_EXPR || code == GT_EXPR)
12921 code = swap_tree_comparison (code);
12924 /* Note that it is safe to invert for real values here because we
12925      have already handled the one case where it matters.  */
12928 if (code == NE_EXPR || code == GE_EXPR)
12931 code = invert_tree_comparison (code, false);
12934 /* Compute a result for LT or EQ if args permit;
12935      otherwise return T.  */
12936 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12938 if (code == EQ_EXPR)
12939 result = tree_int_cst_equal (op0, op1);
12940 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12941 result = INT_CST_LT_UNSIGNED (op0, op1);
12943 result = INT_CST_LT (op0, op1);
12950 return constant_boolean_node (result, type);
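/* Illustrative sketch (example_fold_less is hypothetical): with two
   INTEGER_CST operands the comparison is decided right here, so
   2 < 3 yields the boolean constant 1.  */

static tree
example_fold_less (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}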
12953 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
12954    Don't build a cleanup point expression for EXPR if it doesn't have side
12955    effects.  */
12958 fold_build_cleanup_point_expr (tree type, tree expr)
12960 /* If the expression does not have side effects then we don't have to wrap
12961 it with a cleanup point expression. */
12962 if (!TREE_SIDE_EFFECTS (expr))
12965   /* If the expression is a return, check to see if the expression inside the
12966      return has no side effects, or whether the right hand side of the modify
12967      expression inside the return does.  If either has no side effects, we don't
12968      need to wrap the expression in a cleanup point expression.  Note we don't
12969      check the left hand side of the modify because it should always be a return decl.  */
12970 if (TREE_CODE (expr) == RETURN_EXPR)
12972 tree op = TREE_OPERAND (expr, 0);
12973 if (!op || !TREE_SIDE_EFFECTS (op))
12975 op = TREE_OPERAND (op, 1);
12976 if (!TREE_SIDE_EFFECTS (op))
12980 return build1 (CLEANUP_POINT_EXPR, type, expr);
12983 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12984 avoid confusing the gimplify process. */
12987 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12989 /* The size of the object is not relevant when talking about its address. */
12990 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12991 t = TREE_OPERAND (t, 0);
12993   /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
12994 if (TREE_CODE (t) == INDIRECT_REF
12995 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12997 t = TREE_OPERAND (t, 0);
12998 if (TREE_TYPE (t) != ptrtype)
12999 t = build1 (NOP_EXPR, ptrtype, t);
13005 while (handled_component_p (base))
13006 base = TREE_OPERAND (base, 0);
13008 TREE_ADDRESSABLE (base) = 1;
13010 t = build1 (ADDR_EXPR, ptrtype, t);
13017 build_fold_addr_expr (tree t)
13019 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13022 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13023 of an indirection through OP0, or NULL_TREE if no simplification is
13027 fold_indirect_ref_1 (tree type, tree op0)
13033 subtype = TREE_TYPE (sub);
13034 if (!POINTER_TYPE_P (subtype))
13037 if (TREE_CODE (sub) == ADDR_EXPR)
13039 tree op = TREE_OPERAND (sub, 0);
13040 tree optype = TREE_TYPE (op);
13041 /* *&CONST_DECL -> to the value of the const decl. */
13042 if (TREE_CODE (op) == CONST_DECL)
13043 return DECL_INITIAL (op);
13044 /* *&p => p; make sure to handle *&"str"[cst] here. */
13045 if (type == optype)
13047 tree fop = fold_read_from_constant_string (op);
13053 /* *(foo *)&fooarray => fooarray[0] */
13054 else if (TREE_CODE (optype) == ARRAY_TYPE
13055 && type == TREE_TYPE (optype))
13057 tree type_domain = TYPE_DOMAIN (optype);
13058 tree min_val = size_zero_node;
13059 if (type_domain && TYPE_MIN_VALUE (type_domain))
13060 min_val = TYPE_MIN_VALUE (type_domain);
13061 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13063 /* *(foo *)&complexfoo => __real__ complexfoo */
13064 else if (TREE_CODE (optype) == COMPLEX_TYPE
13065 && type == TREE_TYPE (optype))
13066 return fold_build1 (REALPART_EXPR, type, op);
13069 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13070 if (TREE_CODE (sub) == PLUS_EXPR
13071 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13073 tree op00 = TREE_OPERAND (sub, 0);
13074 tree op01 = TREE_OPERAND (sub, 1);
13078 op00type = TREE_TYPE (op00);
13079 if (TREE_CODE (op00) == ADDR_EXPR
13080 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13081 && type == TREE_TYPE (TREE_TYPE (op00type)))
13083 tree size = TYPE_SIZE_UNIT (type);
13084 if (tree_int_cst_equal (size, op01))
13085 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13089 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13090 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13091 && type == TREE_TYPE (TREE_TYPE (subtype)))
13094 tree min_val = size_zero_node;
13095 sub = build_fold_indirect_ref (sub);
13096 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13097 if (type_domain && TYPE_MIN_VALUE (type_domain))
13098 min_val = TYPE_MIN_VALUE (type_domain);
13099 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13105 /* Builds an expression for an indirection through T, simplifying some
13109 build_fold_indirect_ref (tree t)
13111 tree type = TREE_TYPE (TREE_TYPE (t));
13112 tree sub = fold_indirect_ref_1 (type, t);
13117 return build1 (INDIRECT_REF, type, t);
13120 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13123 fold_indirect_ref (tree t)
13125 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
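/* Illustrative sketch (example_fold_deref_addr is hypothetical):
   composing the two helpers shows the *&var => var round trip.  */

static tree
example_fold_deref_addr (tree var)
{
  return build_fold_indirect_ref (build_fold_addr_expr (var));
}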
13133 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13134 whose result is ignored. The type of the returned tree need not be
13135 the same as the original expression. */
13138 fold_ignored_result (tree t)
13140 if (!TREE_SIDE_EFFECTS (t))
13141 return integer_zero_node;
13144 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13147 t = TREE_OPERAND (t, 0);
13151 case tcc_comparison:
13152 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13153 t = TREE_OPERAND (t, 0);
13154 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13155 t = TREE_OPERAND (t, 1);
13160 case tcc_expression:
13161 switch (TREE_CODE (t))
13163 case COMPOUND_EXPR:
13164 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13166 t = TREE_OPERAND (t, 0);
13170 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13171 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13173 t = TREE_OPERAND (t, 0);
13186 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13187 This can only be applied to objects of a sizetype. */
13190 round_up (tree value, int divisor)
13192 tree div = NULL_TREE;
13194 gcc_assert (divisor > 0);
13198 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13199 have to do anything. Only do this when we are not given a const,
13200      because in that case, this check is more expensive than just
13201      doing it.  */
13202 if (TREE_CODE (value) != INTEGER_CST)
13204 div = build_int_cst (TREE_TYPE (value), divisor);
13206 if (multiple_of_p (TREE_TYPE (value), value, div))
13210 /* If divisor is a power of two, simplify this to bit manipulation. */
13211 if (divisor == (divisor & -divisor))
13215 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13216 value = size_binop (PLUS_EXPR, value, t);
13217 t = build_int_cst (TREE_TYPE (value), -divisor);
13218 value = size_binop (BIT_AND_EXPR, value, t);
13223 div = build_int_cst (TREE_TYPE (value), divisor);
13224 value = size_binop (CEIL_DIV_EXPR, value, div);
13225 value = size_binop (MULT_EXPR, value, div);
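/* Worked example of the power-of-two path above (example_round_up_37
   is hypothetical): for round_up (37, 8) we compute
   (37 + 7) & -8 == 44 & ~7 == 40.  */

static tree
example_round_up_37 (void)
{
  return round_up (size_int (37), 8);
}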
13231 /* Likewise, but round down. */
13234 round_down (tree value, int divisor)
13236 tree div = NULL_TREE;
13238 gcc_assert (divisor > 0);
13242 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13243 have to do anything. Only do this when we are not given a const,
13244      because in that case, this check is more expensive than just
13245      doing it.  */
13246 if (TREE_CODE (value) != INTEGER_CST)
13248 div = build_int_cst (TREE_TYPE (value), divisor);
13250 if (multiple_of_p (TREE_TYPE (value), value, div))
13254 /* If divisor is a power of two, simplify this to bit manipulation. */
13255 if (divisor == (divisor & -divisor))
13259 t = build_int_cst (TREE_TYPE (value), -divisor);
13260 value = size_binop (BIT_AND_EXPR, value, t);
13265 div = build_int_cst (TREE_TYPE (value), divisor);
13266 value = size_binop (FLOOR_DIV_EXPR, value, div);
13267 value = size_binop (MULT_EXPR, value, div);
13273 /* Returns the pointer to the base of the object addressed by EXP and
13274 extracts the information about the offset of the access, storing it
13275 to PBITPOS and POFFSET. */
13278 split_address_to_core_and_offset (tree exp,
13279 HOST_WIDE_INT *pbitpos, tree *poffset)
13282 enum machine_mode mode;
13283 int unsignedp, volatilep;
13284 HOST_WIDE_INT bitsize;
13286 if (TREE_CODE (exp) == ADDR_EXPR)
13288 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13289 poffset, &mode, &unsignedp, &volatilep,
13291 core = build_fold_addr_expr (core);
13297 *poffset = NULL_TREE;
13303 /* Returns true if addresses of E1 and E2 differ by a constant, false
13304 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13307 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13310 HOST_WIDE_INT bitpos1, bitpos2;
13311 tree toffset1, toffset2, tdiff, type;
13313 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13314 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13316 if (bitpos1 % BITS_PER_UNIT != 0
13317 || bitpos2 % BITS_PER_UNIT != 0
13318 || !operand_equal_p (core1, core2, 0))
13321 if (toffset1 && toffset2)
13323 type = TREE_TYPE (toffset1);
13324 if (type != TREE_TYPE (toffset2))
13325 toffset2 = fold_convert (type, toffset2);
13327 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13328 if (!cst_and_fits_in_hwi (tdiff))
13331 *diff = int_cst_value (tdiff);
13333 else if (toffset1 || toffset2)
13335       /* If only one of the offsets is non-constant, the difference cannot
13336 	 be a constant.  */
13342 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
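/* Illustrative sketch (example_ptr_diff is hypothetical; ARRAY is
   assumed to be a decl of some array type with element type
   ELT_TYPE): &a[3] and &a[1] differ by two element sizes, so
   ptr_difference_const succeeds and stores that byte count in
   *DIFF.  */

static bool
example_ptr_diff (tree array, tree elt_type, HOST_WIDE_INT *diff)
{
  tree r3 = build4 (ARRAY_REF, elt_type, array, size_int (3),
		    NULL_TREE, NULL_TREE);
  tree r1 = build4 (ARRAY_REF, elt_type, array, size_int (1),
		    NULL_TREE, NULL_TREE);

  return ptr_difference_const (build_fold_addr_expr (r3),
			       build_fold_addr_expr (r1), diff);
}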
13346 /* Simplify the floating point expression EXP when the sign of the
13347 result is not significant. Return NULL_TREE if no simplification
13351 fold_strip_sign_ops (tree exp)
13355 switch (TREE_CODE (exp))
13359 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13360 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13364 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13366 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13367 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13368 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13369 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13370 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13371 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13374 case COMPOUND_EXPR:
13375 arg0 = TREE_OPERAND (exp, 0);
13376 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13378 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13382 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13383 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13385 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13386 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13387 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13391 /* Strip sign ops from the argument of "odd" math functions. */
13392 if (negate_mathfn_p (builtin_mathfn_code (exp)))
13394 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13396 return build_function_call_expr (get_callee_fndecl (exp),
13397 build_tree_list (NULL_TREE, arg0));