/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision, etc., is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
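/* (Illustrative note, assuming GCC's usual compcode layout: bit 0 stands
   for "less than", bit 1 for "equal", bit 2 for "greater than" and bit 3
   for "unordered", so e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and
   ANDing or ORing two codes combines the corresponding tests.)  */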
enum comparison_code {
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree, tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
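
/* Illustrative example, using 8-bit values: 0x70 + 0x70 wraps to 0xE0,
   a negative result from two positive operands; ~(0x70 ^ 0x70)
   & (0x70 ^ 0xE0) has the sign bit set, so the macro reports overflow.
   By contrast 0x70 + 0xF0 (i.e. 0x70 + (-0x10)) gives 0x60, the operand
   signs differ, and the macro reports none.  */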
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
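
/* Illustrative example, on a host where HOST_BITS_PER_WIDE_INT is 32:
   BASE is 0x10000, LOWPART (0x12345678) == 0x5678 and
   HIGHPART (0x12345678) == 0x1234, so 0x5678 + 0x1234 * 0x10000 recovers
   the original value.  */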
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
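
/* encode/decode round-trip (illustrative, 32-bit HOST_WIDE_INT host):
   encode (w, 0x89ABCDEF, 0x01234567) yields
   w = { 0xCDEF, 0x89AB, 0x4567, 0x0123 }, and decode (w, &lo, &hi)
   restores lo == 0x89ABCDEF and hi == 0x01234567.  */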
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
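
/* Illustrative use of force_fit_type: an INTEGER_CST holding 0x1ff with
   an 8-bit unsigned type is truncated to 0xff; had the 8-bit type been
   signed, the 0xff pattern would then be sign extended to -1.  Overflow
   flags are only set per the OVERFLOWABLE/OVERFLOWED rules above.  */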
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
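
/* Illustrative note: the (l < l1) term above propagates the carry out of
   the low word; e.g. adding (l1,h1) == (~0,0) and (l2,h2) == (1,0) gives
   l == 0 with a carry, so h becomes 1.  */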
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
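
/* Illustrative note: the only signed overflow case is negating the most
   negative doubleword, which negates to itself; the (*hv & h1) < 0 test
   catches exactly that, since both the input and the result then have
   the sign bit set.  */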
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[k] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
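
/* Illustrative note: the nested loop above is schoolbook long
   multiplication in base 2**(HOST_BITS_PER_WIDE_INT / 2): digit i of
   ARG1 times digit j of ARG2 contributes to digit i + j of the 8-digit
   product, with carries propagated one digit at a time.  */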
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
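
/* Illustrative note: the two-step right shift above,
   (x >> (width - count - 1)) >> 1, avoids a shift by the full word
   width when COUNT == 0, which C leaves undefined, while still yielding
   the expected zero contribution in that case.  */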
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
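
/* Illustrative note: the rotate is composed from two shifts via the
   usual identity rol (x, n) == (x << n) | (x >> (prec - n)), OR'ing the
   bits shifted out of the top back in at the bottom.  */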
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
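/* Illustrative rounding examples for -7 / 2: TRUNC gives -3 (rem -1),
   FLOOR gives -4 (rem 1), CEIL gives -3 (rem -1), and ROUND gives -4,
   since ties such as -3.5 are rounded away from zero.  */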
int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the (I - 1)st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);
 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_ERF):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
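
/* Illustrative note: for a 32-bit signed type only INT_MIN (bit pattern
   0x80000000) cannot be negated, since -INT_MIN is not representable;
   the final test compares against exactly that pattern.  */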
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
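
/* Illustrative note on the RSHIFT_EXPR case: (int) x >> 31 is the
   sign-mask idiom, yielding 0 or -1, so its negation is 0 or 1, which is
   exactly (unsigned) x >> 31; negation is free when the shift count
   equals precision - 1.  */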
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
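
/* Illustrative example: splitting IN = x + 3 with CODE == PLUS_EXPR sets
   *LITP to 3, leaves *CONP null and returns x; splitting x - 3 under
   PLUS_EXPR instead records the 3 in *MINUS_LITP, since it was
   subtracted.  */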
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
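
/* Illustrative usage: int_const_binop (PLUS_EXPR, a, b, 0) folds the sum
   of two INTEGER_CSTs and truncates via force_fit_type, so overflow
   flags follow the type's rules; passing NOTRUNC == 1 keeps the raw
   doubleword result and only propagates existing flags.  */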
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
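
/* Illustrative note: the RDIV_EXPR case above is the textbook formula
   (r1 + i1*i) / (r2 + i2*i)
     == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   obtained by multiplying through by the conjugate of the divisor.  */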
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
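
  /* Illustrative consequence: converting 1e30 to a 32-bit int folds to
     INT_MAX, -1e30 folds to INT_MIN, and a NaN folds to 0, in each case
     with the overflow flag raised by the code below.  */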
  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL: case PARM_DECL: case RESULT_DECL:
    case LABEL_DECL: case FUNCTION_DECL: case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF: case OBJ_TYPE_REF:

    case REALPART_EXPR: case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR: case MODIFY_EXPR:
    case TARGET_EXPR: case COND_EXPR: case BIND_EXPR:
    case MIN_EXPR: case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:  return NE_EXPR;
    case NE_EXPR:  return EQ_EXPR;
    case GT_EXPR:  return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:  return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:  return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:  return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:  return UNEQ_EXPR;
    case UNEQ_EXPR:  return LTGT_EXPR;
    case UNGT_EXPR:  return LE_EXPR;
    case UNGE_EXPR:  return LT_EXPR;
    case UNLT_EXPR:  return GE_EXPR;
    case UNLE_EXPR:  return GT_EXPR;
    case ORDERED_EXPR:  return UNORDERED_EXPR;
    case UNORDERED_EXPR:  return ORDERED_EXPR;
    default:  gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR:  return LT_EXPR;
    case GE_EXPR:  return LE_EXPR;
    case LT_EXPR:  return GT_EXPR;
    case LE_EXPR:  return GE_EXPR;
    case UNGT_EXPR:  return UNLT_EXPR;
    case UNGE_EXPR:  return UNLE_EXPR;
    case UNLT_EXPR:  return UNGT_EXPR;
    case UNLE_EXPR:  return UNGE_EXPR;
    default:  gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:  return COMPCODE_LT;
    case EQ_EXPR:  return COMPCODE_EQ;
    case LE_EXPR:  return COMPCODE_LE;
    case GT_EXPR:  return COMPCODE_GT;
    case NE_EXPR:  return COMPCODE_NE;
    case GE_EXPR:  return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:  return LT_EXPR;
    case COMPCODE_EQ:  return EQ_EXPR;
    case COMPCODE_LE:  return LE_EXPR;
    case COMPCODE_GT:  return GT_EXPR;
    case COMPCODE_NE:  return NE_EXPR;
    case COMPCODE_GE:  return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:  return UNLT_EXPR;
    case COMPCODE_UNEQ:  return UNEQ_EXPR;
    case COMPCODE_UNLE:  return UNLE_EXPR;
    case COMPCODE_UNGT:  return UNGT_EXPR;
    case COMPCODE_LTGT:  return LTGT_EXPR;
    case COMPCODE_UNGE:  return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2364 /* Return a tree for the comparison which is the combination of
2365 doing the AND or OR (depending on CODE) of the two operations LCODE
2366 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2367 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2368 if this makes the transformation invalid. */
2371 combine_comparisons (enum tree_code code, enum tree_code lcode,
2372 enum tree_code rcode, tree truth_type,
2373 tree ll_arg, tree lr_arg)
2375 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2376 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2377 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2378 enum comparison_code compcode;
2382 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2383 compcode = lcompcode & rcompcode;
2386 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2387 compcode = lcompcode | rcompcode;
2396 /* Eliminate unordered comparisons, as well as LTGT and ORD
2397 which are not used unless the mode has NaNs. */
2398 compcode &= ~COMPCODE_UNORD;
2399 if (compcode == COMPCODE_LTGT)
2400 compcode = COMPCODE_NE;
2401 else if (compcode == COMPCODE_ORD)
2402 compcode = COMPCODE_TRUE;
2404 else if (flag_trapping_math)
2406 /* Check that the original operation and the optimized ones will trap
2407 under the same condition. */
2408 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2409 && (lcompcode != COMPCODE_EQ)
2410 && (lcompcode != COMPCODE_ORD);
2411 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2412 && (rcompcode != COMPCODE_EQ)
2413 && (rcompcode != COMPCODE_ORD);
2414 bool trap = (compcode & COMPCODE_UNORD) == 0
2415 && (compcode != COMPCODE_EQ)
2416 && (compcode != COMPCODE_ORD);
2418 /* In a short-circuited boolean expression the LHS might be
2419 such that the RHS, if evaluated, will never trap. For
2420 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2421 if neither x nor y is NaN. (This is a mixed blessing: for
2422 example, the expression above will never trap, hence
2423 optimizing it to x < y would be invalid). */
2424 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2425 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2428 /* If the comparison was short-circuited, and only the RHS
2429 trapped, we may now generate a spurious trap. */
2431 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2434 /* If we changed the conditions that cause a trap, we lose. */
2435 if ((ltrap || rtrap) != trap)
2439 if (compcode == COMPCODE_TRUE)
2440 return constant_boolean_node (true, truth_type);
2441 else if (compcode == COMPCODE_FALSE)
2442 return constant_boolean_node (false, truth_type);
2444 return fold_build2 (compcode_to_comparison (compcode),
2445 truth_type, ll_arg, lr_arg);
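/* Worked example (added commentary, not from the original sources),
   assuming the usual one-bit-per-outcome encoding of comparison_code
   (bits for LT, EQ, GT, UNORD): combining (x < y) || (x == y) on
   integer operands computes COMPCODE_LT | COMPCODE_EQ, which is
   COMPCODE_LE, so the pair folds to x <= y; combining
   (x < y) && (x > y) computes COMPCODE_LT & COMPCODE_GT, which is
   COMPCODE_FALSE, so the pair folds to constant false.  */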
2448 /* Return nonzero if CODE is a tree code that represents a truth value. */
2451 truth_value_p (enum tree_code code)
2453 return (TREE_CODE_CLASS (code) == tcc_comparison
2454 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2455 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2456 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2459 /* Return nonzero if two operands (typically of the same tree node)
2460 are necessarily equal. If either argument has side-effects this
2461 function returns zero. FLAGS modifies behavior as follows:
2463 If OEP_ONLY_CONST is set, only return nonzero for constants.
2464 This function tests whether the operands are indistinguishable;
2465 it does not test whether they are equal using C's == operation.
2466 The distinction is important for IEEE floating point, because
2467 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2468 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2470 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2471 even though it may hold multiple values during a function.
2472 This is because a GCC tree node guarantees that nothing else is
2473 executed between the evaluation of its "operands" (which may often
2474 be evaluated in arbitrary order). Hence if the operands themselves
2475 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2476 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2477 unset means assuming isochronic (or instantaneous) tree equivalence.
2478 Unless comparing arbitrary expression trees, such as from different
2479 statements, this flag can usually be left unset.
2481 If OEP_PURE_SAME is set, then pure functions with identical arguments
2482 are considered the same. It is used when the caller has other ways
2483 to ensure that global memory is unchanged in between. */
2486 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2488 /* If either is ERROR_MARK, they aren't equal. */
2489 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2492 /* If both types don't have the same signedness, then we can't consider
2493 them equal. We must check this before the STRIP_NOPS calls
2494 because they may change the signedness of the arguments. */
2495 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2498 /* If both types don't have the same precision, then it is not safe
2500 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2506 /* In case both args are comparisons but with different comparison
2507 code, try to swap the comparison operands of one arg to produce
2508 a match and compare that variant. */
2509 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2510 && COMPARISON_CLASS_P (arg0)
2511 && COMPARISON_CLASS_P (arg1))
2513 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2515 if (TREE_CODE (arg0) == swap_code)
2516 return operand_equal_p (TREE_OPERAND (arg0, 0),
2517 TREE_OPERAND (arg1, 1), flags)
2518 && operand_equal_p (TREE_OPERAND (arg0, 1),
2519 TREE_OPERAND (arg1, 0), flags);
2522 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2523 /* This is needed for conversions and for COMPONENT_REF.
2524 Might as well play it safe and always test this. */
2525 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2526 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2527 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2530 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2531 We don't care about side effects in that case because the SAVE_EXPR
2532 takes care of that for us. In all other cases, two expressions are
2533 equal if they have no side effects. If we have two identical
2534 expressions with side effects that should be treated the same due
2535 to the only side effects being identical SAVE_EXPR's, that will
2536 be detected in the recursive calls below. */
2537 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2538 && (TREE_CODE (arg0) == SAVE_EXPR
2539 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2542 /* Next handle constant cases, those for which we can return 1 even
2543 if ONLY_CONST is set. */
2544 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2545 switch (TREE_CODE (arg0))
2548 return tree_int_cst_equal (arg0, arg1);
2551 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2552 TREE_REAL_CST (arg1));
2558 v1 = TREE_VECTOR_CST_ELTS (arg0);
2559 v2 = TREE_VECTOR_CST_ELTS (arg1);
2562 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2565 v1 = TREE_CHAIN (v1);
2566 v2 = TREE_CHAIN (v2);
2573 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2575 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2579 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2580 && ! memcmp (TREE_STRING_POINTER (arg0),
2581 TREE_STRING_POINTER (arg1),
2582 TREE_STRING_LENGTH (arg0)));
2585 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2591 if (flags & OEP_ONLY_CONST)
2594 /* Define macros to test an operand from arg0 and arg1 for equality and a
2595 variant that allows null and views null as being different from any
2596 non-null value. In the latter case, if either is null, then both
2597 must be; otherwise, do the normal comparison. */
2598 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2599 TREE_OPERAND (arg1, N), flags)
2601 #define OP_SAME_WITH_NULL(N) \
2602 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2603 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2605 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2608 /* Two conversions are equal only if signedness and modes match. */
2609 switch (TREE_CODE (arg0))
2614 case FIX_TRUNC_EXPR:
2615 case FIX_FLOOR_EXPR:
2616 case FIX_ROUND_EXPR:
2617 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2618 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2628 case tcc_comparison:
2630 if (OP_SAME (0) && OP_SAME (1))
2633 /* For commutative ops, allow the other order. */
2634 return (commutative_tree_code (TREE_CODE (arg0))
2635 && operand_equal_p (TREE_OPERAND (arg0, 0),
2636 TREE_OPERAND (arg1, 1), flags)
2637 && operand_equal_p (TREE_OPERAND (arg0, 1),
2638 TREE_OPERAND (arg1, 0), flags));
2641 /* If either of the pointer (or reference) expressions we are
2642 dereferencing contains a side effect, these cannot be equal. */
2643 if (TREE_SIDE_EFFECTS (arg0)
2644 || TREE_SIDE_EFFECTS (arg1))
2647 switch (TREE_CODE (arg0))
2650 case ALIGN_INDIRECT_REF:
2651 case MISALIGNED_INDIRECT_REF:
2657 case ARRAY_RANGE_REF:
2658 /* Operands 2 and 3 may be null. */
2661 && OP_SAME_WITH_NULL (2)
2662 && OP_SAME_WITH_NULL (3));
2665 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2666 may be NULL when we're called to compare MEM_EXPRs. */
2667 return OP_SAME_WITH_NULL (0)
2669 && OP_SAME_WITH_NULL (2);
2672 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2678 case tcc_expression:
2679 switch (TREE_CODE (arg0))
2682 case TRUTH_NOT_EXPR:
2685 case TRUTH_ANDIF_EXPR:
2686 case TRUTH_ORIF_EXPR:
2687 return OP_SAME (0) && OP_SAME (1);
2689 case TRUTH_AND_EXPR:
2691 case TRUTH_XOR_EXPR:
2692 if (OP_SAME (0) && OP_SAME (1))
2695 /* Otherwise take into account this is a commutative operation. */
2696 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2697 TREE_OPERAND (arg1, 1), flags)
2698 && operand_equal_p (TREE_OPERAND (arg0, 1),
2699 TREE_OPERAND (arg1, 0), flags));
2702 /* If the CALL_EXPRs call different functions, then they
2703 clearly cannot be equal. */
2708 unsigned int cef = call_expr_flags (arg0);
2709 if (flags & OEP_PURE_SAME)
2710 cef &= ECF_CONST | ECF_PURE;
2717 /* Now see if all the arguments are the same. operand_equal_p
2718 does not handle TREE_LIST, so we walk the operands here
2719 feeding them to operand_equal_p. */
2720 arg0 = TREE_OPERAND (arg0, 1);
2721 arg1 = TREE_OPERAND (arg1, 1);
2722 while (arg0 && arg1)
2724 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2728 arg0 = TREE_CHAIN (arg0);
2729 arg1 = TREE_CHAIN (arg1);
2732 /* If we get here and both argument lists are exhausted
2733 then the CALL_EXPRs are equal. */
2734 return ! (arg0 || arg1);
2740 case tcc_declaration:
2741 /* Consider __builtin_sqrt equal to sqrt. */
2742 return (TREE_CODE (arg0) == FUNCTION_DECL
2743 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2744 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2745 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2752 #undef OP_SAME_WITH_NULL
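/* Usage sketch (added commentary, not from the original sources):
   operand_equal_p on trees for "a + b" and "b + a" returns nonzero,
   because PLUS_EXPR is commutative and the operands match after
   swapping; two identical calls to a function that is neither const
   nor pure compare unequal, because the calls' side effects cannot
   be assumed to produce the same value twice.  */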
2755 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2756 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2758 When in doubt, return 0. */
2761 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2763 int unsignedp1, unsignedpo;
2764 tree primarg0, primarg1, primother;
2765 unsigned int correct_width;
2767 if (operand_equal_p (arg0, arg1, 0))
2770 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2771 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2774 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2775 and see if the inner values are the same. This removes any
2776 signedness comparison, which doesn't matter here. */
2777 primarg0 = arg0, primarg1 = arg1;
2778 STRIP_NOPS (primarg0);
2779 STRIP_NOPS (primarg1);
2780 if (operand_equal_p (primarg0, primarg1, 0))
2783 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2784 actual comparison operand, ARG0.
2786 First throw away any conversions to wider types
2787 already present in the operands. */
2789 primarg1 = get_narrower (arg1, &unsignedp1);
2790 primother = get_narrower (other, &unsignedpo);
2792 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2793 if (unsignedp1 == unsignedpo
2794 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2795 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2797 tree type = TREE_TYPE (arg0);
2799 /* Make sure the shorter operand is extended the right way
2800 to match the longer operand. */
2801 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2802 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2804 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2811 /* See if ARG is an expression that is either a comparison or is performing
2812 arithmetic on comparisons. The comparisons must only be comparing
2813 two different values, which will be stored in *CVAL1 and *CVAL2; if
2814 they are nonzero it means that some operands have already been found.
2815 No variables may be used anywhere else in the expression except in the
2816 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2817 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2819 If this is true, return 1. Otherwise, return zero. */
2822 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2824 enum tree_code code = TREE_CODE (arg);
2825 enum tree_code_class class = TREE_CODE_CLASS (code);
2827 /* We can handle some of the tcc_expression cases here. */
2828 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2830 else if (class == tcc_expression
2831 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2832 || code == COMPOUND_EXPR))
2835 else if (class == tcc_expression && code == SAVE_EXPR
2836 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2838 /* If we've already found a CVAL1 or CVAL2, this expression is
2839 too complex to handle. */
2840 if (*cval1 || *cval2)
2850 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2853 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2854 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2855 cval1, cval2, save_p));
2860 case tcc_expression:
2861 if (code == COND_EXPR)
2862 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2863 cval1, cval2, save_p)
2864 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2865 cval1, cval2, save_p)
2866 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2867 cval1, cval2, save_p));
2870 case tcc_comparison:
2871 /* First see if we can handle the first operand, then the second. For
2872 the second operand, we know *CVAL1 can't be zero. It must be that
2873 one side of the comparison is each of the values; test for the
2874 case where this isn't true by failing if the two operands
2877 if (operand_equal_p (TREE_OPERAND (arg, 0),
2878 TREE_OPERAND (arg, 1), 0))
2882 *cval1 = TREE_OPERAND (arg, 0);
2883 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2885 else if (*cval2 == 0)
2886 *cval2 = TREE_OPERAND (arg, 0);
2887 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2892 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2894 else if (*cval2 == 0)
2895 *cval2 = TREE_OPERAND (arg, 1);
2896 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2908 /* ARG is a tree that is known to contain just arithmetic operations and
2909 comparisons. Evaluate the operations in the tree substituting NEW0 for
2910 any occurrence of OLD0 as an operand of a comparison and likewise for
2914 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2916 tree type = TREE_TYPE (arg);
2917 enum tree_code code = TREE_CODE (arg);
2918 enum tree_code_class class = TREE_CODE_CLASS (code);
2920 /* We can handle some of the tcc_expression cases here. */
2921 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2923 else if (class == tcc_expression
2924 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2930 return fold_build1 (code, type,
2931 eval_subst (TREE_OPERAND (arg, 0),
2932 old0, new0, old1, new1));
2935 return fold_build2 (code, type,
2936 eval_subst (TREE_OPERAND (arg, 0),
2937 old0, new0, old1, new1),
2938 eval_subst (TREE_OPERAND (arg, 1),
2939 old0, new0, old1, new1));
2941 case tcc_expression:
2945 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2948 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2951 return fold_build3 (code, type,
2952 eval_subst (TREE_OPERAND (arg, 0),
2953 old0, new0, old1, new1),
2954 eval_subst (TREE_OPERAND (arg, 1),
2955 old0, new0, old1, new1),
2956 eval_subst (TREE_OPERAND (arg, 2),
2957 old0, new0, old1, new1));
2961 /* Fall through - ??? */
2963 case tcc_comparison:
2965 tree arg0 = TREE_OPERAND (arg, 0);
2966 tree arg1 = TREE_OPERAND (arg, 1);
2968 /* We need to check both for exact equality and tree equality. The
2969 former will be true if the operand has a side-effect. In that
2970 case, we know the operand occurred exactly once. */
2972 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2974 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2977 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2979 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2982 return fold_build2 (code, type, arg0, arg1);
2990 /* Return a tree for the case when the result of an expression is RESULT
2991 converted to TYPE and OMITTED was previously an operand of the expression
2992 but is now not needed (e.g., we folded OMITTED * 0).
2994 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2995 the conversion of RESULT to TYPE. */
2998 omit_one_operand (tree type, tree result, tree omitted)
3000 tree t = fold_convert (type, result);
3002 if (TREE_SIDE_EFFECTS (omitted))
3003 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3005 return non_lvalue (t);
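/* Example (added commentary, not from the original sources): when
   "f () * 0" is folded to 0 the call is the omitted operand, and
   because it has side effects the result is the COMPOUND_EXPR
   "(f (), 0)" so the call still happens; omitting a side-effect-free
   operand just yields the converted RESULT.  */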
3008 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3011 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3013 tree t = fold_convert (type, result);
3015 if (TREE_SIDE_EFFECTS (omitted))
3016 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3018 return pedantic_non_lvalue (t);
3021 /* Return a tree for the case when the result of an expression is RESULT
3022 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3023 of the expression but are now not needed.
3025 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3026 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3027 evaluated before OMITTED2. Otherwise, if neither has side effects,
3028 just do the conversion of RESULT to TYPE. */
3031 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3033 tree t = fold_convert (type, result);
3035 if (TREE_SIDE_EFFECTS (omitted2))
3036 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3037 if (TREE_SIDE_EFFECTS (omitted1))
3038 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3040 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3044 /* Return a simplified tree node for the truth-negation of ARG. This
3045 never alters ARG itself. We assume that ARG is an operation that
3046 returns a truth value (0 or 1).
3048 FIXME: one would think we would fold the result, but it causes
3049 problems with the dominator optimizer. */
3052 fold_truth_not_expr (tree arg)
3054 tree type = TREE_TYPE (arg);
3055 enum tree_code code = TREE_CODE (arg);
3057 /* If this is a comparison, we can simply invert it, except for
3058 floating-point non-equality comparisons, in which case we just
3059 enclose a TRUTH_NOT_EXPR around what we have. */
3061 if (TREE_CODE_CLASS (code) == tcc_comparison)
3063 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3064 if (FLOAT_TYPE_P (op_type)
3065 && flag_trapping_math
3066 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3067 && code != NE_EXPR && code != EQ_EXPR)
3071 code = invert_tree_comparison (code,
3072 HONOR_NANS (TYPE_MODE (op_type)));
3073 if (code == ERROR_MARK)
3076 return build2 (code, type,
3077 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3084 return constant_boolean_node (integer_zerop (arg), type);
3086 case TRUTH_AND_EXPR:
3087 return build2 (TRUTH_OR_EXPR, type,
3088 invert_truthvalue (TREE_OPERAND (arg, 0)),
3089 invert_truthvalue (TREE_OPERAND (arg, 1)));
3092 return build2 (TRUTH_AND_EXPR, type,
3093 invert_truthvalue (TREE_OPERAND (arg, 0)),
3094 invert_truthvalue (TREE_OPERAND (arg, 1)));
3096 case TRUTH_XOR_EXPR:
3097 /* Here we can invert either operand. We invert the first operand
3098 unless the second operand is a TRUTH_NOT_EXPR in which case our
3099 result is the XOR of the first operand with the inside of the
3100 negation of the second operand. */
3102 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3103 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3104 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3106 return build2 (TRUTH_XOR_EXPR, type,
3107 invert_truthvalue (TREE_OPERAND (arg, 0)),
3108 TREE_OPERAND (arg, 1));
3110 case TRUTH_ANDIF_EXPR:
3111 return build2 (TRUTH_ORIF_EXPR, type,
3112 invert_truthvalue (TREE_OPERAND (arg, 0)),
3113 invert_truthvalue (TREE_OPERAND (arg, 1)));
3115 case TRUTH_ORIF_EXPR:
3116 return build2 (TRUTH_ANDIF_EXPR, type,
3117 invert_truthvalue (TREE_OPERAND (arg, 0)),
3118 invert_truthvalue (TREE_OPERAND (arg, 1)));
3120 case TRUTH_NOT_EXPR:
3121 return TREE_OPERAND (arg, 0);
3125 tree arg1 = TREE_OPERAND (arg, 1);
3126 tree arg2 = TREE_OPERAND (arg, 2);
3127 /* A COND_EXPR may have a throw as one operand, which
3128 then has void type. Just leave void operands
3130 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3131 VOID_TYPE_P (TREE_TYPE (arg1))
3132 ? arg1 : invert_truthvalue (arg1),
3133 VOID_TYPE_P (TREE_TYPE (arg2))
3134 ? arg2 : invert_truthvalue (arg2));
3138 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3139 invert_truthvalue (TREE_OPERAND (arg, 1)));
3141 case NON_LVALUE_EXPR:
3142 return invert_truthvalue (TREE_OPERAND (arg, 0));
3145 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3146 return build1 (TRUTH_NOT_EXPR, type, arg);
3150 return build1 (TREE_CODE (arg), type,
3151 invert_truthvalue (TREE_OPERAND (arg, 0)));
3154 if (!integer_onep (TREE_OPERAND (arg, 1)))
3156 return build2 (EQ_EXPR, type, arg,
3157 build_int_cst (type, 0));
3160 return build1 (TRUTH_NOT_EXPR, type, arg);
3162 case CLEANUP_POINT_EXPR:
3163 return build1 (CLEANUP_POINT_EXPR, type,
3164 invert_truthvalue (TREE_OPERAND (arg, 0)));
3173 /* Return a simplified tree node for the truth-negation of ARG. This
3174 never alters ARG itself. We assume that ARG is an operation that
3175 returns a truth value (0 or 1).
3177 FIXME: one would think we would fold the result, but it causes
3178 problems with the dominator optimizer. */
3181 invert_truthvalue (tree arg)
3185 if (TREE_CODE (arg) == ERROR_MARK)
3188 tem = fold_truth_not_expr (arg);
3190 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
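/* Example (added commentary, not from the original sources):
   inverting "a && b" (TRUTH_ANDIF_EXPR) builds "!a || !b" as a
   TRUTH_ORIF_EXPR of the inverted operands, and inverting "x == y"
   builds "x != y"; only when fold_truth_not_expr finds no rule does
   invert_truthvalue fall back to wrapping ARG in a TRUTH_NOT_EXPR.  */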
3195 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3196 operands are another bit-wise operation with a common input. If so,
3197 distribute the bit operations to save an operation and possibly two if
3198 constants are involved. For example, convert
3199 (A | B) & (A | C) into A | (B & C)
3200 Further simplification will occur if B and C are constants.
3202 If this optimization cannot be done, 0 will be returned. */
3205 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3210 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3211 || TREE_CODE (arg0) == code
3212 || (TREE_CODE (arg0) != BIT_AND_EXPR
3213 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3216 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3218 common = TREE_OPERAND (arg0, 0);
3219 left = TREE_OPERAND (arg0, 1);
3220 right = TREE_OPERAND (arg1, 1);
3222 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3224 common = TREE_OPERAND (arg0, 0);
3225 left = TREE_OPERAND (arg0, 1);
3226 right = TREE_OPERAND (arg1, 0);
3228 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3230 common = TREE_OPERAND (arg0, 1);
3231 left = TREE_OPERAND (arg0, 0);
3232 right = TREE_OPERAND (arg1, 1);
3234 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3236 common = TREE_OPERAND (arg0, 1);
3237 left = TREE_OPERAND (arg0, 0);
3238 right = TREE_OPERAND (arg1, 0);
3243 return fold_build2 (TREE_CODE (arg0), type, common,
3244 fold_build2 (code, type, left, right));
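/* Worked example (added commentary, not from the original sources):
   for "(a | 3) & (a | 5)", COMMON is a, LEFT is 3 and RIGHT is 5,
   so the result is a BIT_IOR_EXPR of a with the folded constant
   "3 & 5", i.e. "a | 1" -- one bitwise operation instead of three.  */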
3247 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3248 with code CODE. This optimization is unsafe. */
3250 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3252 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3253 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3255 /* (A / C) +- (B / C) -> (A +- B) / C. */
3257 && operand_equal_p (TREE_OPERAND (arg0, 1),
3258 TREE_OPERAND (arg1, 1), 0))
3259 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3260 fold_build2 (code, type,
3261 TREE_OPERAND (arg0, 0),
3262 TREE_OPERAND (arg1, 0)),
3263 TREE_OPERAND (arg0, 1));
3265 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3266 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3267 TREE_OPERAND (arg1, 0), 0)
3268 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3269 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3271 REAL_VALUE_TYPE r0, r1;
3272 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3273 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3275 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3277 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3278 real_arithmetic (&r0, code, &r0, &r1);
3279 return fold_build2 (MULT_EXPR, type,
3280 TREE_OPERAND (arg0, 0),
3281 build_real (type, r0));
3287 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3288 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3291 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3298 tree size = TYPE_SIZE (TREE_TYPE (inner));
3299 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3300 || POINTER_TYPE_P (TREE_TYPE (inner)))
3301 && host_integerp (size, 0)
3302 && tree_low_cst (size, 0) == bitsize)
3303 return fold_convert (type, inner);
3306 result = build3 (BIT_FIELD_REF, type, inner,
3307 size_int (bitsize), bitsize_int (bitpos));
3309 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3314 /* Optimize a bit-field compare.
3316 There are two cases: First is a compare against a constant and the
3317 second is a comparison of two items where the fields are at the same
3318 bit position relative to the start of a chunk (byte, halfword, word)
3319 large enough to contain it. In these cases we can avoid the shift
3320 implicit in bitfield extractions.
3322 For constants, we emit a compare of the shifted constant with the
3323 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3324 compared. For two fields at the same position, we do the ANDs with the
3325 similar mask and compare the result of the ANDs.
3327 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3328 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3329 are the left and right operands of the comparison, respectively.
3331 If the optimization described above can be done, we return the resulting
3332 tree. Otherwise we return zero. */
3335 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3338 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3339 tree type = TREE_TYPE (lhs);
3340 tree signed_type, unsigned_type;
3341 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3342 enum machine_mode lmode, rmode, nmode;
3343 int lunsignedp, runsignedp;
3344 int lvolatilep = 0, rvolatilep = 0;
3345 tree linner, rinner = NULL_TREE;
3349 /* Get all the information about the extractions being done. If the bit size
3350 is the same as the size of the underlying object, we aren't doing an
3351 extraction at all and so can do nothing. We also don't want to
3352 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3353 then will no longer be able to replace it. */
3354 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3355 &lunsignedp, &lvolatilep, false);
3356 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3357 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3362 /* If this is not a constant, we can only do something if bit positions,
3363 sizes, and signedness are the same. */
3364 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3365 &runsignedp, &rvolatilep, false);
3367 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3368 || lunsignedp != runsignedp || offset != 0
3369 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3373 /* See if we can find a mode to refer to this field. We should be able to,
3374 but fail if we can't. */
3375 nmode = get_best_mode (lbitsize, lbitpos,
3376 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3377 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3378 TYPE_ALIGN (TREE_TYPE (rinner))),
3379 word_mode, lvolatilep || rvolatilep);
3380 if (nmode == VOIDmode)
3383 /* Set signed and unsigned types of the precision of this mode for the
3385 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3386 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3388 /* Compute the bit position and size for the new reference and our offset
3389 within it. If the new reference is the same size as the original, we
3390 won't optimize anything, so return zero. */
3391 nbitsize = GET_MODE_BITSIZE (nmode);
3392 nbitpos = lbitpos & ~ (nbitsize - 1);
3394 if (nbitsize == lbitsize)
3397 if (BYTES_BIG_ENDIAN)
3398 lbitpos = nbitsize - lbitsize - lbitpos;
3400 /* Make the mask to be used against the extracted field. */
3401 mask = build_int_cst (unsigned_type, -1);
3402 mask = force_fit_type (mask, 0, false, false);
3403 mask = fold_convert (unsigned_type, mask);
3404 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3405 mask = const_binop (RSHIFT_EXPR, mask,
3406 size_int (nbitsize - lbitsize - lbitpos), 0);
3409 /* If not comparing with constant, just rework the comparison
3411 return build2 (code, compare_type,
3412 build2 (BIT_AND_EXPR, unsigned_type,
3413 make_bit_field_ref (linner, unsigned_type,
3414 nbitsize, nbitpos, 1),
3416 build2 (BIT_AND_EXPR, unsigned_type,
3417 make_bit_field_ref (rinner, unsigned_type,
3418 nbitsize, nbitpos, 1),
3421 /* Otherwise, we are handling the constant case. See if the constant is too
3422 big for the field. Warn and return a tree for 0 (false) if so. We do
3423 this not only for its own sake, but to avoid having to test for this
3424 error case below. If we didn't, we might generate wrong code.
3426 For unsigned fields, the constant shifted right by the field length should
3427 be all zero. For signed fields, the high-order bits should agree with
3432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3433 fold_convert (unsigned_type, rhs),
3434 size_int (lbitsize), 0)))
3436 warning (0, "comparison is always %d due to width of bit-field",
3438 return constant_boolean_node (code == NE_EXPR, compare_type);
3443 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3444 size_int (lbitsize - 1), 0);
3445 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3447 warning (0, "comparison is always %d due to width of bit-field",
3449 return constant_boolean_node (code == NE_EXPR, compare_type);
3453 /* Single-bit compares should always be against zero. */
3454 if (lbitsize == 1 && ! integer_zerop (rhs))
3456 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3457 rhs = build_int_cst (type, 0);
3460 /* Make a new bitfield reference, shift the constant over the
3461 appropriate number of bits and mask it with the computed mask
3462 (in case this was a signed field). If we changed it, make a new one. */
3463 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3466 TREE_SIDE_EFFECTS (lhs) = 1;
3467 TREE_THIS_VOLATILE (lhs) = 1;
3470 rhs = const_binop (BIT_AND_EXPR,
3471 const_binop (LSHIFT_EXPR,
3472 fold_convert (unsigned_type, rhs),
3473 size_int (lbitpos), 0),
3476 return build2 (code, compare_type,
3477 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
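/* Illustrative sketch (added commentary, not from the original
   sources; the exact masks and shifts are target dependent, e.g.
   BYTES_BIG_ENDIAN changes the bit position).  Given

     struct s { unsigned a : 3; unsigned b : 5; } x;

   the test "x.b == 7" can become, on a little-endian target, roughly

     (containing_word & (0x1f << 3)) == (7 << 3)

   comparing the shifted constant against the masked word instead of
   shifting the extracted field.  */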
3481 /* Subroutine for fold_truthop: decode a field reference.
3483 If EXP is a comparison reference, we return the innermost reference.
3485 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3486 set to the starting bit number.
3488 If the innermost field can be completely contained in a mode-sized
3489 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3491 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3492 otherwise it is not changed.
3494 *PUNSIGNEDP is set to the signedness of the field.
3496 *PMASK is set to the mask used. This is either contained in a
3497 BIT_AND_EXPR or derived from the width of the field.
3499 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3501 Return 0 if this is not a component reference or is one that we can't
3502 do anything with. */
3505 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3506 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3507 int *punsignedp, int *pvolatilep,
3508 tree *pmask, tree *pand_mask)
3510 tree outer_type = 0;
3512 tree mask, inner, offset;
3514 unsigned int precision;
3516 /* All the optimizations using this function assume integer fields.
3517 There are problems with FP fields since the type_for_size call
3518 below can fail for, e.g., XFmode. */
3519 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3522 /* We are interested in the bare arrangement of bits, so strip everything
3523 that doesn't affect the machine mode. However, record the type of the
3524 outermost expression if it may matter below. */
3525 if (TREE_CODE (exp) == NOP_EXPR
3526 || TREE_CODE (exp) == CONVERT_EXPR
3527 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3528 outer_type = TREE_TYPE (exp);
3531 if (TREE_CODE (exp) == BIT_AND_EXPR)
3533 and_mask = TREE_OPERAND (exp, 1);
3534 exp = TREE_OPERAND (exp, 0);
3535 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3536 if (TREE_CODE (and_mask) != INTEGER_CST)
3540 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3541 punsignedp, pvolatilep, false);
3542 if ((inner == exp && and_mask == 0)
3543 || *pbitsize < 0 || offset != 0
3544 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3547 /* If the number of bits in the reference is the same as the bitsize of
3548 the outer type, then the outer type gives the signedness. Otherwise
3549 (in case of a small bitfield) the signedness is unchanged. */
3550 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3551 *punsignedp = TYPE_UNSIGNED (outer_type);
3553 /* Compute the mask to access the bitfield. */
3554 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3555 precision = TYPE_PRECISION (unsigned_type);
3557 mask = build_int_cst (unsigned_type, -1);
3558 mask = force_fit_type (mask, 0, false, false);
3560 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3561 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3563 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3565 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3566 fold_convert (unsigned_type, and_mask), mask);
3569 *pand_mask = and_mask;
3573 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3577 all_ones_mask_p (tree mask, int size)
3579 tree type = TREE_TYPE (mask);
3580 unsigned int precision = TYPE_PRECISION (type);
3583 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3584 tmask = force_fit_type (tmask, 0, false, false);
3587 tree_int_cst_equal (mask,
3588 const_binop (RSHIFT_EXPR,
3589 const_binop (LSHIFT_EXPR, tmask,
3590 size_int (precision - size),
3592 size_int (precision - size), 0));
3595 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3596 represents the sign bit of EXP's type. If EXP represents a sign
3597 or zero extension, also test VAL against the unextended type.
3598 The return value is the (sub)expression whose sign bit is VAL,
3599 or NULL_TREE otherwise. */
3602 sign_bit_p (tree exp, tree val)
3604 unsigned HOST_WIDE_INT mask_lo, lo;
3605 HOST_WIDE_INT mask_hi, hi;
3609 /* Tree EXP must have an integral type. */
3610 t = TREE_TYPE (exp);
3611 if (! INTEGRAL_TYPE_P (t))
3614 /* Tree VAL must be an integer constant. */
3615 if (TREE_CODE (val) != INTEGER_CST
3616 || TREE_CONSTANT_OVERFLOW (val))
3619 width = TYPE_PRECISION (t);
3620 if (width > HOST_BITS_PER_WIDE_INT)
3622 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3625 mask_hi = ((unsigned HOST_WIDE_INT) -1
3626 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3632 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3635 mask_lo = ((unsigned HOST_WIDE_INT) -1
3636 >> (HOST_BITS_PER_WIDE_INT - width));
3639 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3640 treat VAL as if it were unsigned. */
3641 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3642 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3645 /* Handle extension from a narrower type. */
3646 if (TREE_CODE (exp) == NOP_EXPR
3647 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3648 return sign_bit_p (TREE_OPERAND (exp, 0), val);
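/* Example (added commentary, not from the original sources): for a
   32-bit int x, sign_bit_p (x, 0x80000000) returns x, which lets
   callers rewrite a test like "(x & 0x80000000) != 0" as the cheaper
   "x < 0"; for a NOP_EXPR widening a narrower operand, the narrower
   operand's sign bit is tried as well.  */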
3653 /* Subroutine for fold_truthop: determine if an operand is simple enough
3654 to be evaluated unconditionally. */
3657 simple_operand_p (tree exp)
3659 /* Strip any conversions that don't change the machine mode. */
3662 return (CONSTANT_CLASS_P (exp)
3663 || TREE_CODE (exp) == SSA_NAME
3665 && ! TREE_ADDRESSABLE (exp)
3666 && ! TREE_THIS_VOLATILE (exp)
3667 && ! DECL_NONLOCAL (exp)
3668 /* Don't regard global variables as simple. They may be
3669 allocated in ways unknown to the compiler (shared memory,
3670 #pragma weak, etc). */
3671 && ! TREE_PUBLIC (exp)
3672 && ! DECL_EXTERNAL (exp)
3673 /* Loading a static variable is unduly expensive, but global
3674 registers aren't expensive. */
3675 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3678 /* The following functions are subroutines to fold_range_test and allow it to
3679 try to change a logical combination of comparisons into a range test.
3682 X == 2 || X == 3 || X == 4 || X == 5
3686 (unsigned) (X - 2) <= 3
3688 We describe each set of comparisons as being either inside or outside
3689 a range, using a variable named like IN_P, and then describe the
3690 range with a lower and upper bound. If one of the bounds is omitted,
3691 it represents either the highest or lowest value of the type.
3693 In the comments below, we represent a range by two numbers in brackets
3694 preceded by a "+" to designate being inside that range, or a "-" to
3695 designate being outside that range, so the condition can be inverted by
3696 flipping the prefix. An omitted bound is represented by a "-". For
3697 example, "- [-, 10]" means being outside the range starting at the lowest
3698 possible value and ending at 10, in other words, being greater than 10.
3699 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3702 We set up things so that the missing bounds are handled in a consistent
3703 manner so neither a missing bound nor "true" and "false" need to be
3704 handled using a special case. */
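/* Why the canonical example works (added commentary, not from the
   original sources): in "(unsigned) (X - 2) <= 3", subtracting 2
   maps 2..5 onto 0..3, any X below 2 wraps around to a huge unsigned
   value, and any X above 5 stays above 3, so one unsigned comparison
   replaces the four equality tests.  */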
3706 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3707 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3708 and UPPER1_P are nonzero if the respective argument is an upper bound
3709 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3710 must be specified for a comparison. ARG1 will be converted to ARG0's
3711 type if both are specified. */
3714 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3715 tree arg1, int upper1_p)
3721 /* If neither arg represents infinity, do the normal operation.
3722 Else, if not a comparison, return infinity. Else handle the special
3723 comparison rules. Note that most of the cases below won't occur, but
3724 are handled for consistency. */
3726 if (arg0 != 0 && arg1 != 0)
3728 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3729 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3731 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3734 if (TREE_CODE_CLASS (code) != tcc_comparison)
3737 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3738 for neither. In real maths, we cannot assume open ended ranges are
3739 the same. But, this is computer arithmetic, where numbers are finite.
3740 We can therefore make the transformation of any unbounded range with
3741 the value Z, Z being greater than any representable number. This permits
3742 us to treat unbounded ranges as equal. */
3743 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3744 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3748 result = sgn0 == sgn1;
3751 result = sgn0 != sgn1;
3754 result = sgn0 < sgn1;
3757 result = sgn0 <= sgn1;
3760 result = sgn0 > sgn1;
3763 result = sgn0 >= sgn1;
3769 return constant_boolean_node (result, type);
3772 /* Given EXP, a logical expression, set the range it is testing into
3773 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3774 actually being tested. *PLOW and *PHIGH will be made of the same type
3775 as the returned expression. If EXP is not a comparison, we will most
3776 likely not be returning a useful value and range. */
3779 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3781 enum tree_code code;
3782 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3783 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3785 tree low, high, n_low, n_high;
3787 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3788 and see if we can refine the range. Some of the cases below may not
3789 happen, but it doesn't seem worth worrying about this. We "continue"
3790 the outer loop when we've changed something; otherwise we "break"
3791 the switch, which will "break" the while. */
3794 low = high = build_int_cst (TREE_TYPE (exp), 0);
3798 code = TREE_CODE (exp);
3799 exp_type = TREE_TYPE (exp);
3801 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3803 if (TREE_CODE_LENGTH (code) > 0)
3804 arg0 = TREE_OPERAND (exp, 0);
3805 if (TREE_CODE_CLASS (code) == tcc_comparison
3806 || TREE_CODE_CLASS (code) == tcc_unary
3807 || TREE_CODE_CLASS (code) == tcc_binary)
3808 arg0_type = TREE_TYPE (arg0);
3809 if (TREE_CODE_CLASS (code) == tcc_binary
3810 || TREE_CODE_CLASS (code) == tcc_comparison
3811 || (TREE_CODE_CLASS (code) == tcc_expression
3812 && TREE_CODE_LENGTH (code) > 1))
3813 arg1 = TREE_OPERAND (exp, 1);
3818 case TRUTH_NOT_EXPR:
3819 in_p = ! in_p, exp = arg0;
3822 case EQ_EXPR: case NE_EXPR:
3823 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3824 /* We can only do something if the range is testing for zero
3825 and if the second operand is an integer constant. Note that
3826 saying something is "in" the range we make is done by
3827 complementing IN_P, since it is set for the initial case of
3828 being not equal to zero; "out" means leaving it alone. */
3829 if (low == 0 || high == 0
3830 || ! integer_zerop (low) || ! integer_zerop (high)
3831 || TREE_CODE (arg1) != INTEGER_CST)
3836 case NE_EXPR: /* - [c, c] */
3839 case EQ_EXPR: /* + [c, c] */
3840 in_p = ! in_p, low = high = arg1;
3842 case GT_EXPR: /* - [-, c] */
3843 low = 0, high = arg1;
3845 case GE_EXPR: /* + [c, -] */
3846 in_p = ! in_p, low = arg1, high = 0;
3848 case LT_EXPR: /* - [c, -] */
3849 low = arg1, high = 0;
3851 case LE_EXPR: /* + [-, c] */
3852 in_p = ! in_p, low = 0, high = arg1;
3858 /* If this is an unsigned comparison, we also know that EXP is
3859 greater than or equal to zero. We base the range tests we make
3860 on that fact, so we record it here so we can parse existing
3861 range tests. We test arg0_type since often the return type
3862 of, e.g. EQ_EXPR, is boolean. */
3863 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3865 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3867 build_int_cst (arg0_type, 0),
3871 in_p = n_in_p, low = n_low, high = n_high;
3873 /* If the high bound is missing, but we have a nonzero low
3874 bound, reverse the range so it goes from zero to the low bound
3876 if (high == 0 && low && ! integer_zerop (low))
3879 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3880 integer_one_node, 0);
3881 low = build_int_cst (arg0_type, 0);
3889 /* (-x) IN [a,b] -> x in [-b, -a] */
3890 n_low = range_binop (MINUS_EXPR, exp_type,
3891 build_int_cst (exp_type, 0),
3893 n_high = range_binop (MINUS_EXPR, exp_type,
3894 build_int_cst (exp_type, 0),
3896 low = n_low, high = n_high;
3902 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3903 build_int_cst (exp_type, 1));
3906 case PLUS_EXPR: case MINUS_EXPR:
3907 if (TREE_CODE (arg1) != INTEGER_CST)
3910 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3911 move a constant to the other side. */
3912 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3915 /* If EXP is signed, any overflow in the computation is undefined,
3916 so we don't worry about it so long as our computations on
3917 the bounds don't overflow. For unsigned, overflow is defined
3918 and this is exactly the right thing. */
3919 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3920 arg0_type, low, 0, arg1, 0);
3921 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3922 arg0_type, high, 1, arg1, 0);
3923 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3924 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3927 /* Check for an unsigned range which has wrapped around the maximum
3928 value thus making n_high < n_low, and normalize it. */
3929 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3931 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3932 integer_one_node, 0);
3933 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3934 integer_one_node, 0);
3936 /* If the range is of the form +/- [ x+1, x ], we won't
3937 be able to normalize it. But then, it represents the
3938 whole range or the empty set, so make it
3940 if (tree_int_cst_equal (n_low, low)
3941 && tree_int_cst_equal (n_high, high))
3947 low = n_low, high = n_high;
3952 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3953 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3956 if (! INTEGRAL_TYPE_P (arg0_type)
3957 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3958 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3961 n_low = low, n_high = high;
3964 n_low = fold_convert (arg0_type, n_low);
3967 n_high = fold_convert (arg0_type, n_high);
3970 /* If we're converting arg0 from an unsigned type, to exp,
3971 a signed type, we will be doing the comparison as unsigned.
3972 The tests above have already verified that LOW and HIGH
3975 So we have to ensure that we will handle large unsigned
3976 values the same way that the current signed bounds treat
3979 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3982 tree equiv_type = lang_hooks.types.type_for_mode
3983 (TYPE_MODE (arg0_type), 1);
3985 /* A range without an upper bound is, naturally, unbounded.
3986 Since convert would have cropped a very large value, use
3987 the max value for the destination type. */
3989 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3990 : TYPE_MAX_VALUE (arg0_type);
3992 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3993 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3994 fold_convert (arg0_type,
3996 fold_convert (arg0_type,
3999 /* If the low bound is specified, "and" the range with the
4000 range for which the original unsigned value will be
4004 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4005 1, n_low, n_high, 1,
4006 fold_convert (arg0_type,
4011 in_p = (n_in_p == in_p);
4015 /* Otherwise, "or" the range with the range of the input
4016 that will be interpreted as negative. */
4017 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4018 0, n_low, n_high, 1,
4019 fold_convert (arg0_type,
4024 in_p = (in_p != n_in_p);
4029 low = n_low, high = n_high;
4039 /* If EXP is a constant, we can evaluate whether this is true or false. */
4040 if (TREE_CODE (exp) == INTEGER_CST)
4042 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4044 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4050 *pin_p = in_p, *plow = low, *phigh = high;
4054 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4055 type, TYPE, return an expression to test if EXP is in (or out of, depending
4056 on IN_P) the range. Return 0 if the test couldn't be created. */
4059 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4061 tree etype = TREE_TYPE (exp);
4064 #ifdef HAVE_canonicalize_funcptr_for_compare
4065 /* Disable this optimization for function pointer expressions
4066 on targets that require function pointer canonicalization. */
4067 if (HAVE_canonicalize_funcptr_for_compare
4068 && TREE_CODE (etype) == POINTER_TYPE
4069 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4075 value = build_range_check (type, exp, 1, low, high);
4077 return invert_truthvalue (value);
4082 if (low == 0 && high == 0)
4083 return build_int_cst (type, 1);
4086 return fold_build2 (LE_EXPR, type, exp,
4087 fold_convert (etype, high));
4090 return fold_build2 (GE_EXPR, type, exp,
4091 fold_convert (etype, low));
4093 if (operand_equal_p (low, high, 0))
4094 return fold_build2 (EQ_EXPR, type, exp,
4095 fold_convert (etype, low));
4097 if (integer_zerop (low))
4099 if (! TYPE_UNSIGNED (etype))
4101 etype = lang_hooks.types.unsigned_type (etype);
4102 high = fold_convert (etype, high);
4103 exp = fold_convert (etype, exp);
4105 return build_range_check (type, exp, 1, 0, high);
4108 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4109 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4111 unsigned HOST_WIDE_INT lo;
4115 prec = TYPE_PRECISION (etype);
4116 if (prec <= HOST_BITS_PER_WIDE_INT)
4119 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4123 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4124 lo = (unsigned HOST_WIDE_INT) -1;
4127 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4129 if (TYPE_UNSIGNED (etype))
4131 etype = lang_hooks.types.signed_type (etype);
4132 exp = fold_convert (etype, exp);
4134 return fold_build2 (GT_EXPR, type, exp,
4135 build_int_cst (etype, 0));
4139 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4140 This requires wrap-around arithmetic for the type of the expression. */
4141 switch (TREE_CODE (etype))
4144 /* There is no requirement that LOW be within the range of ETYPE
4145 if the latter is a subtype. It must, however, be within the base
4146 type of ETYPE. So be sure we do the subtraction in that type. */
4147 if (TREE_TYPE (etype))
4148 etype = TREE_TYPE (etype);
4153 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4154 TYPE_UNSIGNED (etype));
4161 /* If we don't have wrap-around arithmetic up front, try to force it. */
4162 if (TREE_CODE (etype) == INTEGER_TYPE
4163 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4165 tree utype, minv, maxv;
4167 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4168 for the type in question, as we rely on this here. */
4169 utype = lang_hooks.types.unsigned_type (etype);
4170 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4171 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4172 integer_one_node, 1);
4173 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4175 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4182 high = fold_convert (etype, high);
4183 low = fold_convert (etype, low);
4184 exp = fold_convert (etype, exp);
4186 value = const_binop (MINUS_EXPR, high, low, 0);
4188 if (value != 0 && !TREE_OVERFLOW (value))
4189 return build_range_check (type,
4190 fold_build2 (MINUS_EXPR, etype, exp, low),
4191 1, build_int_cst (etype, 0), value);
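/* Example (added commentary, not from the original sources): a call
   such as build_range_check (type, x, 1, 2, 5) on a signed x
   subtracts the low bound and recurses on the range [0, 3], and the
   integer_zerop (low) case above then converts to unsigned, so the
   final test is the familiar "(unsigned) (x - 2) <= 3".  */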
4196 /* Return the predecessor of VAL in its type, handling the infinite case. */
4199 range_predecessor (tree val)
4201 tree type = TREE_TYPE (val);
4203 if (INTEGRAL_TYPE_P (type)
4204 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4207 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4210 /* Return the successor of VAL in its type, handling the infinite case. */
4213 range_successor (tree val)
4215 tree type = TREE_TYPE (val);
4217 if (INTEGRAL_TYPE_P (type)
4218 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4221 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4224 /* Given two ranges, see if we can merge them into one. Return 1 if we
4225 can, 0 if we can't. Set the output range into the specified parameters. */
4228 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4229 tree high0, int in1_p, tree low1, tree high1)
4237 int lowequal = ((low0 == 0 && low1 == 0)
4238 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4239 low0, 0, low1, 0)));
4240 int highequal = ((high0 == 0 && high1 == 0)
4241 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4242 high0, 1, high1, 1)));
4244 /* Make range 0 be the range that starts first, or ends last if they
4245 start at the same value. Swap them if that isn't the case. */
4246 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4249 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4250 high1, 1, high0, 1))))
4252 temp = in0_p, in0_p = in1_p, in1_p = temp;
4253 tem = low0, low0 = low1, low1 = tem;
4254 tem = high0, high0 = high1, high1 = tem;
4257 /* Now flag two cases, whether the ranges are disjoint or whether the
4258 second range is totally subsumed in the first. Note that the tests
4259 below are simplified by the ones above. */
4260 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4261 high0, 1, low1, 0));
4262 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4263 high1, 1, high0, 1));
4265 /* We now have four cases, depending on whether we are including or
4266 excluding the two ranges. */
4269 /* If they don't overlap, the result is false. If the second range
4270 is a subset it is the result. Otherwise, the range is from the start
4271 of the second to the end of the first. */
4273 in_p = 0, low = high = 0;
4275 in_p = 1, low = low1, high = high1;
4277 in_p = 1, low = low1, high = high0;
4280 else if (in0_p && ! in1_p)
4282 /* If they don't overlap, the result is the first range. If they are
4283 equal, the result is false. If the second range is a subset of the
4284 first, and the ranges begin at the same place, we go from just after
4285 the end of the second range to the end of the first. If the second
4286 range is not a subset of the first, or if it is a subset and both
4287 ranges end at the same place, the range starts at the start of the
4288 first range and ends just before the second range.
4289 Otherwise, we can't describe this as a single range. */
4291 in_p = 1, low = low0, high = high0;
4292 else if (lowequal && highequal)
4293 in_p = 0, low = high = 0;
4294 else if (subset && lowequal)
4296 low = range_successor (high1);
4300 else if (! subset || highequal)
4303 high = range_predecessor (low1);
4310 else if (! in0_p && in1_p)
4312 /* If they don't overlap, the result is the second range. If the second
4313 is a subset of the first, the result is false. Otherwise,
4314 the range starts just after the first range and ends at the
4315 end of the second. */
4317 in_p = 1, low = low1, high = high1;
4318 else if (subset || highequal)
4319 in_p = 0, low = high = 0;
4322 low = range_successor (high0);
4330 /* The case where we are excluding both ranges. Here the complex case
4331 is if they don't overlap. In that case, the only time we have a
4332 range is if they are adjacent. If the second is a subset of the
4333 first, the result is the first. Otherwise, the range to exclude
4334 starts at the beginning of the first range and ends at the end of the
4338 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4339 range_successor (high0),
4341 in_p = 0, low = low0, high = high1;
4344 /* Canonicalize - [min, x] into - [-, x]. */
4345 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4346 switch (TREE_CODE (TREE_TYPE (low0)))
4349 if (TYPE_PRECISION (TREE_TYPE (low0))
4350 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4354 if (tree_int_cst_equal (low0,
4355 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4359 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4360 && integer_zerop (low0))
4367 /* Canonicalize - [x, max] into - [x, -]. */
4368 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4369 switch (TREE_CODE (TREE_TYPE (high1)))
4372 if (TYPE_PRECISION (TREE_TYPE (high1))
4373 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4377 if (tree_int_cst_equal (high1,
4378 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4382 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4383 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4385 integer_one_node, 1)))
4392 /* The ranges might be also adjacent between the maximum and
4393 minimum values of the given type. For
4394 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4395 return + [x + 1, y - 1]. */
4396 if (low0 == 0 && high1 == 0)
4398 low = range_successor (high0);
4399 high = range_predecessor (low1);
4400 if (low == 0 || high == 0)
4410 in_p = 0, low = low0, high = high0;
4412 in_p = 0, low = low0, high = high1;
4415 *pin_p = in_p, *plow = low, *phigh = high;
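/* Worked example (added commentary, not from the original sources):
   merging + [2, 10] with + [5, 20] (both "in" ranges, i.e. their
   conjunction) yields + [5, 10]: range 0 starts first, the ranges
   overlap, and neither subsumes the other, so the result runs from
   the start of the second range to the end of the first.  */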
4420 /* Subroutine of fold, looking inside expressions of the form
4421 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4422 of the COND_EXPR. This function is being used also to optimize
4423 A op B ? C : A, by reversing the comparison first.
4425 Return a folded expression whose code is not a COND_EXPR
4426 anymore, or NULL_TREE if no folding opportunity is found. */
4429 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4431 enum tree_code comp_code = TREE_CODE (arg0);
4432 tree arg00 = TREE_OPERAND (arg0, 0);
4433 tree arg01 = TREE_OPERAND (arg0, 1);
4434 tree arg1_type = TREE_TYPE (arg1);
4440 /* If we have A op 0 ? A : -A, consider applying the following
4443 A == 0? A : -A same as -A
4444 A != 0? A : -A same as A
4445 A >= 0? A : -A same as abs (A)
4446 A > 0? A : -A same as abs (A)
4447 A <= 0? A : -A same as -abs (A)
4448 A < 0? A : -A same as -abs (A)
4450 None of these transformations work for modes with signed
4451 zeros. If A is +/-0, the first two transformations will
4452 change the sign of the result (from +0 to -0, or vice
4453 versa). The last four will fix the sign of the result,
4454 even though the original expressions could be positive or
4455 negative, depending on the sign of A.
4457 Note that all these transformations are correct if A is
4458 NaN, since the two alternatives (A and -A) are also NaNs. */
4459 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4460 ? real_zerop (arg01)
4461 : integer_zerop (arg01))
4462 && ((TREE_CODE (arg2) == NEGATE_EXPR
4463 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4464 /* In the case that A is of the form X-Y, '-A' (arg2) may
4465 have already been folded to Y-X, check for that. */
4466 || (TREE_CODE (arg1) == MINUS_EXPR
4467 && TREE_CODE (arg2) == MINUS_EXPR
4468 && operand_equal_p (TREE_OPERAND (arg1, 0),
4469 TREE_OPERAND (arg2, 1), 0)
4470 && operand_equal_p (TREE_OPERAND (arg1, 1),
4471 TREE_OPERAND (arg2, 0), 0))))
4476 tem = fold_convert (arg1_type, arg1);
4477 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4480 return pedantic_non_lvalue (fold_convert (type, arg1));
4483 if (flag_trapping_math)
4488 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4489 arg1 = fold_convert (lang_hooks.types.signed_type
4490 (TREE_TYPE (arg1)), arg1);
4491 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4492 return pedantic_non_lvalue (fold_convert (type, tem));
4495 if (flag_trapping_math)
4499 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4500 arg1 = fold_convert (lang_hooks.types.signed_type
4501 (TREE_TYPE (arg1)), arg1);
4502 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4503 return negate_expr (fold_convert (type, tem));
4505 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4509 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4510 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4511 both transformations are correct when A is NaN: A != 0
4512 is then true, and A == 0 is false. */
4514 if (integer_zerop (arg01) && integer_zerop (arg2))
4516 if (comp_code == NE_EXPR)
4517 return pedantic_non_lvalue (fold_convert (type, arg1));
4518 else if (comp_code == EQ_EXPR)
4519 return build_int_cst (type, 0);
4522 /* Try some transformations of A op B ? A : B.
4524 A == B? A : B same as B
4525 A != B? A : B same as A
4526 A >= B? A : B same as max (A, B)
4527 A > B? A : B same as max (B, A)
4528 A <= B? A : B same as min (A, B)
4529 A < B? A : B same as min (B, A)
4531 As above, these transformations don't work in the presence
4532 of signed zeros. For example, if A and B are zeros of
4533 opposite sign, the first two transformations will change
4534 the sign of the result. In the last four, the original
4535 expressions give different results for (A=+0, B=-0) and
4536 (A=-0, B=+0), but the transformed expressions do not.
4538 The first two transformations are correct if either A or B
4539 is a NaN. In the first transformation, the condition will
4540 be false, and B will indeed be chosen. In the case of the
4541 second transformation, the condition A != B will be true,
4542 and A will be chosen.
4544 The conversions to max() and min() are not correct if B is
4545 a number and A is not. The conditions in the original
4546 expressions will be false, so all four give B. The min()
4547 and max() versions would give a NaN instead. */
4548 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4549 /* Avoid these transformations if the COND_EXPR may be used
4550 as an lvalue in the C++ front-end. PR c++/19199. */
4552 || (strcmp (lang_hooks.name, "GNU C++") != 0
4553 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4554 || ! maybe_lvalue_p (arg1)
4555 || ! maybe_lvalue_p (arg2)))
4557 tree comp_op0 = arg00;
4558 tree comp_op1 = arg01;
4559 tree comp_type = TREE_TYPE (comp_op0);
4561 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4562 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4572 return pedantic_non_lvalue (fold_convert (type, arg2));
4574 return pedantic_non_lvalue (fold_convert (type, arg1));
4579 /* In C++ a ?: expression can be an lvalue, so put the
4580 operand which will be used if they are equal first
4581 so that we can convert this back to the
4582 corresponding COND_EXPR. */
4583 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4585 comp_op0 = fold_convert (comp_type, comp_op0);
4586 comp_op1 = fold_convert (comp_type, comp_op1);
4587 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4588 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4589 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4590 return pedantic_non_lvalue (fold_convert (type, tem));
4597 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4599 comp_op0 = fold_convert (comp_type, comp_op0);
4600 comp_op1 = fold_convert (comp_type, comp_op1);
4601 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4602 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4603 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4604 return pedantic_non_lvalue (fold_convert (type, tem));
4608 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4609 return pedantic_non_lvalue (fold_convert (type, arg2));
4612 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4613 return pedantic_non_lvalue (fold_convert (type, arg1));
4616 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4621 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4622 we might still be able to simplify this. For example,
4623 if C1 is one less or one more than C2, this might have started
4624 out as a MIN or MAX and been transformed by this function.
4625 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4627 if (INTEGRAL_TYPE_P (type)
4628 && TREE_CODE (arg01) == INTEGER_CST
4629 && TREE_CODE (arg2) == INTEGER_CST)
4633 /* We can replace A with C1 in this case. */
4634 arg1 = fold_convert (type, arg01);
4635 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4638 /* If C1 is C2 + 1, this is min(A, C2). */
4639 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4641 && operand_equal_p (arg01,
4642 const_binop (PLUS_EXPR, arg2,
4643 integer_one_node, 0),
4645 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4650 /* If C1 is C2 - 1, this is min(A, C2). */
4651 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4653 && operand_equal_p (arg01,
4654 const_binop (MINUS_EXPR, arg2,
4655 integer_one_node, 0),
4657 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4662 /* If C1 is C2 - 1, this is max(A, C2). */
4663 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4665 && operand_equal_p (arg01,
4666 const_binop (MINUS_EXPR, arg2,
4667 integer_one_node, 0),
4669 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4674 /* If C1 is C2 + 1, this is max(A, C2). */
4675 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4677 && operand_equal_p (arg01,
4678 const_binop (PLUS_EXPR, arg2,
4679 integer_one_node, 0),
4681 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4695 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4696 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
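
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): the A op 0 ? A : -A and A op B ? A : B rewrites that
   fold_cond_expr_with_comparison above performs, checked on plain ints,
   where the signed-zero and NaN caveats in its comments do not apply:  */
#if 0
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  for (int a = -50; a <= 50; a++)
    {
      assert ((a >= 0 ? a : -a) == abs (a));   /* A >= 0 ? A : -A == abs (A) */
      assert ((a <= 0 ? a : -a) == -abs (a));  /* A <= 0 ? A : -A == -abs (A) */
      for (int b = -50; b <= 50; b++)
        {
          assert ((a >= b ? a : b) == (a > b ? a : b));  /* both are max (A, B) */
          assert ((a <= b ? a : b) == (a < b ? a : b));  /* both are min (A, B) */
        }
    }
  return 0;
}
#endif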
4699 /* EXP is some logical combination of boolean tests. See if we can
4700 merge it into some range test. Return the new tree if so. */
4703 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4705 int or_op = (code == TRUTH_ORIF_EXPR
4706 || code == TRUTH_OR_EXPR);
4707 int in0_p, in1_p, in_p;
4708 tree low0, low1, low, high0, high1, high;
4709 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4710 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4713 /* If this is an OR operation, invert both sides; we will invert
4714 again at the end. */
4716 in0_p = ! in0_p, in1_p = ! in1_p;
4718 /* If both expressions are the same, if we can merge the ranges, and we
4719 can build the range test, return it or it inverted. If one of the
4720 ranges is always true or always false, consider it to be the same
4721 expression as the other. */
4722 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4723 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4725 && 0 != (tem = (build_range_check (type,
4727 : rhs != 0 ? rhs : integer_zero_node,
4729 return or_op ? invert_truthvalue (tem) : tem;
4731 /* On machines where the branch cost is expensive, if this is a
4732 short-circuited branch and the underlying object on both sides
4733 is the same, make a non-short-circuit operation. */
4734 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4735 && lhs != 0 && rhs != 0
4736 && (code == TRUTH_ANDIF_EXPR
4737 || code == TRUTH_ORIF_EXPR)
4738 && operand_equal_p (lhs, rhs, 0))
4740 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4741 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4742 which cases we can't do this. */
4743 if (simple_operand_p (lhs))
4744 return build2 (code == TRUTH_ANDIF_EXPR
4745 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4748 else if (lang_hooks.decls.global_bindings_p () == 0
4749 && ! CONTAINS_PLACEHOLDER_P (lhs))
4751 tree common = save_expr (lhs);
4753 if (0 != (lhs = build_range_check (type, common,
4754 or_op ? ! in0_p : in0_p,
4756 && (0 != (rhs = build_range_check (type, common,
4757 or_op ? ! in1_p : in1_p,
4759 return build2 (code == TRUTH_ANDIF_EXPR
4760 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
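
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): the classic range test that fold_range_test and
   build_range_check produce, e.g. "ch >= '0' && ch <= '9'" becoming a
   single unsigned comparison after subtracting the low bound:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int ch = 0; ch < 256; ch++)
    assert ((ch >= '0' && ch <= '9')
            == ((unsigned) (ch - '0') <= 9u));
  return 0;
}
#endif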
4768 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4769 bit value. Arrange things so the extra bits will be set to zero if and
4770 only if C is sign-extended to its full width. If MASK is nonzero,
4771 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4774 unextend (tree c, int p, int unsignedp, tree mask)
4776 tree type = TREE_TYPE (c);
4777 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4780 if (p == modesize || unsignedp)
4783 /* We work by getting just the sign bit into the low-order bit, then
4784 into the high-order bit, then sign-extend. We then XOR that value with C. */
4786 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4787 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4789 /* We must use a signed type in order to get an arithmetic right shift.
4790 However, we must also avoid introducing accidental overflows, so that
4791 a subsequent call to integer_zerop will work. Hence we must
4792 do the type conversion here. At this point, the constant is either
4793 zero or one, and the conversion to a signed type can never overflow.
4794 We could get an overflow if this conversion is done anywhere else. */
4795 if (TYPE_UNSIGNED (type))
4796 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4798 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4799 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4801 temp = const_binop (BIT_AND_EXPR, temp,
4802 fold_convert (TREE_TYPE (c), mask), 0);
4803 /* If necessary, convert the type back to match the type of C. */
4804 if (TYPE_UNSIGNED (type))
4805 temp = fold_convert (type, temp);
4807 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
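
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): the shift/XOR trick described above, on a P-bit value
   stored in 32 bits.  It assumes the usual two's-complement arithmetic
   right shift for signed types.  The extra bits of the result are zero
   exactly when C was already sign-extended to the full width:  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
unextend_sketch (uint32_t c, int p)
{
  /* Get just the sign bit of the P-bit value into the low-order bit.  */
  uint32_t temp = (c >> (p - 1)) & 1;
  /* Move it to the high-order bit and arithmetic-shift it down so the
     high (32 - P) bits replicate the sign bit.  */
  int32_t ext = (int32_t) (temp << 31) >> (32 - p - 1);
  /* XOR clears the copies exactly when C was already sign-extended.  */
  return c ^ (uint32_t) ext;
}

int
main (void)
{
  assert (unextend_sketch (0xffffffffu, 4) == 0xfu);        /* sign-extended -1 */
  assert (unextend_sketch (0x0000000fu, 4) == 0xffffffffu); /* not extended */
  assert (unextend_sketch (0x00000007u, 4) == 0x7u);        /* positive value */
  return 0;
}
#endif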
4810 /* Find ways of folding logical expressions of LHS and RHS:
4811 Try to merge two comparisons to the same innermost item.
4812 Look for range tests like "ch >= '0' && ch <= '9'".
4813 Look for combinations of simple terms on machines with expensive branches
4814 and evaluate the RHS unconditionally.
4816 For example, if we have p->a == 2 && p->b == 4 and we can make an
4817 object large enough to span both A and B, we can do this with a comparison
4818 against the object ANDed with a mask.
4820 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4821 operations to do this with one comparison.
4823 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4824 function and the one above.
4826 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4827 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4829 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4832 We return the simplified tree or 0 if no optimization is possible. */
4835 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4837 /* If this is the "or" of two comparisons, we can do something if
4838 the comparisons are NE_EXPR. If this is the "and", we can do something
4839 if the comparisons are EQ_EXPR. I.e.,
4840 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4842 WANTED_CODE is this operation code. For single bit fields, we can
4843 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4844 comparison for one-bit fields. */
4846 enum tree_code wanted_code;
4847 enum tree_code lcode, rcode;
4848 tree ll_arg, lr_arg, rl_arg, rr_arg;
4849 tree ll_inner, lr_inner, rl_inner, rr_inner;
4850 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4851 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4852 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4853 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4854 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4855 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4856 enum machine_mode lnmode, rnmode;
4857 tree ll_mask, lr_mask, rl_mask, rr_mask;
4858 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4859 tree l_const, r_const;
4860 tree lntype, rntype, result;
4861 int first_bit, end_bit;
4863 tree orig_lhs = lhs, orig_rhs = rhs;
4864 enum tree_code orig_code = code;
4866 /* Start by getting the comparison codes. Fail if anything is volatile.
4867 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4868 it were surrounded with a NE_EXPR. */
4870 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4873 lcode = TREE_CODE (lhs);
4874 rcode = TREE_CODE (rhs);
4876 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4878 lhs = build2 (NE_EXPR, truth_type, lhs,
4879 build_int_cst (TREE_TYPE (lhs), 0));
4883 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4885 rhs = build2 (NE_EXPR, truth_type, rhs,
4886 build_int_cst (TREE_TYPE (rhs), 0));
4890 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4891 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4894 ll_arg = TREE_OPERAND (lhs, 0);
4895 lr_arg = TREE_OPERAND (lhs, 1);
4896 rl_arg = TREE_OPERAND (rhs, 0);
4897 rr_arg = TREE_OPERAND (rhs, 1);
4899 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4900 if (simple_operand_p (ll_arg)
4901 && simple_operand_p (lr_arg))
4904 if (operand_equal_p (ll_arg, rl_arg, 0)
4905 && operand_equal_p (lr_arg, rr_arg, 0))
4907 result = combine_comparisons (code, lcode, rcode,
4908 truth_type, ll_arg, lr_arg);
4912 else if (operand_equal_p (ll_arg, rr_arg, 0)
4913 && operand_equal_p (lr_arg, rl_arg, 0))
4915 result = combine_comparisons (code, lcode,
4916 swap_tree_comparison (rcode),
4917 truth_type, ll_arg, lr_arg);
4923 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4924 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4926 /* If the RHS can be evaluated unconditionally and its operands are
4927 simple, it wins to evaluate the RHS unconditionally on machines
4928 with expensive branches. In this case, this isn't a comparison
4929 that can be merged. Avoid doing this if the RHS is a floating-point
4930 comparison since those can trap. */
4932 if (BRANCH_COST >= 2
4933 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4934 && simple_operand_p (rl_arg)
4935 && simple_operand_p (rr_arg))
4937 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4938 if (code == TRUTH_OR_EXPR
4939 && lcode == NE_EXPR && integer_zerop (lr_arg)
4940 && rcode == NE_EXPR && integer_zerop (rr_arg)
4941 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4942 return build2 (NE_EXPR, truth_type,
4943 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4945 build_int_cst (TREE_TYPE (ll_arg), 0));
4947 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4948 if (code == TRUTH_AND_EXPR
4949 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4950 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4951 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4952 return build2 (EQ_EXPR, truth_type,
4953 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4955 build_int_cst (TREE_TYPE (ll_arg), 0));
4957 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4959 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4960 return build2 (code, truth_type, lhs, rhs);
4965 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4968 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4969 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4973 ll_inner = decode_field_reference (ll_arg,
4974 &ll_bitsize, &ll_bitpos, &ll_mode,
4975 &ll_unsignedp, &volatilep, &ll_mask,
4977 lr_inner = decode_field_reference (lr_arg,
4978 &lr_bitsize, &lr_bitpos, &lr_mode,
4979 &lr_unsignedp, &volatilep, &lr_mask,
4981 rl_inner = decode_field_reference (rl_arg,
4982 &rl_bitsize, &rl_bitpos, &rl_mode,
4983 &rl_unsignedp, &volatilep, &rl_mask,
4985 rr_inner = decode_field_reference (rr_arg,
4986 &rr_bitsize, &rr_bitpos, &rr_mode,
4987 &rr_unsignedp, &volatilep, &rr_mask,
4990 /* The inner operation on the lhs of each comparison must be the same
4991 if we are to be able to do anything. Then see if we have constants.
4992 If not, the same must be true for the rhs. */
4994 if (volatilep || ll_inner == 0 || rl_inner == 0
4995 || ! operand_equal_p (ll_inner, rl_inner, 0))
4998 if (TREE_CODE (lr_arg) == INTEGER_CST
4999 && TREE_CODE (rr_arg) == INTEGER_CST)
5000 l_const = lr_arg, r_const = rr_arg;
5001 else if (lr_inner == 0 || rr_inner == 0
5002 || ! operand_equal_p (lr_inner, rr_inner, 0))
5005 l_const = r_const = 0;
5007 /* If either comparison code is not correct for our logical operation,
5008 fail. However, we can convert a one-bit comparison against zero into
5009 the opposite comparison against that bit being set in the field. */
5011 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5012 if (lcode != wanted_code)
5014 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5016 /* Make the left operand unsigned, since we are only interested
5017 in the value of one bit. Otherwise we are doing the wrong
5026 /* This is analogous to the code for l_const above. */
5027 if (rcode != wanted_code)
5029 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5038 /* After this point all optimizations will generate bit-field
5039 references, which we might not want. */
5040 if (! lang_hooks.can_use_bit_fields_p ())
5043 /* See if we can find a mode that contains both fields being compared on
5044 the left. If we can't, fail. Otherwise, update all constants and masks
5045 to be relative to a field of that size. */
5046 first_bit = MIN (ll_bitpos, rl_bitpos);
5047 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5048 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5049 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5051 if (lnmode == VOIDmode)
5054 lnbitsize = GET_MODE_BITSIZE (lnmode);
5055 lnbitpos = first_bit & ~ (lnbitsize - 1);
5056 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5057 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5059 if (BYTES_BIG_ENDIAN)
5061 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5062 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5065 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5066 size_int (xll_bitpos), 0);
5067 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5068 size_int (xrl_bitpos), 0);
5072 l_const = fold_convert (lntype, l_const);
5073 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5074 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5075 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5076 fold_build1 (BIT_NOT_EXPR,
5080 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5082 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5087 r_const = fold_convert (lntype, r_const);
5088 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5089 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5090 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5091 fold_build1 (BIT_NOT_EXPR,
5095 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5097 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5101 /* If the right sides are not constant, do the same for them. Also,
5102 disallow this optimization if a size or signedness mismatch occurs
5103 between the left and right sides. */
5106 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5107 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5108 /* Make sure the two fields on the right
5109 correspond to the left without being swapped. */
5110 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5113 first_bit = MIN (lr_bitpos, rr_bitpos);
5114 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5115 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5116 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5118 if (rnmode == VOIDmode)
5121 rnbitsize = GET_MODE_BITSIZE (rnmode);
5122 rnbitpos = first_bit & ~ (rnbitsize - 1);
5123 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5124 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5126 if (BYTES_BIG_ENDIAN)
5128 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5129 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5132 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5133 size_int (xlr_bitpos), 0);
5134 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5135 size_int (xrr_bitpos), 0);
5137 /* Make a mask that corresponds to both fields being compared.
5138 Do this for both items being compared. If the operands are the
5139 same size and the bits being compared are in the same position
5140 then we can do this by masking both and comparing the masked results. */
5142 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5143 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5144 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5146 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5147 ll_unsignedp || rl_unsignedp);
5148 if (! all_ones_mask_p (ll_mask, lnbitsize))
5149 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5151 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5152 lr_unsignedp || rr_unsignedp);
5153 if (! all_ones_mask_p (lr_mask, rnbitsize))
5154 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5156 return build2 (wanted_code, truth_type, lhs, rhs);
5159 /* There is still another way we can do something: If both pairs of
5160 fields being compared are adjacent, we may be able to make a wider
5161 field containing them both.
5163 Note that we still must mask the lhs/rhs expressions. Furthermore,
5164 the mask must be shifted to account for the shift done by
5165 make_bit_field_ref. */
5166 if ((ll_bitsize + ll_bitpos == rl_bitpos
5167 && lr_bitsize + lr_bitpos == rr_bitpos)
5168 || (ll_bitpos == rl_bitpos + rl_bitsize
5169 && lr_bitpos == rr_bitpos + rr_bitsize))
5173 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5174 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5175 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5176 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5178 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5179 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5180 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5181 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5183 /* Convert to the smaller type before masking out unwanted bits. */
5185 if (lntype != rntype)
5187 if (lnbitsize > rnbitsize)
5189 lhs = fold_convert (rntype, lhs);
5190 ll_mask = fold_convert (rntype, ll_mask);
5193 else if (lnbitsize < rnbitsize)
5195 rhs = fold_convert (lntype, rhs);
5196 lr_mask = fold_convert (lntype, lr_mask);
5201 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5202 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5204 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5205 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5207 return build2 (wanted_code, truth_type, lhs, rhs);
5213 /* Handle the case of comparisons with constants. If there is something in
5214 common between the masks, those bits of the constants must be the same.
5215 If not, the condition is always false. Test for this to avoid generating
5216 incorrect code below. */
5217 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5218 if (! integer_zerop (result)
5219 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5220 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5222 if (wanted_code == NE_EXPR)
5224 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5225 return constant_boolean_node (true, truth_type);
5229 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5230 return constant_boolean_node (false, truth_type);
5234 /* Construct the expression we will return. First get the component
5235 reference we will make. Unless the mask is all ones the width of
5236 that field, perform the mask operation. Then compare with the merged constant. */
5238 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5239 ll_unsignedp || rl_unsignedp);
5241 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5242 if (! all_ones_mask_p (ll_mask, lnbitsize))
5243 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5245 return build2 (wanted_code, truth_type, result,
5246 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
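
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): how two adjacent byte comparisons can become one wider
   masked comparison, as fold_truthop arranges.  Endianness determines
   where each field lands in the wider word; a little-endian layout and
   a padding-free struct are assumed here:  */
#if 0
#include <assert.h>
#include <string.h>
#include <stdint.h>

struct s { unsigned char a; unsigned char b; };

int
main (void)
{
  struct s v = { 2, 4 };
  uint16_t word;
  memcpy (&word, &v, sizeof word);
  /* On a little-endian target, (v.a == 2 && v.b == 4) is the single
     16-bit test word == (4 << 8 | 2).  */
  assert ((v.a == 2 && v.b == 4) == (word == ((4 << 8) | 2)));
  return 0;
}
#endif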
5249 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5253 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5256 enum tree_code op_code;
5257 tree comp_const = op1;
5259 int consts_equal, consts_lt;
5262 STRIP_SIGN_NOPS (arg0);
5264 op_code = TREE_CODE (arg0);
5265 minmax_const = TREE_OPERAND (arg0, 1);
5266 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5267 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5268 inner = TREE_OPERAND (arg0, 0);
5270 /* If something does not permit us to optimize, return the original tree. */
5271 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5272 || TREE_CODE (comp_const) != INTEGER_CST
5273 || TREE_CONSTANT_OVERFLOW (comp_const)
5274 || TREE_CODE (minmax_const) != INTEGER_CST
5275 || TREE_CONSTANT_OVERFLOW (minmax_const))
5278 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5279 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5283 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5285 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5288 return invert_truthvalue (tem);
5294 fold_build2 (TRUTH_ORIF_EXPR, type,
5295 optimize_minmax_comparison
5296 (EQ_EXPR, type, arg0, comp_const),
5297 optimize_minmax_comparison
5298 (GT_EXPR, type, arg0, comp_const));
5301 if (op_code == MAX_EXPR && consts_equal)
5302 /* MAX (X, 0) == 0 -> X <= 0 */
5303 return fold_build2 (LE_EXPR, type, inner, comp_const);
5305 else if (op_code == MAX_EXPR && consts_lt)
5306 /* MAX (X, 0) == 5 -> X == 5 */
5307 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5309 else if (op_code == MAX_EXPR)
5310 /* MAX (X, 0) == -1 -> false */
5311 return omit_one_operand (type, integer_zero_node, inner);
5313 else if (consts_equal)
5314 /* MIN (X, 0) == 0 -> X >= 0 */
5315 return fold_build2 (GE_EXPR, type, inner, comp_const);
5318 /* MIN (X, 0) == 5 -> false */
5319 return omit_one_operand (type, integer_zero_node, inner);
5322 /* MIN (X, 0) == -1 -> X == -1 */
5323 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5326 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5327 /* MAX (X, 0) > 0 -> X > 0
5328 MAX (X, 0) > 5 -> X > 5 */
5329 return fold_build2 (GT_EXPR, type, inner, comp_const);
5331 else if (op_code == MAX_EXPR)
5332 /* MAX (X, 0) > -1 -> true */
5333 return omit_one_operand (type, integer_one_node, inner);
5335 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5336 /* MIN (X, 0) > 0 -> false
5337 MIN (X, 0) > 5 -> false */
5338 return omit_one_operand (type, integer_zero_node, inner);
5341 /* MIN (X, 0) > -1 -> X > -1 */
5342 return fold_build2 (GT_EXPR, type, inner, comp_const);
5349 /* T is an integer expression that is being multiplied, divided, or taken a
5350 modulus (CODE says which and what kind of divide or modulus) by a
5351 constant C. See if we can eliminate that operation by folding it with
5352 other operations already in T. WIDE_TYPE, if non-null, is a type that
5353 should be used for the computation if wider than our type.
5355 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5356 (X * 2) + (Y * 4). We must, however, be assured that either the original
5357 expression would not overflow or that overflow is undefined for the type
5358 in the language in question.
5360 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5361 the machine has a multiply-accumulate insn or that this is part of an
5362 addressing calculation.
5364 If we return a non-null expression, it is an equivalent form of the
5365 original computation, but need not be in the original type. */
5368 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5370 /* To avoid exponential search depth, refuse to allow recursion past
5371 three levels. Beyond that (1) it's highly unlikely that we'll find
5372 something interesting and (2) we've probably processed it before
5373 when we built the inner expression. */
5382 ret = extract_muldiv_1 (t, c, code, wide_type);
5389 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5391 tree type = TREE_TYPE (t);
5392 enum tree_code tcode = TREE_CODE (t);
5393 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5394 > GET_MODE_SIZE (TYPE_MODE (type)))
5395 ? wide_type : type);
5397 int same_p = tcode == code;
5398 tree op0 = NULL_TREE, op1 = NULL_TREE;
5400 /* Don't deal with constants of zero here; they confuse the code below. */
5401 if (integer_zerop (c))
5404 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5405 op0 = TREE_OPERAND (t, 0);
5407 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5408 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5410 /* Note that we need not handle conditional operations here since fold
5411 already handles those cases. So just do arithmetic here. */
5415 /* For a constant, we can always simplify if we are a multiply
5416 or (for divide and modulus) if it is a multiple of our constant. */
5417 if (code == MULT_EXPR
5418 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5419 return const_binop (code, fold_convert (ctype, t),
5420 fold_convert (ctype, c), 0);
5423 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5424 /* If op0 is an expression ... */
5425 if ((COMPARISON_CLASS_P (op0)
5426 || UNARY_CLASS_P (op0)
5427 || BINARY_CLASS_P (op0)
5428 || EXPRESSION_CLASS_P (op0))
5429 /* ... and is unsigned, and its type is smaller than ctype,
5430 then we cannot pass through as widening. */
5431 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5432 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5433 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5434 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5435 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5436 /* ... or this is a truncation (t is narrower than op0),
5437 then we cannot pass through this narrowing. */
5438 || (GET_MODE_SIZE (TYPE_MODE (type))
5439 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5440 /* ... or signedness changes for division or modulus,
5441 then we cannot pass through this conversion. */
5442 || (code != MULT_EXPR
5443 && (TYPE_UNSIGNED (ctype)
5444 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5447 /* Pass the constant down and see if we can make a simplification. If
5448 we can, replace this expression with the inner simplification for
5449 possible later conversion to our or some other type. */
5450 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5451 && TREE_CODE (t2) == INTEGER_CST
5452 && ! TREE_CONSTANT_OVERFLOW (t2)
5453 && (0 != (t1 = extract_muldiv (op0, t2, code,
5455 ? ctype : NULL_TREE))))
5460 /* If widening the type changes it from signed to unsigned, then we
5461 must avoid building ABS_EXPR itself as unsigned. */
5462 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5464 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5465 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5467 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5468 return fold_convert (ctype, t1);
5474 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5475 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5478 case MIN_EXPR: case MAX_EXPR:
5479 /* If widening the type changes the signedness, then we can't perform
5480 this optimization as that changes the result. */
5481 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5484 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5485 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5486 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5488 if (tree_int_cst_sgn (c) < 0)
5489 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5491 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5492 fold_convert (ctype, t2));
5496 case LSHIFT_EXPR: case RSHIFT_EXPR:
5497 /* If the second operand is constant, this is a multiplication
5498 or floor division, by a power of two, so we can treat it that
5499 way unless the multiplier or divisor overflows. Signed
5500 left-shift overflow is implementation-defined rather than
5501 undefined in C90, so do not convert signed left shift into multiplication. */
5503 if (TREE_CODE (op1) == INTEGER_CST
5504 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5505 /* const_binop may not detect overflow correctly,
5506 so check for it explicitly here. */
5507 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5508 && TREE_INT_CST_HIGH (op1) == 0
5509 && 0 != (t1 = fold_convert (ctype,
5510 const_binop (LSHIFT_EXPR,
5513 && ! TREE_OVERFLOW (t1))
5514 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5515 ? MULT_EXPR : FLOOR_DIV_EXPR,
5516 ctype, fold_convert (ctype, op0), t1),
5517 c, code, wide_type);
5520 case PLUS_EXPR: case MINUS_EXPR:
5521 /* See if we can eliminate the operation on both sides. If we can, we
5522 can return a new PLUS or MINUS. If we can't, the only remaining
5523 cases where we can do anything are if the second operand is a constant. */
5525 t1 = extract_muldiv (op0, c, code, wide_type);
5526 t2 = extract_muldiv (op1, c, code, wide_type);
5527 if (t1 != 0 && t2 != 0
5528 && (code == MULT_EXPR
5529 /* If not multiplication, we can only do this if both operands
5530 are divisible by c. */
5531 || (multiple_of_p (ctype, op0, c)
5532 && multiple_of_p (ctype, op1, c))))
5533 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5534 fold_convert (ctype, t2));
5536 /* If this was a subtraction, negate OP1 and set it to be an addition.
5537 This simplifies the logic below. */
5538 if (tcode == MINUS_EXPR)
5539 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5541 if (TREE_CODE (op1) != INTEGER_CST)
5544 /* If either OP1 or C is negative, this optimization is not safe for
5545 some of the division and remainder types while for others we need
5546 to change the code. */
5547 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5549 if (code == CEIL_DIV_EXPR)
5550 code = FLOOR_DIV_EXPR;
5551 else if (code == FLOOR_DIV_EXPR)
5552 code = CEIL_DIV_EXPR;
5553 else if (code != MULT_EXPR
5554 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5558 /* If it's a multiply or a division/modulus operation of a multiple
5559 of our constant, do the operation and verify it doesn't overflow. */
5560 if (code == MULT_EXPR
5561 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5563 op1 = const_binop (code, fold_convert (ctype, op1),
5564 fold_convert (ctype, c), 0);
5565 /* We allow the constant to overflow with wrapping semantics. */
5567 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5573 /* If we have an unsigned type that is not a sizetype, we cannot widen
5574 the operation since it will change the result if the original
5575 computation overflowed. */
5576 if (TYPE_UNSIGNED (ctype)
5577 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5581 /* If we were able to eliminate our operation from the first side,
5582 apply our operation to the second side and reform the PLUS. */
5583 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5584 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5586 /* The last case is if we are a multiply. In that case, we can
5587 apply the distributive law to commute the multiply and addition
5588 if the multiplication of the constants doesn't overflow. */
5589 if (code == MULT_EXPR)
5590 return fold_build2 (tcode, ctype,
5591 fold_build2 (code, ctype,
5592 fold_convert (ctype, op0),
5593 fold_convert (ctype, c)),
5599 /* We have a special case here if we are doing something like
5600 (C * 8) % 4 since we know that's zero. */
5601 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5602 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5603 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5604 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5605 return omit_one_operand (type, integer_zero_node, op0);
5607 /* ... fall through ... */
5609 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5610 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5611 /* If we can extract our operation from the LHS, do so and return a
5612 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5613 do something only if the second operand is a constant. */
5615 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5616 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5617 fold_convert (ctype, op1));
5618 else if (tcode == MULT_EXPR && code == MULT_EXPR
5619 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5620 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5621 fold_convert (ctype, t1));
5622 else if (TREE_CODE (op1) != INTEGER_CST)
5625 /* If these are the same operation types, we can associate them
5626 assuming no overflow. */
5628 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5629 fold_convert (ctype, c), 0))
5630 && ! TREE_OVERFLOW (t1))
5631 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5633 /* If these operations "cancel" each other, we have the main
5634 optimizations of this pass, which occur when either constant is a
5635 multiple of the other, in which case we replace this with either an
5636 operation of CODE or TCODE.
5638 If we have an unsigned type that is not a sizetype, we cannot do
5639 this since it will change the result if the original computation overflowed. */
5641 if ((! TYPE_UNSIGNED (ctype)
5642 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5644 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5645 || (tcode == MULT_EXPR
5646 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5647 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5649 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5650 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5651 fold_convert (ctype,
5652 const_binop (TRUNC_DIV_EXPR,
5654 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5655 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5656 fold_convert (ctype,
5657 const_binop (TRUNC_DIV_EXPR,
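
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): the motivating example from the extract_muldiv comment,
   (X * 8 + Y * 16) / 4 rewritten as X * 2 + Y * 4.  The identity is
   exact because both multipliers are multiples of the divisor, assuming
   none of the intermediate products overflows:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    for (int y = -100; y <= 100; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
#endif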
5669 /* Return a node which has the indicated constant VALUE (either 0 or
5670 1), and is of the indicated TYPE. */
5673 constant_boolean_node (int value, tree type)
5675 if (type == integer_type_node)
5676 return value ? integer_one_node : integer_zero_node;
5677 else if (type == boolean_type_node)
5678 return value ? boolean_true_node : boolean_false_node;
5680 return build_int_cst (type, value);
5684 /* Return true if expr looks like an ARRAY_REF and set base and
5685 offset to the appropriate trees. If there is no offset,
5686 offset is set to NULL_TREE. Base will be canonicalized to
5687 something you can get the element type from using
5688 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5689 in bytes to the base. */
5692 extract_array_ref (tree expr, tree *base, tree *offset)
5694 /* One canonical form is a PLUS_EXPR with the first
5695 argument being an ADDR_EXPR with a possible NOP_EXPR attached. */
5697 if (TREE_CODE (expr) == PLUS_EXPR)
5699 tree op0 = TREE_OPERAND (expr, 0);
5700 tree inner_base, dummy1;
5701 /* Strip NOP_EXPRs here because the C frontends and/or
5702 folders may present us with (int *)&x.a + 4B. */
5704 if (extract_array_ref (op0, &inner_base, &dummy1))
5707 if (dummy1 == NULL_TREE)
5708 *offset = TREE_OPERAND (expr, 1);
5710 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5711 dummy1, TREE_OPERAND (expr, 1));
5715 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5716 which we transform into an ADDR_EXPR with appropriate
5717 offset. For other arguments to the ADDR_EXPR we assume
5718 zero offset and as such do not care about the ADDR_EXPR
5719 type and strip possible nops from it. */
5720 else if (TREE_CODE (expr) == ADDR_EXPR)
5722 tree op0 = TREE_OPERAND (expr, 0);
5723 if (TREE_CODE (op0) == ARRAY_REF)
5725 tree idx = TREE_OPERAND (op0, 1);
5726 *base = TREE_OPERAND (op0, 0);
5727 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5728 array_ref_element_size (op0));
5732 /* Handle array-to-pointer decay as &a. */
5733 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5734 *base = TREE_OPERAND (expr, 0);
5737 *offset = NULL_TREE;
5741 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5742 else if (SSA_VAR_P (expr)
5743 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5746 *offset = NULL_TREE;
5754 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5755 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5756 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5757 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5758 COND is the first argument to CODE; otherwise (as in the example
5759 given here), it is the second argument. TYPE is the type of the
5760 original expression. Return NULL_TREE if no simplification is possible. */
5764 fold_binary_op_with_conditional_arg (enum tree_code code,
5765 tree type, tree op0, tree op1,
5766 tree cond, tree arg, int cond_first_p)
5768 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5769 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5770 tree test, true_value, false_value;
5771 tree lhs = NULL_TREE;
5772 tree rhs = NULL_TREE;
5774 /* This transformation is only worthwhile if we don't have to wrap
5775 arg in a SAVE_EXPR, and the operation can be simplified on at least
5776 one of the branches once it's pushed inside the COND_EXPR. */
5777 if (!TREE_CONSTANT (arg))
5780 if (TREE_CODE (cond) == COND_EXPR)
5782 test = TREE_OPERAND (cond, 0);
5783 true_value = TREE_OPERAND (cond, 1);
5784 false_value = TREE_OPERAND (cond, 2);
5785 /* If this operand throws an exception, then it does not make
5786 sense to try to perform a logical or arithmetic operation involving it. */
5788 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5790 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5795 tree testtype = TREE_TYPE (cond);
5797 true_value = constant_boolean_node (true, testtype);
5798 false_value = constant_boolean_node (false, testtype);
5801 arg = fold_convert (arg_type, arg);
5804 true_value = fold_convert (cond_type, true_value);
5806 lhs = fold_build2 (code, type, true_value, arg);
5808 lhs = fold_build2 (code, type, arg, true_value);
5812 false_value = fold_convert (cond_type, false_value);
5814 rhs = fold_build2 (code, type, false_value, arg);
5816 rhs = fold_build2 (code, type, arg, false_value);
5819 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5820 return fold_convert (type, test);
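
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): distributing a binary operation into the arms of a
   COND_EXPR, as the function above does.  Valid here because the
   operands are side-effect free:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -5; a <= 5; a++)
    for (int x = -5; x <= 5; x++)
      for (int y = -5; y <= 5; y++)
        for (int b = 0; b <= 1; b++)
          {
            /* a + (b ? x : y)  ==  b ? (a + x) : (a + y)  */
            assert ((a + (b ? x : y)) == (b ? a + x : a + y));
            /* a + (x < y)  ==  (x < y) ? (a + 1) : (a + 0)  */
            assert ((a + (x < y)) == (x < y ? a + 1 : a + 0));
          }
  return 0;
}
#endif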
5824 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5826 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5827 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5828 ADDEND is the same as X.
5830 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5831 and finite. The problematic cases are when X is zero, and its mode
5832 has signed zeros. In the case of rounding towards -infinity,
5833 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5834 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5837 fold_real_zero_addition_p (tree type, tree addend, int negate)
5839 if (!real_zerop (addend))
5842 /* Don't allow the fold with -fsignaling-nans. */
5843 if (HONOR_SNANS (TYPE_MODE (type)))
5846 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5847 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5850 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5851 if (TREE_CODE (addend) == REAL_CST
5852 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5855 /* The mode has signed zeros, and we have to honor their sign.
5856 In this situation, there is only one case we can return true for.
5857 X - 0 is the same as X unless rounding towards -infinity is in effect. */
5859 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
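
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): why X + 0.0 cannot be folded to X when signed zeros are
   honored.  With X = -0.0 and the default round-to-nearest mode,
   -0.0 + 0.0 is +0.0, so the fold would flip the sign bit:  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (signbit (x));
  assert (!signbit (x + 0.0));  /* folding X + 0 -> X would keep the sign */
  assert (signbit (x - 0.0));   /* X - 0 does preserve -0.0 here */
  return 0;
}
#endif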
5862 /* Subroutine of fold() that checks comparisons of built-in math
5863 functions against real constants.
5865 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5866 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5867 is the type of the result and ARG0 and ARG1 are the operands of the
5868 comparison. ARG1 must be a TREE_REAL_CST.
5870 The function returns the constant folded tree if a simplification
5871 can be made, and NULL_TREE otherwise. */
5874 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5875 tree type, tree arg0, tree arg1)
5879 if (BUILTIN_SQRT_P (fcode))
5881 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5882 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5884 c = TREE_REAL_CST (arg1);
5885 if (REAL_VALUE_NEGATIVE (c))
5887 /* sqrt(x) < y is always false, if y is negative. */
5888 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5889 return omit_one_operand (type, integer_zero_node, arg);
5891 /* sqrt(x) > y is always true, if y is negative and we
5892 don't care about NaNs, i.e. negative values of x. */
5893 if (code == NE_EXPR || !HONOR_NANS (mode))
5894 return omit_one_operand (type, integer_one_node, arg);
5896 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5897 return fold_build2 (GE_EXPR, type, arg,
5898 build_real (TREE_TYPE (arg), dconst0));
5900 else if (code == GT_EXPR || code == GE_EXPR)
5904 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5905 real_convert (&c2, mode, &c2);
5907 if (REAL_VALUE_ISINF (c2))
5909 /* sqrt(x) > y is x == +Inf, when y is very large. */
5910 if (HONOR_INFINITIES (mode))
5911 return fold_build2 (EQ_EXPR, type, arg,
5912 build_real (TREE_TYPE (arg), c2));
5914 /* sqrt(x) > y is always false, when y is very large
5915 and we don't care about infinities. */
5916 return omit_one_operand (type, integer_zero_node, arg);
5919 /* sqrt(x) > c is the same as x > c*c. */
5920 return fold_build2 (code, type, arg,
5921 build_real (TREE_TYPE (arg), c2));
5923 else if (code == LT_EXPR || code == LE_EXPR)
5927 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5928 real_convert (&c2, mode, &c2);
5930 if (REAL_VALUE_ISINF (c2))
5932 /* sqrt(x) < y is always true, when y is a very large
5933 value and we don't care about NaNs or Infinities. */
5934 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5935 return omit_one_operand (type, integer_one_node, arg);
5937 /* sqrt(x) < y is x != +Inf when y is very large and we
5938 don't care about NaNs. */
5939 if (! HONOR_NANS (mode))
5940 return fold_build2 (NE_EXPR, type, arg,
5941 build_real (TREE_TYPE (arg), c2));
5943 /* sqrt(x) < y is x >= 0 when y is very large and we
5944 don't care about Infinities. */
5945 if (! HONOR_INFINITIES (mode))
5946 return fold_build2 (GE_EXPR, type, arg,
5947 build_real (TREE_TYPE (arg), dconst0));
5949 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5950 if (lang_hooks.decls.global_bindings_p () != 0
5951 || CONTAINS_PLACEHOLDER_P (arg))
5954 arg = save_expr (arg);
5955 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5956 fold_build2 (GE_EXPR, type, arg,
5957 build_real (TREE_TYPE (arg),
5959 fold_build2 (NE_EXPR, type, arg,
5960 build_real (TREE_TYPE (arg),
5964 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5965 if (! HONOR_NANS (mode))
5966 return fold_build2 (code, type, arg,
5967 build_real (TREE_TYPE (arg), c2));
5969 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5970 if (lang_hooks.decls.global_bindings_p () == 0
5971 && ! CONTAINS_PLACEHOLDER_P (arg))
5973 arg = save_expr (arg);
5974 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5975 fold_build2 (GE_EXPR, type, arg,
5976 build_real (TREE_TYPE (arg),
5978 fold_build2 (code, type, arg,
5979 build_real (TREE_TYPE (arg),
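
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): the sqrt(x) > c <-> x > c*c rewrite for a nonnegative
   constant c, ignoring the NaN and rounding subtleties handled above
   (c*c is exactly representable for the values used here):  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 3.0;  /* c*c == 9.0 exactly */
  double xs[] = { 0.0, 1.0, 8.9, 9.0, 9.1, 100.0 };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    assert ((sqrt (xs[i]) > c) == (xs[i] > c * c));
  return 0;
}
#endif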
5988 /* Subroutine of fold() that optimizes comparisons against Infinities,
5989 either +Inf or -Inf.
5991 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5992 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5993 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5995 The function returns the constant folded tree if a simplification
5996 can be made, and NULL_TREE otherwise. */
5999 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6001 enum machine_mode mode;
6002 REAL_VALUE_TYPE max;
6006 mode = TYPE_MODE (TREE_TYPE (arg0));
6008 /* For negative infinity swap the sense of the comparison. */
6009 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6011 code = swap_tree_comparison (code);
6016 /* x > +Inf is always false, if we ignore sNaNs. */
6017 if (HONOR_SNANS (mode))
6019 return omit_one_operand (type, integer_zero_node, arg0);
6022 /* x <= +Inf is always true, if we don't care about NaNs. */
6023 if (! HONOR_NANS (mode))
6024 return omit_one_operand (type, integer_one_node, arg0);
6026 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6027 if (lang_hooks.decls.global_bindings_p () == 0
6028 && ! CONTAINS_PLACEHOLDER_P (arg0))
6030 arg0 = save_expr (arg0);
6031 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6037 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6038 real_maxval (&max, neg, mode);
6039 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6040 arg0, build_real (TREE_TYPE (arg0), max));
6043 /* x < +Inf is always equal to x <= DBL_MAX. */
6044 real_maxval (&max, neg, mode);
6045 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6046 arg0, build_real (TREE_TYPE (arg0), max));
6049 /* x != +Inf is always equal to !(x > DBL_MAX). */
6050 real_maxval (&max, neg, mode);
6051 if (! HONOR_NANS (mode))
6052 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6053 arg0, build_real (TREE_TYPE (arg0), max));
6055 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6056 arg0, build_real (TREE_TYPE (arg0), max));
6057 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
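
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): the x == +Inf <-> x > DBL_MAX rewrite used above.  Both
   sides are also false for NaN, so the EQ case needs no NaN guard:  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double xs[] = { 0.0, -1.0, DBL_MAX, INFINITY, -INFINITY, NAN };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    assert ((xs[i] == INFINITY) == (xs[i] > DBL_MAX));
  return 0;
}
#endif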
6066 /* Subroutine of fold() that optimizes comparisons of a division by
6067 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6070 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6071 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6072 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6074 The function returns the constant folded tree if a simplification
6075 can be made, and NULL_TREE otherwise. */
6078 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6080 tree prod, tmp, hi, lo;
6081 tree arg00 = TREE_OPERAND (arg0, 0);
6082 tree arg01 = TREE_OPERAND (arg0, 1);
6083 unsigned HOST_WIDE_INT lpart;
6084 HOST_WIDE_INT hpart;
6085 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6089 /* We have to do this the hard way to detect unsigned overflow.
6090 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6091 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6092 TREE_INT_CST_HIGH (arg01),
6093 TREE_INT_CST_LOW (arg1),
6094 TREE_INT_CST_HIGH (arg1),
6095 &lpart, &hpart, unsigned_p);
6096 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6097 prod = force_fit_type (prod, -1, overflow, false);
6098 neg_overflow = false;
6102 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6105 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6106 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6107 TREE_INT_CST_HIGH (prod),
6108 TREE_INT_CST_LOW (tmp),
6109 TREE_INT_CST_HIGH (tmp),
6110 &lpart, &hpart, unsigned_p);
6111 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6112 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6113 TREE_CONSTANT_OVERFLOW (prod));
6115 else if (tree_int_cst_sgn (arg01) >= 0)
6117 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6118 switch (tree_int_cst_sgn (arg1))
6121 neg_overflow = true;
6122 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6127 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6132 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6142 /* A negative divisor reverses the relational operators. */
6143 code = swap_tree_comparison (code);
6145 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6146 switch (tree_int_cst_sgn (arg1))
6149 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6154 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6159 neg_overflow = true;
6160 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6172 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6173 return omit_one_operand (type, integer_zero_node, arg00);
6174 if (TREE_OVERFLOW (hi))
6175 return fold_build2 (GE_EXPR, type, arg00, lo);
6176 if (TREE_OVERFLOW (lo))
6177 return fold_build2 (LE_EXPR, type, arg00, hi);
6178 return build_range_check (type, arg00, 1, lo, hi);
6181 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6182 return omit_one_operand (type, integer_one_node, arg00);
6183 if (TREE_OVERFLOW (hi))
6184 return fold_build2 (LT_EXPR, type, arg00, lo);
6185 if (TREE_OVERFLOW (lo))
6186 return fold_build2 (GT_EXPR, type, arg00, hi);
6187 return build_range_check (type, arg00, 0, lo, hi);
6190 if (TREE_OVERFLOW (lo))
6192 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6193 return omit_one_operand (type, tmp, arg00);
6195 return fold_build2 (LT_EXPR, type, arg00, lo);
6198 if (TREE_OVERFLOW (hi))
6200 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6201 return omit_one_operand (type, tmp, arg00);
6203 return fold_build2 (LE_EXPR, type, arg00, hi);
6206 if (TREE_OVERFLOW (hi))
6208 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6209 return omit_one_operand (type, tmp, arg00);
6211 return fold_build2 (GT_EXPR, type, arg00, hi);
6214 if (TREE_OVERFLOW (lo))
6216 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6217 return omit_one_operand (type, tmp, arg00);
6219 return fold_build2 (GE_EXPR, type, arg00, lo);
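
/* Illustrative sketch (editorial example, not part of the GCC sources;
   guarded out): X/C1 op C2 becomes a range check.  With truncating
   signed division, x / 4 == 5 holds exactly for x in [20, 23]:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    assert ((x / 4 == 5) == (x >= 20 && x <= 23));
  return 0;
}
#endif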
6229 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6230 equality/inequality test, then return a simplified form of the test
6231 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6235 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6238 /* If this is testing a single bit, we can optimize the test. */
6239 if ((code == NE_EXPR || code == EQ_EXPR)
6240 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6241 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6243 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6244 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6245 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6247 if (arg00 != NULL_TREE
6248 /* This is only a win if casting to a signed type is cheap,
6249 i.e. when arg00's type is not a partial mode. */
6250 && TYPE_PRECISION (TREE_TYPE (arg00))
6251 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6253 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6254 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6255 result_type, fold_convert (stype, arg00),
6256 build_int_cst (stype, 0));
6263 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6264 equality/inequality test, then return a simplified form of
6265 the test using shifts and logical operations. Otherwise return
6266 NULL. TYPE is the desired result type. */
6269 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6272 /* If this is testing a single bit, we can optimize the test. */
6273 if ((code == NE_EXPR || code == EQ_EXPR)
6274 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6275 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6277 tree inner = TREE_OPERAND (arg0, 0);
6278 tree type = TREE_TYPE (arg0);
6279 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6280 enum machine_mode operand_mode = TYPE_MODE (type);
6282 tree signed_type, unsigned_type, intermediate_type;
6285 /* First, see if we can fold the single bit test into a sign-bit test. */
6287 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6292 /* Otherwise we have (A & C) != 0 where C is a single bit,
6293 convert that into ((A >> C2) & 1), where C2 = log2(C).
6294 Similarly for (A & C) == 0. */
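/* Worked example: (A & 8) != 0 becomes ((A >> 3) & 1), since
   8 == 1 << 3; for the == 0 form the result is additionally
   XORed with 1 below. */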
6296 /* If INNER is a right shift of a constant and it plus BITNUM does
6297 not overflow, adjust BITNUM and INNER. */
6298 if (TREE_CODE (inner) == RSHIFT_EXPR
6299 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6300 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6301 && bitnum < TYPE_PRECISION (type)
6302 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6303 bitnum - TYPE_PRECISION (type)))
6305 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6306 inner = TREE_OPERAND (inner, 0);
6309 /* If we are going to be able to omit the AND below, we must do our
6310 operations as unsigned. If we must use the AND, we have a choice.
6311 Normally unsigned is faster, but for some machines signed is. */
6312 #ifdef LOAD_EXTEND_OP
6313 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6314 && !flag_syntax_only) ? 0 : 1;
6319 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6320 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6321 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6322 inner = fold_convert (intermediate_type, inner);
6325 inner = build2 (RSHIFT_EXPR, intermediate_type,
6326 inner, size_int (bitnum));
6328 if (code == EQ_EXPR)
6329 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6330 inner, integer_one_node);
6332 /* Put the AND last so it can combine with more things. */
6333 inner = build2 (BIT_AND_EXPR, intermediate_type,
6334 inner, integer_one_node);
6336 /* Make sure to return the proper type. */
6337 inner = fold_convert (result_type, inner);
6344 /* Check whether we are allowed to reorder operands arg0 and arg1,
6345 such that the evaluation of arg1 occurs before arg0. */
6348 reorder_operands_p (tree arg0, tree arg1)
6350 if (! flag_evaluation_order)
6352 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6354 return ! TREE_SIDE_EFFECTS (arg0)
6355 && ! TREE_SIDE_EFFECTS (arg1);
6358 /* Test whether it is preferable to swap two operands, ARG0 and
6359 ARG1, for example because ARG0 is an integer constant and ARG1
6360 isn't. If REORDER is true, only recommend swapping if we can
6361 evaluate the operands in reverse order. */
6364 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6366 STRIP_SIGN_NOPS (arg0);
6367 STRIP_SIGN_NOPS (arg1);
6369 if (TREE_CODE (arg1) == INTEGER_CST)
6371 if (TREE_CODE (arg0) == INTEGER_CST)
6374 if (TREE_CODE (arg1) == REAL_CST)
6376 if (TREE_CODE (arg0) == REAL_CST)
6379 if (TREE_CODE (arg1) == COMPLEX_CST)
6381 if (TREE_CODE (arg0) == COMPLEX_CST)
6384 if (TREE_CONSTANT (arg1))
6386 if (TREE_CONSTANT (arg0))
6392 if (reorder && flag_evaluation_order
6393 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6401 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6402 for commutative and comparison operators. Ensuring a canonical
6403 form allows the optimizers to find additional redundancies without
6404 having to explicitly check for both orderings. */
6405 if (TREE_CODE (arg0) == SSA_NAME
6406 && TREE_CODE (arg1) == SSA_NAME
6407 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6413 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6414 ARG0 is extended to a wider type. */
6417 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6419 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6421 tree shorter_type, outer_type;
6425 if (arg0_unw == arg0)
6427 shorter_type = TREE_TYPE (arg0_unw);
6429 #ifdef HAVE_canonicalize_funcptr_for_compare
6430 /* Disable this optimization if we're casting a function pointer
6431 type on targets that require function pointer canonicalization. */
6432 if (HAVE_canonicalize_funcptr_for_compare
6433 && TREE_CODE (shorter_type) == POINTER_TYPE
6434 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6438 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6441 arg1_unw = get_unwidened (arg1, shorter_type);
6443 /* If possible, express the comparison in the shorter mode. */
6444 if ((code == EQ_EXPR || code == NE_EXPR
6445 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6446 && (TREE_TYPE (arg1_unw) == shorter_type
6447 || (TREE_CODE (arg1_unw) == INTEGER_CST
6448 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6449 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6450 && int_fits_type_p (arg1_unw, shorter_type))))
6451 return fold_build2 (code, type, arg0_unw,
6452 fold_convert (shorter_type, arg1_unw));
6454 if (TREE_CODE (arg1_unw) != INTEGER_CST
6455 || TREE_CODE (shorter_type) != INTEGER_TYPE
6456 || !int_fits_type_p (arg1_unw, shorter_type))
6459 /* If we are comparing with an integer that does not fit into the range
6460 of the shorter type, the result is known. */
6461 outer_type = TREE_TYPE (arg1_unw);
6462 min = lower_bound_in_type (outer_type, shorter_type);
6463 max = upper_bound_in_type (outer_type, shorter_type);
6465 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6467 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6474 return omit_one_operand (type, integer_zero_node, arg0);
6479 return omit_one_operand (type, integer_one_node, arg0);
6485 return omit_one_operand (type, integer_one_node, arg0);
6487 return omit_one_operand (type, integer_zero_node, arg0);
6492 return omit_one_operand (type, integer_zero_node, arg0);
6494 return omit_one_operand (type, integer_one_node, arg0);
6503 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6504 ARG0 just the signedness is changed. */
6507 fold_sign_changed_comparison (enum tree_code code, tree type,
6508 tree arg0, tree arg1)
6510 tree arg0_inner, tmp;
6511 tree inner_type, outer_type;
6513 if (TREE_CODE (arg0) != NOP_EXPR
6514 && TREE_CODE (arg0) != CONVERT_EXPR)
6517 outer_type = TREE_TYPE (arg0);
6518 arg0_inner = TREE_OPERAND (arg0, 0);
6519 inner_type = TREE_TYPE (arg0_inner);
6521 #ifdef HAVE_canonicalize_funcptr_for_compare
6522 /* Disable this optimization if we're casting a function pointer
6523 type on targets that require function pointer canonicalization. */
6524 if (HAVE_canonicalize_funcptr_for_compare
6525 && TREE_CODE (inner_type) == POINTER_TYPE
6526 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6530 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6533 if (TREE_CODE (arg1) != INTEGER_CST
6534 && !((TREE_CODE (arg1) == NOP_EXPR
6535 || TREE_CODE (arg1) == CONVERT_EXPR)
6536 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6539 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6544 if (TREE_CODE (arg1) == INTEGER_CST)
6546 tmp = build_int_cst_wide (inner_type,
6547 TREE_INT_CST_LOW (arg1),
6548 TREE_INT_CST_HIGH (arg1));
6549 arg1 = force_fit_type (tmp, 0,
6550 TREE_OVERFLOW (arg1),
6551 TREE_CONSTANT_OVERFLOW (arg1));
6554 arg1 = fold_convert (inner_type, arg1);
6556 return fold_build2 (code, type, arg0_inner, arg1);
6559 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6560 the step of the array. Reconstructs s and delta in the case of s * delta
6561 being an integer constant (and thus already folded).
6562 ADDR is the address. MULT is the multiplicative expression.
6563 If the function succeeds, the new address expression is returned. Otherwise
6564 NULL_TREE is returned. */
6567 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6569 tree s, delta, step;
6570 tree ref = TREE_OPERAND (addr, 0), pref;
6574 /* Canonicalize op1 into a possibly non-constant delta
6575 and an INTEGER_CST s. */
6576 if (TREE_CODE (op1) == MULT_EXPR)
6578 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6583 if (TREE_CODE (arg0) == INTEGER_CST)
6588 else if (TREE_CODE (arg1) == INTEGER_CST)
6596 else if (TREE_CODE (op1) == INTEGER_CST)
6603 /* Treat op1 as delta * 1. */
6605 s = integer_one_node;
6608 for (;; ref = TREE_OPERAND (ref, 0))
6610 if (TREE_CODE (ref) == ARRAY_REF)
6612 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6616 step = array_ref_element_size (ref);
6617 if (TREE_CODE (step) != INTEGER_CST)
6622 if (! tree_int_cst_equal (step, s))
6627 /* Check whether delta is a multiple of step. */
6628 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6637 if (!handled_component_p (ref))
6641 /* We found a suitable array reference. Copy everything up to it,
6642 and replace the index. */
6644 pref = TREE_OPERAND (addr, 0);
6645 ret = copy_node (pref);
6650 pref = TREE_OPERAND (pref, 0);
6651 TREE_OPERAND (pos, 0) = copy_node (pref);
6652 pos = TREE_OPERAND (pos, 0);
6655 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6656 fold_convert (itype,
6657 TREE_OPERAND (pos, 1)),
6658 fold_convert (itype, delta));
6660 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6664 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6665 means A >= Y && A != MAX, but in this case we know that
6666 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
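/* Worked example: in A < 10 && A + 1 > Y, the bound A < 10 shows A
   cannot be the maximum value, so A + 1 cannot wrap and A + 1 > Y
   folds to A >= Y. */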
6669 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6671 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6673 if (TREE_CODE (bound) == LT_EXPR)
6674 a = TREE_OPERAND (bound, 0);
6675 else if (TREE_CODE (bound) == GT_EXPR)
6676 a = TREE_OPERAND (bound, 1);
6680 typea = TREE_TYPE (a);
6681 if (!INTEGRAL_TYPE_P (typea)
6682 && !POINTER_TYPE_P (typea))
6685 if (TREE_CODE (ineq) == LT_EXPR)
6687 a1 = TREE_OPERAND (ineq, 1);
6688 y = TREE_OPERAND (ineq, 0);
6690 else if (TREE_CODE (ineq) == GT_EXPR)
6692 a1 = TREE_OPERAND (ineq, 0);
6693 y = TREE_OPERAND (ineq, 1);
6698 if (TREE_TYPE (a1) != typea)
6701 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6702 if (!integer_onep (diff))
6705 return fold_build2 (GE_EXPR, type, a, y);
6708 /* Fold a sum or difference of at least one multiplication.
6709 Returns the folded tree or NULL if no simplification could be made. */
6712 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6714 tree arg00, arg01, arg10, arg11;
6715 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6717 /* (A * C) +- (B * C) -> (A+-B) * C.
6718 (A * C) +- A -> A * (C+-1).
6719 We are most concerned about the case where C is a constant,
6720 but other combinations show up during loop reduction. Since
6721 it is not difficult, try all four possibilities. */
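/* Worked examples: (x * 4) + (y * 4) folds to (x + y) * 4, and
   (x * 4) + x folds to x * 5 via the implicit multiplier 1. */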
6723 if (TREE_CODE (arg0) == MULT_EXPR)
6725 arg00 = TREE_OPERAND (arg0, 0);
6726 arg01 = TREE_OPERAND (arg0, 1);
6731 arg01 = build_one_cst (type);
6733 if (TREE_CODE (arg1) == MULT_EXPR)
6735 arg10 = TREE_OPERAND (arg1, 0);
6736 arg11 = TREE_OPERAND (arg1, 1);
6741 arg11 = build_one_cst (type);
6745 if (operand_equal_p (arg01, arg11, 0))
6746 same = arg01, alt0 = arg00, alt1 = arg10;
6747 else if (operand_equal_p (arg00, arg10, 0))
6748 same = arg00, alt0 = arg01, alt1 = arg11;
6749 else if (operand_equal_p (arg00, arg11, 0))
6750 same = arg00, alt0 = arg01, alt1 = arg10;
6751 else if (operand_equal_p (arg01, arg10, 0))
6752 same = arg01, alt0 = arg00, alt1 = arg11;
6754 /* No identical multiplicands; see if we can find a common
6755 power-of-two factor in non-power-of-two multiplies. This
6756 can help in multi-dimensional array access. */
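/* Worked example: i * 12 + j * 4 has no identical multiplicand, but
   4 is a power of two dividing 12, so this folds to (i * 3 + j) * 4. */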
6757 else if (host_integerp (arg01, 0)
6758 && host_integerp (arg11, 0))
6760 HOST_WIDE_INT int01, int11, tmp;
6763 int01 = TREE_INT_CST_LOW (arg01);
6764 int11 = TREE_INT_CST_LOW (arg11);
6766 /* Move min of absolute values to int11. */
6767 if ((int01 >= 0 ? int01 : -int01)
6768 < (int11 >= 0 ? int11 : -int11))
6770 tmp = int01, int01 = int11, int11 = tmp;
6771 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6778 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6780 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6781 build_int_cst (TREE_TYPE (arg00),
6786 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6791 return fold_build2 (MULT_EXPR, type,
6792 fold_build2 (code, type,
6793 fold_convert (type, alt0),
6794 fold_convert (type, alt1)),
6795 fold_convert (type, same));
6800 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6801 specified by EXPR into the buffer PTR of length LEN bytes.
6802 Return the number of bytes placed in the buffer, or zero upon failure. */
6806 native_encode_int (tree expr, unsigned char *ptr, int len)
6808 tree type = TREE_TYPE (expr);
6809 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6810 int byte, offset, word, words;
6811 unsigned char value;
6813 if (total_bytes > len)
6815 words = total_bytes / UNITS_PER_WORD;
6817 for (byte = 0; byte < total_bytes; byte++)
6819 int bitpos = byte * BITS_PER_UNIT;
6820 if (bitpos < HOST_BITS_PER_WIDE_INT)
6821 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6823 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6824 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6826 if (total_bytes > UNITS_PER_WORD)
6828 word = byte / UNITS_PER_WORD;
6829 if (WORDS_BIG_ENDIAN)
6830 word = (words - 1) - word;
6831 offset = word * UNITS_PER_WORD;
6832 if (BYTES_BIG_ENDIAN)
6833 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6835 offset += byte % UNITS_PER_WORD;
6838 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6839 ptr[offset] = value;
6845 /* Subroutine of native_encode_expr. Encode the REAL_CST
6846 specified by EXPR into the buffer PTR of length LEN bytes.
6847 Return the number of bytes placed in the buffer, or zero upon failure. */
6851 native_encode_real (tree expr, unsigned char *ptr, int len)
6853 tree type = TREE_TYPE (expr);
6854 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6855 int byte, offset, word, words;
6856 unsigned char value;
6858 /* There are always 32 bits in each long, no matter the size of
6859 the host's long. We handle floating point representations with up to 192 bits. */
6863 if (total_bytes > len)
6865 words = total_bytes / UNITS_PER_WORD;
6867 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6869 for (byte = 0; byte < total_bytes; byte++)
6871 int bitpos = byte * BITS_PER_UNIT;
6872 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6874 if (total_bytes > UNITS_PER_WORD)
6876 word = byte / UNITS_PER_WORD;
6877 if (FLOAT_WORDS_BIG_ENDIAN)
6878 word = (words - 1) - word;
6879 offset = word * UNITS_PER_WORD;
6880 if (BYTES_BIG_ENDIAN)
6881 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6883 offset += byte % UNITS_PER_WORD;
6886 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6887 ptr[offset] = value;
6892 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6893 specified by EXPR into the buffer PTR of length LEN bytes.
6894 Return the number of bytes placed in the buffer, or zero upon failure. */
6898 native_encode_complex (tree expr, unsigned char *ptr, int len)
6903 part = TREE_REALPART (expr);
6904 rsize = native_encode_expr (part, ptr, len);
6907 part = TREE_IMAGPART (expr);
6908 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6911 return rsize + isize;
6915 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6916 specified by EXPR into the buffer PTR of length LEN bytes.
6917 Return the number of bytes placed in the buffer, or zero upon failure. */
6921 native_encode_vector (tree expr, unsigned char *ptr, int len)
6923 int i, size, offset, count;
6924 tree itype, elem, elements;
6927 elements = TREE_VECTOR_CST_ELTS (expr);
6928 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6929 itype = TREE_TYPE (TREE_TYPE (expr));
6930 size = GET_MODE_SIZE (TYPE_MODE (itype));
6931 for (i = 0; i < count; i++)
6935 elem = TREE_VALUE (elements);
6936 elements = TREE_CHAIN (elements);
6943 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6948 if (offset + size > len)
6950 memset (ptr+offset, 0, size);
6958 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6959 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6960 buffer PTR of length LEN bytes. Return the number of bytes
6961 placed in the buffer, or zero upon failure. */
6964 native_encode_expr (tree expr, unsigned char *ptr, int len)
6966 switch (TREE_CODE (expr))
6969 return native_encode_int (expr, ptr, len);
6972 return native_encode_real (expr, ptr, len);
6975 return native_encode_complex (expr, ptr, len);
6978 return native_encode_vector (expr, ptr, len);
6986 /* Subroutine of native_interpret_expr. Interpret the contents of
6987 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6988 If the buffer cannot be interpreted, return NULL_TREE. */
6991 native_interpret_int (tree type, unsigned char *ptr, int len)
6993 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6994 int byte, offset, word, words;
6995 unsigned char value;
6996 unsigned HOST_WIDE_INT lo = 0;
6997 HOST_WIDE_INT hi = 0;
6999 if (total_bytes > len)
7001 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7003 words = total_bytes / UNITS_PER_WORD;
7005 for (byte = 0; byte < total_bytes; byte++)
7007 int bitpos = byte * BITS_PER_UNIT;
7008 if (total_bytes > UNITS_PER_WORD)
7010 word = byte / UNITS_PER_WORD;
7011 if (WORDS_BIG_ENDIAN)
7012 word = (words - 1) - word;
7013 offset = word * UNITS_PER_WORD;
7014 if (BYTES_BIG_ENDIAN)
7015 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7017 offset += byte % UNITS_PER_WORD;
7020 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7021 value = ptr[offset];
7023 if (bitpos < HOST_BITS_PER_WIDE_INT)
7024 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7026 hi |= (unsigned HOST_WIDE_INT) value
7027 << (bitpos - HOST_BITS_PER_WIDE_INT);
7030 return force_fit_type (build_int_cst_wide (type, lo, hi),
7035 /* Subroutine of native_interpret_expr. Interpret the contents of
7036 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7037 If the buffer cannot be interpreted, return NULL_TREE. */
7040 native_interpret_real (tree type, unsigned char *ptr, int len)
7042 enum machine_mode mode = TYPE_MODE (type);
7043 int total_bytes = GET_MODE_SIZE (mode);
7044 int byte, offset, word, words;
7045 unsigned char value;
7046 /* There are always 32 bits in each long, no matter the size of
7047 the host's long. We handle floating point representations with up to 192 bits. */
7052 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7053 if (total_bytes > len || total_bytes > 24)
7055 words = total_bytes / UNITS_PER_WORD;
7057 memset (tmp, 0, sizeof (tmp));
7058 for (byte = 0; byte < total_bytes; byte++)
7060 int bitpos = byte * BITS_PER_UNIT;
7061 if (total_bytes > UNITS_PER_WORD)
7063 word = byte / UNITS_PER_WORD;
7064 if (FLOAT_WORDS_BIG_ENDIAN)
7065 word = (words - 1) - word;
7066 offset = word * UNITS_PER_WORD;
7067 if (BYTES_BIG_ENDIAN)
7068 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7070 offset += byte % UNITS_PER_WORD;
7073 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7074 value = ptr[offset];
7076 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7079 real_from_target (&r, tmp, mode);
7080 return build_real (type, r);
7084 /* Subroutine of native_interpret_expr. Interpret the contents of
7085 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7086 If the buffer cannot be interpreted, return NULL_TREE. */
7089 native_interpret_complex (tree type, unsigned char *ptr, int len)
7091 tree etype, rpart, ipart;
7094 etype = TREE_TYPE (type);
7095 size = GET_MODE_SIZE (TYPE_MODE (etype));
7098 rpart = native_interpret_expr (etype, ptr, size);
7101 ipart = native_interpret_expr (etype, ptr+size, size);
7104 return build_complex (type, rpart, ipart);
7108 /* Subroutine of native_interpret_expr. Interpret the contents of
7109 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7110 If the buffer cannot be interpreted, return NULL_TREE. */
7113 native_interpret_vector (tree type, unsigned char *ptr, int len)
7115 tree etype, elem, elements;
7118 etype = TREE_TYPE (type);
7119 size = GET_MODE_SIZE (TYPE_MODE (etype));
7120 count = TYPE_VECTOR_SUBPARTS (type);
7121 if (size * count > len)
7124 elements = NULL_TREE;
7125 for (i = count - 1; i >= 0; i--)
7127 elem = native_interpret_expr (etype, ptr+(i*size), size);
7130 elements = tree_cons (NULL_TREE, elem, elements);
7132 return build_vector (type, elements);
7136 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7137 the buffer PTR of length LEN as a constant of type TYPE. For
7138 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7139 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7140 return NULL_TREE. */
7143 native_interpret_expr (tree type, unsigned char *ptr, int len)
7145 switch (TREE_CODE (type))
7150 return native_interpret_int (type, ptr, len);
7153 return native_interpret_real (type, ptr, len);
7156 return native_interpret_complex (type, ptr, len);
7159 return native_interpret_vector (type, ptr, len);
7167 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7168 TYPE at compile-time. If we're unable to perform the conversion
7169 return NULL_TREE. */
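/* Illustration: on an IEEE single-float target,
   VIEW_CONVERT_EXPR<int>(1.0f) is folded by encoding the REAL_CST
   into its byte image and re-reading it as an int, giving
   0x3f800000. */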
7172 fold_view_convert_expr (tree type, tree expr)
7174 /* We support up to 512-bit values (for V8DFmode). */
7175 unsigned char buffer[64];
7178 /* Check that the host and target are sane. */
7179 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7182 len = native_encode_expr (expr, buffer, sizeof (buffer));
7186 return native_interpret_expr (type, buffer, len);
7190 /* Fold a unary expression of code CODE and type TYPE with operand
7191 OP0. Return the folded expression if folding is successful.
7192 Otherwise, return NULL_TREE. */
7195 fold_unary (enum tree_code code, tree type, tree op0)
7199 enum tree_code_class kind = TREE_CODE_CLASS (code);
7201 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7202 && TREE_CODE_LENGTH (code) == 1);
7207 if (code == NOP_EXPR || code == CONVERT_EXPR
7208 || code == FLOAT_EXPR || code == ABS_EXPR)
7210 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7212 STRIP_SIGN_NOPS (arg0);
7216 /* Strip any conversions that don't change the mode. This
7217 is safe for every expression, except for a comparison
7218 expression because its signedness is derived from its
7221 Note that this is done as an internal manipulation within
7222 the constant folder, in order to find the simplest
7223 representation of the arguments so that their form can be
7224 studied. In any case, the appropriate type conversions
7225 should be put back in the tree that will get out of the constant folder. */
7231 if (TREE_CODE_CLASS (code) == tcc_unary)
7233 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7234 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7235 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7236 else if (TREE_CODE (arg0) == COND_EXPR)
7238 tree arg01 = TREE_OPERAND (arg0, 1);
7239 tree arg02 = TREE_OPERAND (arg0, 2);
7240 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7241 arg01 = fold_build1 (code, type, arg01);
7242 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7243 arg02 = fold_build1 (code, type, arg02);
7244 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7247 /* If this was a conversion, and all we did was to move it
7248 inside the COND_EXPR, bring it back out. But leave it if
7249 it is a conversion from integer to integer and the
7250 result precision is no wider than a word since such a
7251 conversion is cheap and may be optimized away by combine,
7252 while it couldn't if it were outside the COND_EXPR. Then return
7253 so we don't get into an infinite recursion loop taking the
7254 conversion out and then back in. */
7256 if ((code == NOP_EXPR || code == CONVERT_EXPR
7257 || code == NON_LVALUE_EXPR)
7258 && TREE_CODE (tem) == COND_EXPR
7259 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7260 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7261 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7262 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7263 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7264 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7265 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7267 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7268 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7269 || flag_syntax_only))
7270 tem = build1 (code, type,
7272 TREE_TYPE (TREE_OPERAND
7273 (TREE_OPERAND (tem, 1), 0)),
7274 TREE_OPERAND (tem, 0),
7275 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7276 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7279 else if (COMPARISON_CLASS_P (arg0))
7281 if (TREE_CODE (type) == BOOLEAN_TYPE)
7283 arg0 = copy_node (arg0);
7284 TREE_TYPE (arg0) = type;
7287 else if (TREE_CODE (type) != INTEGER_TYPE)
7288 return fold_build3 (COND_EXPR, type, arg0,
7289 fold_build1 (code, type,
7291 fold_build1 (code, type,
7292 integer_zero_node));
7301 case FIX_TRUNC_EXPR:
7303 case FIX_FLOOR_EXPR:
7304 case FIX_ROUND_EXPR:
7305 if (TREE_TYPE (op0) == type)
7308 /* If we have (type) (a CMP b) and type is an integral type, return
7309 new expression involving the new type. */
7310 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7311 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7312 TREE_OPERAND (op0, 1));
7314 /* Handle cases of two conversions in a row. */
7315 if (TREE_CODE (op0) == NOP_EXPR
7316 || TREE_CODE (op0) == CONVERT_EXPR)
7318 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7319 tree inter_type = TREE_TYPE (op0);
7320 int inside_int = INTEGRAL_TYPE_P (inside_type);
7321 int inside_ptr = POINTER_TYPE_P (inside_type);
7322 int inside_float = FLOAT_TYPE_P (inside_type);
7323 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7324 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7325 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7326 int inter_int = INTEGRAL_TYPE_P (inter_type);
7327 int inter_ptr = POINTER_TYPE_P (inter_type);
7328 int inter_float = FLOAT_TYPE_P (inter_type);
7329 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7330 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7331 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7332 int final_int = INTEGRAL_TYPE_P (type);
7333 int final_ptr = POINTER_TYPE_P (type);
7334 int final_float = FLOAT_TYPE_P (type);
7335 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7336 unsigned int final_prec = TYPE_PRECISION (type);
7337 int final_unsignedp = TYPE_UNSIGNED (type);
7339 /* In addition to the cases of two conversions in a row
7340 handled below, if we are converting something to its own
7341 type via an object of identical or wider precision, neither
7342 conversion is needed. */
7343 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7344 && (((inter_int || inter_ptr) && final_int)
7345 || (inter_float && final_float))
7346 && inter_prec >= final_prec)
7347 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7349 /* Likewise, if the intermediate and final types are either both
7350 float or both integer, we don't need the middle conversion if
7351 it is wider than the final type and doesn't change the signedness
7352 (for integers). Avoid this if the final type is a pointer
7353 since then we sometimes need the inner conversion. Likewise if
7354 the outer has a precision not equal to the size of its mode. */
7355 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7356 || (inter_float && inside_float)
7357 || (inter_vec && inside_vec))
7358 && inter_prec >= inside_prec
7359 && (inter_float || inter_vec
7360 || inter_unsignedp == inside_unsignedp)
7361 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7362 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7364 && (! final_vec || inter_prec == inside_prec))
7365 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7367 /* If we have a sign-extension of a zero-extended value, we can
7368 replace that by a single zero-extension. */
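/* Worked example: converting an unsigned char first to int and then
   to long long zero-extends 8 -> 32 bits and then sign-extends
   32 -> 64; the upper bits are already zero, so this is a single
   zero-extension 8 -> 64. */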
7369 if (inside_int && inter_int && final_int
7370 && inside_prec < inter_prec && inter_prec < final_prec
7371 && inside_unsignedp && !inter_unsignedp)
7372 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7374 /* Two conversions in a row are not needed unless:
7375 - some conversion is floating-point (overstrict for now), or
7376 - some conversion is a vector (overstrict for now), or
7377 - the intermediate type is narrower than both initial and final, or
7379 - the intermediate type and innermost type differ in signedness,
7380 and the outermost type is wider than the intermediate, or
7381 - the initial type is a pointer type and the precisions of the
7382 intermediate and final types differ, or
7383 - the final type is a pointer type and the precisions of the
7384 initial and intermediate types differ.
7385 - the final type is a pointer type and the initial type not, or
7386 - the initial type is a pointer to an array and the final type not. */
7388 if (! inside_float && ! inter_float && ! final_float
7389 && ! inside_vec && ! inter_vec && ! final_vec
7390 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7391 && ! (inside_int && inter_int
7392 && inter_unsignedp != inside_unsignedp
7393 && inter_prec < final_prec)
7394 && ((inter_unsignedp && inter_prec > inside_prec)
7395 == (final_unsignedp && final_prec > inter_prec))
7396 && ! (inside_ptr && inter_prec != final_prec)
7397 && ! (final_ptr && inside_prec != inter_prec)
7398 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7399 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7400 && final_ptr == inside_ptr
7402 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7403 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7404 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7407 /* Handle (T *)&A.B.C for A being of type T and B and C
7408 living at offset zero. This occurs frequently in
7409 C++ upcasting and then accessing the base. */
7410 if (TREE_CODE (op0) == ADDR_EXPR
7411 && POINTER_TYPE_P (type)
7412 && handled_component_p (TREE_OPERAND (op0, 0)))
7414 HOST_WIDE_INT bitsize, bitpos;
7416 enum machine_mode mode;
7417 int unsignedp, volatilep;
7418 tree base = TREE_OPERAND (op0, 0);
7419 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7420 &mode, &unsignedp, &volatilep, false);
7421 /* If the reference was to a (constant) zero offset, we can use
7422 the address of the base if it has the same base type
7423 as the result type. */
7424 if (! offset && bitpos == 0
7425 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7426 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7427 return fold_convert (type, build_fold_addr_expr (base));
7430 if (TREE_CODE (op0) == MODIFY_EXPR
7431 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7432 /* Detect assigning a bitfield. */
7433 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7434 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7436 /* Don't leave an assignment inside a conversion
7437 unless assigning a bitfield. */
7438 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7439 /* First do the assignment, then return converted constant. */
7440 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7441 TREE_NO_WARNING (tem) = 1;
7442 TREE_USED (tem) = 1;
7446 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7447 constant (if x has signed type, the sign bit cannot be set
7448 in c). This folds extension into the BIT_AND_EXPR. */
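/* Worked example: for x of type unsigned short, T unsigned int and
   c == 0xff, (T)(x & 0xff) folds to (T)x & 0xff, so the mask can
   combine with whatever consumes the extended value. */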
7449 if (INTEGRAL_TYPE_P (type)
7450 && TREE_CODE (type) != BOOLEAN_TYPE
7451 && TREE_CODE (op0) == BIT_AND_EXPR
7452 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7455 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7458 if (TYPE_UNSIGNED (TREE_TYPE (and))
7459 || (TYPE_PRECISION (type)
7460 <= TYPE_PRECISION (TREE_TYPE (and))))
7462 else if (TYPE_PRECISION (TREE_TYPE (and1))
7463 <= HOST_BITS_PER_WIDE_INT
7464 && host_integerp (and1, 1))
7466 unsigned HOST_WIDE_INT cst;
7468 cst = tree_low_cst (and1, 1);
7469 cst &= (HOST_WIDE_INT) -1
7470 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7471 change = (cst == 0);
7472 #ifdef LOAD_EXTEND_OP
7474 && !flag_syntax_only
7475 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7478 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7479 and0 = fold_convert (uns, and0);
7480 and1 = fold_convert (uns, and1);
7486 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7487 TREE_INT_CST_HIGH (and1));
7488 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7489 TREE_CONSTANT_OVERFLOW (and1));
7490 return fold_build2 (BIT_AND_EXPR, type,
7491 fold_convert (type, and0), tem);
7495 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7496 T2 being pointers to types of the same size. */
7497 if (POINTER_TYPE_P (type)
7498 && BINARY_CLASS_P (arg0)
7499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7500 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7502 tree arg00 = TREE_OPERAND (arg0, 0);
7504 tree t1 = TREE_TYPE (arg00);
7505 tree tt0 = TREE_TYPE (t0);
7506 tree tt1 = TREE_TYPE (t1);
7507 tree s0 = TYPE_SIZE (tt0);
7508 tree s1 = TYPE_SIZE (tt1);
7510 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7511 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7512 TREE_OPERAND (arg0, 1));
7515 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7516 of the same precision, and X is an integer type not narrower than
7517 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
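/* Worked example (assuming 32-bit int and unsigned): for int x,
   (int)~(unsigned)x folds to ~x; the intermediate cast merely
   reinterprets the same 32 bits, so it commutes with the
   complement. */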
7518 if (INTEGRAL_TYPE_P (type)
7519 && TREE_CODE (op0) == BIT_NOT_EXPR
7520 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7521 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7522 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7523 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7525 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7526 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7527 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7528 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7531 tem = fold_convert_const (code, type, arg0);
7532 return tem ? tem : NULL_TREE;
7534 case VIEW_CONVERT_EXPR:
7535 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7536 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7537 return fold_view_convert_expr (type, op0);
7540 tem = fold_negate_expr (arg0);
7542 return fold_convert (type, tem);
7546 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7547 return fold_abs_const (arg0, type);
7548 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7549 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7550 /* Convert fabs((double)float) into (double)fabsf(float). */
7551 else if (TREE_CODE (arg0) == NOP_EXPR
7552 && TREE_CODE (type) == REAL_TYPE)
7554 tree targ0 = strip_float_extensions (arg0);
7556 return fold_convert (type, fold_build1 (ABS_EXPR,
7560 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7561 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7564 /* Strip sign ops from argument. */
7565 if (TREE_CODE (type) == REAL_TYPE)
7567 tem = fold_strip_sign_ops (arg0);
7569 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7574 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7575 return fold_convert (type, arg0);
7576 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7578 tree itype = TREE_TYPE (type);
7579 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7580 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7581 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7583 if (TREE_CODE (arg0) == COMPLEX_CST)
7585 tree itype = TREE_TYPE (type);
7586 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7587 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7588 return build_complex (type, rpart, negate_expr (ipart));
7590 if (TREE_CODE (arg0) == CONJ_EXPR)
7591 return fold_convert (type, TREE_OPERAND (arg0, 0));
7595 if (TREE_CODE (arg0) == INTEGER_CST)
7596 return fold_not_const (arg0, type);
7597 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7598 return TREE_OPERAND (arg0, 0);
7599 /* Convert ~ (-A) to A - 1. */
7600 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7601 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7602 build_int_cst (type, 1));
7603 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7604 else if (INTEGRAL_TYPE_P (type)
7605 && ((TREE_CODE (arg0) == MINUS_EXPR
7606 && integer_onep (TREE_OPERAND (arg0, 1)))
7607 || (TREE_CODE (arg0) == PLUS_EXPR
7608 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7609 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7610 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7611 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7612 && (tem = fold_unary (BIT_NOT_EXPR, type,
7614 TREE_OPERAND (arg0, 0)))))
7615 return fold_build2 (BIT_XOR_EXPR, type, tem,
7616 fold_convert (type, TREE_OPERAND (arg0, 1)));
7617 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7618 && (tem = fold_unary (BIT_NOT_EXPR, type,
7620 TREE_OPERAND (arg0, 1)))))
7621 return fold_build2 (BIT_XOR_EXPR, type,
7622 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7626 case TRUTH_NOT_EXPR:
7627 /* The argument to invert_truthvalue must have Boolean type. */
7628 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7629 arg0 = fold_convert (boolean_type_node, arg0);
7631 /* Note that the operand of this must be an int
7632 and its values must be 0 or 1.
7633 ("true" is a fixed value perhaps depending on the language,
7634 but we don't handle values other than 1 correctly yet.) */
7635 tem = fold_truth_not_expr (arg0);
7638 return fold_convert (type, tem);
7641 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7642 return fold_convert (type, arg0);
7643 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7644 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7645 TREE_OPERAND (arg0, 1));
7646 if (TREE_CODE (arg0) == COMPLEX_CST)
7647 return fold_convert (type, TREE_REALPART (arg0));
7648 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7650 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7651 tem = fold_build2 (TREE_CODE (arg0), itype,
7652 fold_build1 (REALPART_EXPR, itype,
7653 TREE_OPERAND (arg0, 0)),
7654 fold_build1 (REALPART_EXPR, itype,
7655 TREE_OPERAND (arg0, 1)));
7656 return fold_convert (type, tem);
7658 if (TREE_CODE (arg0) == CONJ_EXPR)
7660 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7661 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7662 return fold_convert (type, tem);
7667 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7668 return fold_convert (type, integer_zero_node);
7669 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7670 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7671 TREE_OPERAND (arg0, 0));
7672 if (TREE_CODE (arg0) == COMPLEX_CST)
7673 return fold_convert (type, TREE_IMAGPART (arg0));
7674 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7676 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7677 tem = fold_build2 (TREE_CODE (arg0), itype,
7678 fold_build1 (IMAGPART_EXPR, itype,
7679 TREE_OPERAND (arg0, 0)),
7680 fold_build1 (IMAGPART_EXPR, itype,
7681 TREE_OPERAND (arg0, 1)));
7682 return fold_convert (type, tem);
7684 if (TREE_CODE (arg0) == CONJ_EXPR)
7686 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7687 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7688 return fold_convert (type, negate_expr (tem));
7694 } /* switch (code) */
7697 /* Fold a binary expression of code CODE and type TYPE with operands
7698 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7699 Return the folded expression if folding is successful. Otherwise,
7700 return NULL_TREE. */
7703 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7705 enum tree_code compl_code;
7707 if (code == MIN_EXPR)
7708 compl_code = MAX_EXPR;
7709 else if (code == MAX_EXPR)
7710 compl_code = MIN_EXPR;
7714 /* MIN (MAX (a, b), b) == b. */
7715 if (TREE_CODE (op0) == compl_code
7716 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7717 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7719 /* MIN (MAX (b, a), b) == b. */
7720 if (TREE_CODE (op0) == compl_code
7721 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7722 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7723 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7725 /* MIN (a, MAX (a, b)) == a. */
7726 if (TREE_CODE (op1) == compl_code
7727 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7728 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7729 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7731 /* MIN (a, MAX (b, a)) == a. */
7732 if (TREE_CODE (op1) == compl_code
7733 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7734 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7735 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7740 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7741 by changing CODE to reduce the magnitude of constants involved in
7742 ARG0 of the comparison.
7743 Returns a canonicalized comparison tree if a simplification was
7744 possible, otherwise returns NULL_TREE. */
7747 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7748 tree arg0, tree arg1)
7750 enum tree_code code0 = TREE_CODE (arg0);
7751 tree t, cst0 = NULL_TREE;
7755 /* Match A +- CST code arg1 and CST code arg1. */
7756 if (!(((code0 == MINUS_EXPR
7757 || code0 == PLUS_EXPR)
7758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7759 || code0 == INTEGER_CST))
7762 /* Identify the constant in arg0 and its sign. */
7763 if (code0 == INTEGER_CST)
7766 cst0 = TREE_OPERAND (arg0, 1);
7767 sgn0 = tree_int_cst_sgn (cst0);
7769 /* Overflowed constants and zero will cause problems. */
7770 if (integer_zerop (cst0)
7771 || TREE_OVERFLOW (cst0))
7774 /* See if we can reduce the magnitude of the constant in
7775 arg0 by changing the comparison code. */
7776 if (code0 == INTEGER_CST)
7778 /* CST <= arg1 -> CST-1 < arg1. */
7779 if (code == LE_EXPR && sgn0 == 1)
7781 /* -CST < arg1 -> -CST-1 <= arg1. */
7782 else if (code == LT_EXPR && sgn0 == -1)
7784 /* CST > arg1 -> CST-1 >= arg1. */
7785 else if (code == GT_EXPR && sgn0 == 1)
7787 /* -CST >= arg1 -> -CST-1 > arg1. */
7788 else if (code == GE_EXPR && sgn0 == -1)
7792 /* arg1 code' CST' might be more canonical. */
7797 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7799 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7801 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7802 else if (code == GT_EXPR
7803 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7805 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7806 else if (code == LE_EXPR
7807 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7809 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7810 else if (code == GE_EXPR
7811 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7817 /* Now build the constant reduced in magnitude. */
7818 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7819 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7820 if (code0 != INTEGER_CST)
7821 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7823 /* If swapping might yield a more canonical form, do so. */
7825 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7827 return fold_build2 (code, type, t, arg1);
7830 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7831 overflow further. Try to decrease the magnitude of constants involved
7832 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7833 and put sole constants at the second argument position.
7834 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7837 maybe_canonicalize_comparison (enum tree_code code, tree type,
7838 tree arg0, tree arg1)
7842 /* In principle pointers also have undefined overflow behavior,
7843 but that causes problems elsewhere. */
7844 if ((flag_wrapv || flag_trapv)
7845 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7846 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7849 /* Try canonicalization by simplifying arg0. */
7850 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7854 /* Try canonicalization by simplifying arg1 using the swapped
7856 code = swap_tree_comparison (code);
7857 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7860 /* Subroutine of fold_binary. This routine performs all of the
7861 transformations that are common to the equality/inequality
7862 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7863 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7864 fold_binary should call fold_binary. Fold a comparison with
7865 tree code CODE and type TYPE with operands OP0 and OP1. Return
7866 the folded comparison or NULL_TREE. */
7869 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7871 tree arg0, arg1, tem;
7876 STRIP_SIGN_NOPS (arg0);
7877 STRIP_SIGN_NOPS (arg1);
7879 tem = fold_relational_const (code, type, arg0, arg1);
7880 if (tem != NULL_TREE)
7883 /* If one arg is a real or integer constant, put it last. */
7884 if (tree_swap_operands_p (arg0, arg1, true))
7885 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7887 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
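/* Worked example (signed x, no -fwrapv/-ftrapv): x + 3 < 10 folds
   to x < 7, since signed overflow in x + 3 would be undefined
   anyway. */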
7888 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7889 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7890 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7891 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7892 && !(flag_wrapv || flag_trapv))
7893 && (TREE_CODE (arg1) == INTEGER_CST
7894 && !TREE_OVERFLOW (arg1)))
7896 tree const1 = TREE_OPERAND (arg0, 1);
7898 tree variable = TREE_OPERAND (arg0, 0);
7901 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7903 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7904 TREE_TYPE (arg1), const2, const1);
7905 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7906 && (TREE_CODE (lhs) != INTEGER_CST
7907 || !TREE_OVERFLOW (lhs)))
7908 return fold_build2 (code, type, variable, lhs);
7911 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7912 same object, then we can fold this to a comparison of the two offsets in
7913 signed size type. This is possible because pointer arithmetic is
7914 restricted to remain within an object, and overflow on pointer differences
7915 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7916 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7917 && !flag_wrapv && !flag_trapv)
7919 tree base0, offset0, base1, offset1;
7921 if (extract_array_ref (arg0, &base0, &offset0)
7922 && extract_array_ref (arg1, &base1, &offset1)
7923 && operand_equal_p (base0, base1, 0))
7925 tree signed_size_type_node;
7926 signed_size_type_node = signed_type_for (size_type_node);
7928 /* By converting to signed size type we cover middle-end pointer
7929 arithmetic which operates on unsigned pointer types of size
7930 type size and ARRAY_REF offsets which are properly sign or
7931 zero extended from their type in case it is narrower than the size type. */
7933 if (offset0 == NULL_TREE)
7934 offset0 = build_int_cst (signed_size_type_node, 0);
7936 offset0 = fold_convert (signed_size_type_node, offset0);
7937 if (offset1 == NULL_TREE)
7938 offset1 = build_int_cst (signed_size_type_node, 0);
7940 offset1 = fold_convert (signed_size_type_node, offset1);
7942 return fold_build2 (code, type, offset0, offset1);
7946 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
7947 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
7948 the resulting offset is smaller in absolute value than the original one. */
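/* Worked example: for signed x and y, x + 9 < y + 7 folds to
   x < y - 2; the new offset has smaller magnitude than either
   original constant, so no new overflow is introduced. */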
7950 if (!(flag_wrapv || flag_trapv)
7951 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7952 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7953 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7954 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
7955 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
7956 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7957 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
7959 tree const1 = TREE_OPERAND (arg0, 1);
7960 tree const2 = TREE_OPERAND (arg1, 1);
7961 tree variable1 = TREE_OPERAND (arg0, 0);
7962 tree variable2 = TREE_OPERAND (arg1, 0);
7965 /* Put the constant on the side where it doesn't overflow and is
7966 of lower absolute value than before. */
7967 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7968 ? MINUS_EXPR : PLUS_EXPR,
7970 if (!TREE_OVERFLOW (cst)
7971 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
7972 return fold_build2 (code, type,
7974 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
7977 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
7978 ? MINUS_EXPR : PLUS_EXPR,
7980 if (!TREE_OVERFLOW (cst)
7981 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
7982 return fold_build2 (code, type,
7983 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
7988 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
7992 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7994 tree targ0 = strip_float_extensions (arg0);
7995 tree targ1 = strip_float_extensions (arg1);
7996 tree newtype = TREE_TYPE (targ0);
7998 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7999 newtype = TREE_TYPE (targ1);
8001 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8002 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8003 return fold_build2 (code, type, fold_convert (newtype, targ0),
8004 fold_convert (newtype, targ1));
8006 /* (-a) CMP (-b) -> b CMP a */
8007 if (TREE_CODE (arg0) == NEGATE_EXPR
8008 && TREE_CODE (arg1) == NEGATE_EXPR)
8009 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8010 TREE_OPERAND (arg0, 0));
8012 if (TREE_CODE (arg1) == REAL_CST)
8014 REAL_VALUE_TYPE cst;
8015 cst = TREE_REAL_CST (arg1);
8017 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8018 if (TREE_CODE (arg0) == NEGATE_EXPR)
8019 return fold_build2 (swap_tree_comparison (code), type,
8020 TREE_OPERAND (arg0, 0),
8021 build_real (TREE_TYPE (arg1),
8022 REAL_VALUE_NEGATE (cst)));
8024 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8025 /* a CMP (-0) -> a CMP 0 */
8026 if (REAL_VALUE_MINUS_ZERO (cst))
8027 return fold_build2 (code, type, arg0,
8028 build_real (TREE_TYPE (arg1), dconst0));
8030 /* x != NaN is always true, other ops are always false. */
8031 if (REAL_VALUE_ISNAN (cst)
8032 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8034 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8035 return omit_one_operand (type, tem, arg0);
8038 /* Fold comparisons against infinity. */
8039 if (REAL_VALUE_ISINF (cst))
8041 tem = fold_inf_compare (code, type, arg0, arg1);
8042 if (tem != NULL_TREE)
8047 /* If this is a comparison of a real constant with a PLUS_EXPR
8048 or a MINUS_EXPR of a real constant, we can convert it into a
8049 comparison with a revised real constant as long as no overflow
8050 occurs when unsafe_math_optimizations are enabled. */
8051 if (flag_unsafe_math_optimizations
8052 && TREE_CODE (arg1) == REAL_CST
8053 && (TREE_CODE (arg0) == PLUS_EXPR
8054 || TREE_CODE (arg0) == MINUS_EXPR)
8055 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8056 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8057 ? MINUS_EXPR : PLUS_EXPR,
8058 arg1, TREE_OPERAND (arg0, 1), 0))
8059 && ! TREE_CONSTANT_OVERFLOW (tem))
8060 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8062 /* Likewise, we can simplify a comparison of a real constant with
8063 a MINUS_EXPR whose first operand is also a real constant, i.e.
8064 (c1 - x) < c2 becomes x > c1-c2. */
8065 if (flag_unsafe_math_optimizations
8066 && TREE_CODE (arg1) == REAL_CST
8067 && TREE_CODE (arg0) == MINUS_EXPR
8068 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8069 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8071 && ! TREE_CONSTANT_OVERFLOW (tem))
8072 return fold_build2 (swap_tree_comparison (code), type,
8073 TREE_OPERAND (arg0, 1), tem);
8075 /* Fold comparisons against built-in math functions. */
8076 if (TREE_CODE (arg1) == REAL_CST
8077 && flag_unsafe_math_optimizations
8078 && ! flag_errno_math)
8080 enum built_in_function fcode = builtin_mathfn_code (arg0);
8082 if (fcode != END_BUILTINS)
8084 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8085 if (tem != NULL_TREE)
8091 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8092 if (TREE_CONSTANT (arg1)
8093 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8094 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8095 /* This optimization is invalid for ordered comparisons
8096 if CONST+INCR overflows or if foo+incr might overflow.
8097 This optimization is invalid for floating point due to rounding.
8098 For pointer types we assume overflow doesn't happen. */
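/* Worked example: for int i, i++ == 5 folds to ++i == 6; shifting
   both sides by the increment leaves equality unchanged. */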
8099 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8100 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8101 && (code == EQ_EXPR || code == NE_EXPR))))
8103 tree varop, newconst;
8105 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8107 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8108 arg1, TREE_OPERAND (arg0, 1));
8109 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8110 TREE_OPERAND (arg0, 0),
8111 TREE_OPERAND (arg0, 1));
8115 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8116 arg1, TREE_OPERAND (arg0, 1));
8117 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8118 TREE_OPERAND (arg0, 0),
8119 TREE_OPERAND (arg0, 1));
8123 /* If VAROP is a reference to a bitfield, we must mask
8124 the constant by the width of the field. */
8125 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8126 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8127 && host_integerp (DECL_SIZE (TREE_OPERAND
8128 (TREE_OPERAND (varop, 0), 1)), 1))
8130 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8131 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8132 tree folded_compare, shift;
8134 /* First check whether the comparison would come out
8135 always the same. If we don't do that we would
8136 change the meaning with the masking. */
8137 folded_compare = fold_build2 (code, type,
8138 TREE_OPERAND (varop, 0), arg1);
8139 if (TREE_CODE (folded_compare) == INTEGER_CST)
8140 return omit_one_operand (type, folded_compare, varop);
8142 shift = build_int_cst (NULL_TREE,
8143 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8144 shift = fold_convert (TREE_TYPE (varop), shift);
8145 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8147 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8151 return fold_build2 (code, type, varop, newconst);
8154 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8155 && (TREE_CODE (arg0) == NOP_EXPR
8156 || TREE_CODE (arg0) == CONVERT_EXPR))
8158 /* If we are widening one operand of an integer comparison,
8159 see if the other operand is similarly being widened. Perhaps we
8160 can do the comparison in the narrower type. */
8161 tem = fold_widened_comparison (code, type, arg0, arg1);
8165 /* Or if we are changing signedness. */
8166 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8171 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8172 constant, we can simplify it. */
8173 if (TREE_CODE (arg1) == INTEGER_CST
8174 && (TREE_CODE (arg0) == MIN_EXPR
8175 || TREE_CODE (arg0) == MAX_EXPR)
8176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8178 tem = optimize_minmax_comparison (code, type, op0, op1);
8183 /* Simplify comparison of something with itself. (For IEEE
8184 floating-point, we can only do some of these simplifications.) */
8185 if (operand_equal_p (arg0, arg1, 0))
8190 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8191 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8192 return constant_boolean_node (1, type);
8197 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8198 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8199 return constant_boolean_node (1, type);
8200 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8203 /* For NE, we can only do this simplification if integer
8204 or we don't honor IEEE floating point NaNs. */
8205 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8206 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8208 /* ... fall through ... */
8211 return constant_boolean_node (0, type);
8217 /* If we are comparing an expression that just has comparisons
8218 of two integer values, arithmetic expressions of those comparisons,
8219 and constants, we can simplify it. There are only three cases
8220 to check: the two values can either be equal, the first can be
8221 greater, or the second can be greater. Fold the expression for
8222 those three values. Since each value must be 0 or 1, we have
8223 eight possibilities, each of which corresponds to the constant 0
8224 or 1 or one of the six possible comparisons.
8226 This handles common cases like (a > b) == 0 but also handles
8227 expressions like ((x > y) - (y > x)) > 0, which supposedly
8228 occur in macroized code. */
8230 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8232 tree cval1 = 0, cval2 = 0;
8233 int save_p = 0;
8235 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8236 /* Don't handle degenerate cases here; they should already
8237 have been handled anyway. */
8238 && cval1 != 0 && cval2 != 0
8239 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8240 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8241 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8242 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8243 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8244 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8245 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8247 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8248 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8250 /* We can't just pass T to eval_subst in case cval1 or cval2
8251 was the same as ARG1. */
8254 = fold_build2 (code, type,
8255 eval_subst (arg0, cval1, maxval,
8259 = fold_build2 (code, type,
8260 eval_subst (arg0, cval1, maxval,
8264 = fold_build2 (code, type,
8265 eval_subst (arg0, cval1, minval,
8269 /* All three of these results should be 0 or 1. Confirm they are.
8270 Then use those values to select the proper code to use. */
8272 if (TREE_CODE (high_result) == INTEGER_CST
8273 && TREE_CODE (equal_result) == INTEGER_CST
8274 && TREE_CODE (low_result) == INTEGER_CST)
8276 /* Make a 3-bit mask with the high-order bit being the
8277 value for `>', the next for `=', and the low for `<'. */
8278 switch ((integer_onep (high_result) * 4)
8279 + (integer_onep (equal_result) * 2)
8280 + integer_onep (low_result))
8283 case 0:
8284 return omit_one_operand (type, integer_zero_node, arg0);
8304 case 7:
8305 return omit_one_operand (type, integer_one_node, arg0);
8308 if (save_p)
8309 return save_expr (build2 (code, type, cval1, cval2));
8310 return fold_build2 (code, type, cval1, cval2);
8315 /* Fold a comparison of the address of COMPONENT_REFs with the same
8316 type and component to a comparison of the address of the base
8317 object. In short, &x->a OP &y->a to x OP y and
8318 &x->a OP &y.a to x OP &y */
8319 if (TREE_CODE (arg0) == ADDR_EXPR
8320 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8321 && TREE_CODE (arg1) == ADDR_EXPR
8322 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8324 tree cref0 = TREE_OPERAND (arg0, 0);
8325 tree cref1 = TREE_OPERAND (arg1, 0);
8326 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8328 tree op0 = TREE_OPERAND (cref0, 0);
8329 tree op1 = TREE_OPERAND (cref1, 0);
8330 return fold_build2 (code, type,
8331 build_fold_addr_expr (op0),
8332 build_fold_addr_expr (op1));
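/* Illustrative sketch (editor's addition): with hypothetical
   declarations

     struct s { int a; } *x, *y;

   the comparison "&x->a == &y->a" refers to the same member of two
   objects, so it is folded to the base-object comparison "x == y". */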
8336 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8337 into a single range test. */
8338 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8339 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8340 && TREE_CODE (arg1) == INTEGER_CST
8341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8342 && !integer_zerop (TREE_OPERAND (arg0, 1))
8343 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8344 && !TREE_OVERFLOW (arg1))
8346 tem = fold_div_compare (code, type, arg0, arg1);
8347 if (tem != NULL_TREE)
8348 return tem;
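/* Illustrative sketch (editor's addition): with truncating integer
   division, "x / 10 == 2" holds exactly for x in [20, 29], so
   fold_div_compare can rewrite the division plus comparison as a
   single range test on x. The constants are hypothetical. */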
8351 /* Fold ~X op ~Y as Y op X. */
8352 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8353 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8354 return fold_build2 (code, type,
8355 TREE_OPERAND (arg1, 0),
8356 TREE_OPERAND (arg0, 0));
8358 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8359 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8360 && TREE_CODE (arg1) == INTEGER_CST)
8361 return fold_build2 (swap_tree_comparison (code), type,
8362 TREE_OPERAND (arg0, 0),
8363 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
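/* Illustrative sketch (editor's addition): because ~x == -x - 1 is
   strictly decreasing, "~x < ~y" is equivalent to "y < x", and
   "~x == 5" is equivalent to "x == ~5"; both rewrites remove a
   BIT_NOT_EXPR from the tree. */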
8369 /* Subroutine of fold_binary. Optimize complex multiplications of the
8370 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8371 argument EXPR represents the expression "z" of type TYPE. */
8373 static tree
8374 fold_mult_zconjz (tree type, tree expr)
8376 tree itype = TREE_TYPE (type);
8377 tree rpart, ipart, tem;
8379 if (TREE_CODE (expr) == COMPLEX_EXPR)
8381 rpart = TREE_OPERAND (expr, 0);
8382 ipart = TREE_OPERAND (expr, 1);
8384 else if (TREE_CODE (expr) == COMPLEX_CST)
8386 rpart = TREE_REALPART (expr);
8387 ipart = TREE_IMAGPART (expr);
8389 else
8391 expr = save_expr (expr);
8392 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8393 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8396 rpart = save_expr (rpart);
8397 ipart = save_expr (ipart);
8398 tem = fold_build2 (PLUS_EXPR, itype,
8399 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8400 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8401 return fold_build2 (COMPLEX_EXPR, type, tem,
8402 fold_convert (itype, integer_zero_node));
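/* Illustrative sketch (editor's addition): for z = a + b*i,
   z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b, with a zero
   imaginary part; that is exactly the COMPLEX_EXPR of the PLUS_EXPR
   built above. */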
8406 /* Fold a binary expression of code CODE and type TYPE with operands
8407 OP0 and OP1. Return the folded expression if folding is
8408 successful. Otherwise, return NULL_TREE. */
8410 tree
8411 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8413 enum tree_code_class kind = TREE_CODE_CLASS (code);
8414 tree arg0, arg1, tem;
8415 tree t1 = NULL_TREE;
8417 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8418 && TREE_CODE_LENGTH (code) == 2
8419 && op0 != NULL_TREE
8420 && op1 != NULL_TREE);
8422 arg0 = op0;
8423 arg1 = op1;
8425 /* Strip any conversions that don't change the mode. This is
8426 safe for every expression, except for a comparison expression
8427 because its signedness is derived from its operands. So, in
8428 the latter case, only strip conversions that don't change the
8429 signedness.
8431 Note that this is done as an internal manipulation within the
8432 constant folder, in order to find the simplest representation
8433 of the arguments so that their form can be studied. In any
8434 cases, the appropriate type conversions should be put back in
8435 the tree that will get out of the constant folder. */
8437 if (kind == tcc_comparison)
8439 STRIP_SIGN_NOPS (arg0);
8440 STRIP_SIGN_NOPS (arg1);
8442 else
8444 STRIP_NOPS (arg0);
8445 STRIP_NOPS (arg1);
8448 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8449 constant but we can't do arithmetic on them. */
8450 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8451 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8452 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8453 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8455 if (kind == tcc_binary)
8456 tem = const_binop (code, arg0, arg1, 0);
8457 else if (kind == tcc_comparison)
8458 tem = fold_relational_const (code, type, arg0, arg1);
8459 else
8460 tem = NULL_TREE;
8462 if (tem != NULL_TREE)
8464 if (TREE_TYPE (tem) != type)
8465 tem = fold_convert (type, tem);
8466 return tem;
8470 /* If this is a commutative operation, and ARG0 is a constant, move it
8471 to ARG1 to reduce the number of tests below. */
8472 if (commutative_tree_code (code)
8473 && tree_swap_operands_p (arg0, arg1, true))
8474 return fold_build2 (code, type, op1, op0);
8476 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8478 First check for cases where an arithmetic operation is applied to a
8479 compound, conditional, or comparison operation. Push the arithmetic
8480 operation inside the compound or conditional to see if any folding
8481 can then be done. Convert comparison to conditional for this purpose.
8482 This also optimizes non-constant cases that used to be done in
8483 expand_expr.
8485 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8486 one of the operands is a comparison and the other is a comparison, a
8487 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8488 code below would make the expression more complex. Change it to a
8489 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8490 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8492 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8493 || code == EQ_EXPR || code == NE_EXPR)
8494 && ((truth_value_p (TREE_CODE (arg0))
8495 && (truth_value_p (TREE_CODE (arg1))
8496 || (TREE_CODE (arg1) == BIT_AND_EXPR
8497 && integer_onep (TREE_OPERAND (arg1, 1)))))
8498 || (truth_value_p (TREE_CODE (arg1))
8499 && (truth_value_p (TREE_CODE (arg0))
8500 || (TREE_CODE (arg0) == BIT_AND_EXPR
8501 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8503 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8504 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8505 : TRUTH_XOR_EXPR,
8506 boolean_type_node,
8507 fold_convert (boolean_type_node, arg0),
8508 fold_convert (boolean_type_node, arg1));
8510 if (code == EQ_EXPR)
8511 tem = invert_truthvalue (tem);
8513 return fold_convert (type, tem);
8516 if (TREE_CODE_CLASS (code) == tcc_binary
8517 || TREE_CODE_CLASS (code) == tcc_comparison)
8519 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8520 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8521 fold_build2 (code, type,
8522 TREE_OPERAND (arg0, 1), op1));
8523 if (TREE_CODE (arg1) == COMPOUND_EXPR
8524 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8525 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8526 fold_build2 (code, type,
8527 op0, TREE_OPERAND (arg1, 1)));
8529 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8531 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8532 arg0, arg1,
8533 /*cond_first_p=*/1);
8534 if (tem != NULL_TREE)
8535 return tem;
8538 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8540 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8541 arg1, arg0,
8542 /*cond_first_p=*/0);
8543 if (tem != NULL_TREE)
8544 return tem;
8549 case PLUS_EXPR:
8551 /* A + (-B) -> A - B */
8552 if (TREE_CODE (arg1) == NEGATE_EXPR)
8553 return fold_build2 (MINUS_EXPR, type,
8554 fold_convert (type, arg0),
8555 fold_convert (type, TREE_OPERAND (arg1, 0)));
8556 /* (-A) + B -> B - A */
8557 if (TREE_CODE (arg0) == NEGATE_EXPR
8558 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8559 return fold_build2 (MINUS_EXPR, type,
8560 fold_convert (type, arg1),
8561 fold_convert (type, TREE_OPERAND (arg0, 0)));
8562 /* Convert ~A + 1 to -A. */
8563 if (INTEGRAL_TYPE_P (type)
8564 && TREE_CODE (arg0) == BIT_NOT_EXPR
8565 && integer_onep (arg1))
8566 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
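/* Illustrative sketch (editor's addition): in two's complement
   arithmetic -a == ~a + 1, so "~a + 1" folds to "-a"; for a == 5,
   ~5 + 1 == -6 + 1 == -5. */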
8568 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8569 same or one. */
8570 if ((TREE_CODE (arg0) == MULT_EXPR
8571 || TREE_CODE (arg1) == MULT_EXPR)
8572 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8574 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8575 if (tem)
8576 return tem;
8579 if (! FLOAT_TYPE_P (type))
8581 if (integer_zerop (arg1))
8582 return non_lvalue (fold_convert (type, arg0));
8584 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8585 with a constant, and the two constants have no bits in common,
8586 we should treat this as a BIT_IOR_EXPR since this may produce more
8587 simplifications. */
8588 if (TREE_CODE (arg0) == BIT_AND_EXPR
8589 && TREE_CODE (arg1) == BIT_AND_EXPR
8590 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8591 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8592 && integer_zerop (const_binop (BIT_AND_EXPR,
8593 TREE_OPERAND (arg0, 1),
8594 TREE_OPERAND (arg1, 1), 0)))
8596 code = BIT_IOR_EXPR;
8597 goto bit_ior;
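/* Illustrative sketch (editor's addition): "(x & 0xF0) + (y & 0x0F)"
   cannot produce a carry because the two mask constants share no
   bits, so it is safely treated as "(x & 0xF0) | (y & 0x0F)", which
   may unlock further bitwise simplifications. The constants are
   hypothetical. */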
8600 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8601 (plus (plus (mult) (mult)) (foo)) so that we can
8602 take advantage of the factoring cases below. */
8603 if (((TREE_CODE (arg0) == PLUS_EXPR
8604 || TREE_CODE (arg0) == MINUS_EXPR)
8605 && TREE_CODE (arg1) == MULT_EXPR)
8606 || ((TREE_CODE (arg1) == PLUS_EXPR
8607 || TREE_CODE (arg1) == MINUS_EXPR)
8608 && TREE_CODE (arg0) == MULT_EXPR))
8610 tree parg0, parg1, parg, marg;
8611 enum tree_code pcode;
8613 if (TREE_CODE (arg1) == MULT_EXPR)
8614 parg = arg0, marg = arg1;
8615 else
8616 parg = arg1, marg = arg0;
8617 pcode = TREE_CODE (parg);
8618 parg0 = TREE_OPERAND (parg, 0);
8619 parg1 = TREE_OPERAND (parg, 1);
8623 if (TREE_CODE (parg0) == MULT_EXPR
8624 && TREE_CODE (parg1) != MULT_EXPR)
8625 return fold_build2 (pcode, type,
8626 fold_build2 (PLUS_EXPR, type,
8627 fold_convert (type, parg0),
8628 fold_convert (type, marg)),
8629 fold_convert (type, parg1));
8630 if (TREE_CODE (parg0) != MULT_EXPR
8631 && TREE_CODE (parg1) == MULT_EXPR)
8632 return fold_build2 (PLUS_EXPR, type,
8633 fold_convert (type, parg0),
8634 fold_build2 (pcode, type,
8635 fold_convert (type, marg),
8636 fold_convert (type, parg1)));
8640 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8641 of the array. The loop optimizer sometimes produces this type of
8642 expression. */
8643 if (TREE_CODE (arg0) == ADDR_EXPR)
8645 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8646 if (tem)
8647 return fold_convert (type, tem);
8649 else if (TREE_CODE (arg1) == ADDR_EXPR)
8651 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8652 if (tem)
8653 return fold_convert (type, tem);
8658 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8659 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8660 return non_lvalue (fold_convert (type, arg0));
8662 /* Likewise if the operands are reversed. */
8663 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8664 return non_lvalue (fold_convert (type, arg1));
8666 /* Convert X + -C into X - C. */
8667 if (TREE_CODE (arg1) == REAL_CST
8668 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8670 tem = fold_negate_const (arg1, type);
8671 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8672 return fold_build2 (MINUS_EXPR, type,
8673 fold_convert (type, arg0),
8674 fold_convert (type, tem));
8677 if (flag_unsafe_math_optimizations
8678 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8679 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8680 && (tem = distribute_real_division (code, type, arg0, arg1)))
8681 return tem;
8683 /* Convert x+x into x*2.0. */
8684 if (operand_equal_p (arg0, arg1, 0)
8685 && SCALAR_FLOAT_TYPE_P (type))
8686 return fold_build2 (MULT_EXPR, type, arg0,
8687 build_real (type, dconst2));
8689 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8690 if (flag_unsafe_math_optimizations
8691 && TREE_CODE (arg1) == PLUS_EXPR
8692 && TREE_CODE (arg0) != MULT_EXPR)
8694 tree tree10 = TREE_OPERAND (arg1, 0);
8695 tree tree11 = TREE_OPERAND (arg1, 1);
8696 if (TREE_CODE (tree11) == MULT_EXPR
8697 && TREE_CODE (tree10) == MULT_EXPR)
8699 tree tree0;
8700 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8701 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8704 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8705 if (flag_unsafe_math_optimizations
8706 && TREE_CODE (arg0) == PLUS_EXPR
8707 && TREE_CODE (arg1) != MULT_EXPR)
8709 tree tree00 = TREE_OPERAND (arg0, 0);
8710 tree tree01 = TREE_OPERAND (arg0, 1);
8711 if (TREE_CODE (tree01) == MULT_EXPR
8712 && TREE_CODE (tree00) == MULT_EXPR)
8714 tree tree0;
8715 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8716 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8722 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8723 is a rotate of A by C1 bits. */
8724 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8725 is a rotate of A by B bits. */
8727 enum tree_code code0, code1;
8728 code0 = TREE_CODE (arg0);
8729 code1 = TREE_CODE (arg1);
8730 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8731 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8732 && operand_equal_p (TREE_OPERAND (arg0, 0),
8733 TREE_OPERAND (arg1, 0), 0)
8734 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8736 tree tree01, tree11;
8737 enum tree_code code01, code11;
8739 tree01 = TREE_OPERAND (arg0, 1);
8740 tree11 = TREE_OPERAND (arg1, 1);
8741 STRIP_NOPS (tree01);
8742 STRIP_NOPS (tree11);
8743 code01 = TREE_CODE (tree01);
8744 code11 = TREE_CODE (tree11);
8745 if (code01 == INTEGER_CST
8746 && code11 == INTEGER_CST
8747 && TREE_INT_CST_HIGH (tree01) == 0
8748 && TREE_INT_CST_HIGH (tree11) == 0
8749 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8750 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8751 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8752 code0 == LSHIFT_EXPR ? tree01 : tree11);
8753 else if (code11 == MINUS_EXPR)
8755 tree tree110, tree111;
8756 tree110 = TREE_OPERAND (tree11, 0);
8757 tree111 = TREE_OPERAND (tree11, 1);
8758 STRIP_NOPS (tree110);
8759 STRIP_NOPS (tree111);
8760 if (TREE_CODE (tree110) == INTEGER_CST
8761 && 0 == compare_tree_int (tree110,
8762 TYPE_PRECISION
8763 (TREE_TYPE (TREE_OPERAND
8764 (arg0, 0))))
8765 && operand_equal_p (tree01, tree111, 0))
8766 return build2 ((code0 == LSHIFT_EXPR
8767 ? LROTATE_EXPR
8768 : RROTATE_EXPR),
8769 type, TREE_OPERAND (arg0, 0), tree01);
8771 else if (code01 == MINUS_EXPR)
8773 tree tree010, tree011;
8774 tree010 = TREE_OPERAND (tree01, 0);
8775 tree011 = TREE_OPERAND (tree01, 1);
8776 STRIP_NOPS (tree010);
8777 STRIP_NOPS (tree011);
8778 if (TREE_CODE (tree010) == INTEGER_CST
8779 && 0 == compare_tree_int (tree010,
8780 TYPE_PRECISION
8781 (TREE_TYPE (TREE_OPERAND
8782 (arg0, 0))))
8783 && operand_equal_p (tree11, tree011, 0))
8784 return build2 ((code0 != LSHIFT_EXPR
8785 ? LROTATE_EXPR
8786 : RROTATE_EXPR),
8787 type, TREE_OPERAND (arg0, 0), tree11);
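/* Illustrative sketch (editor's addition): on a 32-bit unsigned type
   the classic rotate idiom, here with hypothetical names and
   0 < b < 32,

     unsigned int rotl (unsigned int a, unsigned int b)
     {
       return (a << b) + (a >> (32 - b));
     }

   matches the patterns above and folds to a single LROTATE_EXPR,
   which most targets expand to one rotate instruction. */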
8793 /* In most languages, we can't associate operations on floats
8794 through parentheses. Rather than remember where the parentheses
8795 were, we don't associate floats at all, unless the user has
8796 specified -funsafe-math-optimizations. */
8798 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8800 tree var0, con0, lit0, minus_lit0;
8801 tree var1, con1, lit1, minus_lit1;
8803 /* Split both trees into variables, constants, and literals. Then
8804 associate each group together, the constants with literals,
8805 then the result with variables. This increases the chances of
8806 literals being recombined later and of generating relocatable
8807 expressions for the sum of a constant and literal. */
8808 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8809 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8810 code == MINUS_EXPR);
8812 /* Only do something if we found more than two objects. Otherwise,
8813 nothing has changed and we risk infinite recursion. */
8814 if (2 < ((var0 != 0) + (var1 != 0)
8815 + (con0 != 0) + (con1 != 0)
8816 + (lit0 != 0) + (lit1 != 0)
8817 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8819 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8820 if (code == MINUS_EXPR)
8821 code = PLUS_EXPR;
8823 var0 = associate_trees (var0, var1, code, type);
8824 con0 = associate_trees (con0, con1, code, type);
8825 lit0 = associate_trees (lit0, lit1, code, type);
8826 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8828 /* Preserve the MINUS_EXPR if the negative part of the literal is
8829 greater than the positive part. Otherwise, the multiplicative
8830 folding code (i.e. extract_muldiv) may be fooled when
8831 unsigned constants are subtracted, as in the following
8832 example: ((X*2 + 4) - 8U)/2. */
8833 if (minus_lit0 && lit0)
8835 if (TREE_CODE (lit0) == INTEGER_CST
8836 && TREE_CODE (minus_lit0) == INTEGER_CST
8837 && tree_int_cst_lt (lit0, minus_lit0))
8839 minus_lit0 = associate_trees (minus_lit0, lit0,
8840 MINUS_EXPR, type);
8845 lit0 = associate_trees (lit0, minus_lit0,
8846 MINUS_EXPR, type);
8853 return fold_convert (type,
8854 associate_trees (var0, minus_lit0,
8855 MINUS_EXPR, type));
8858 con0 = associate_trees (con0, minus_lit0,
8859 MINUS_EXPR, type);
8860 return fold_convert (type,
8861 associate_trees (var0, con0,
8862 PLUS_EXPR, type));
8866 con0 = associate_trees (con0, lit0, code, type);
8867 return fold_convert (type, associate_trees (var0, con0,
8868 code, type));
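/* Illustrative sketch (editor's addition): splitting and
   reassociating turns "(x + 1) + (y + 2)" into "(x + y) + 3",
   combining the literal parts; for MINUS_EXPR the pieces are
   recombined with PLUS_EXPR and the negative literal is kept
   separate, as described above. */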
8873 case MINUS_EXPR:
8875 /* A - (-B) -> A + B */
8876 if (TREE_CODE (arg1) == NEGATE_EXPR)
8877 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8878 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8879 if (TREE_CODE (arg0) == NEGATE_EXPR
8880 && (FLOAT_TYPE_P (type)
8881 || INTEGRAL_TYPE_P (type))
8882 && negate_expr_p (arg1)
8883 && reorder_operands_p (arg0, arg1))
8884 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8885 TREE_OPERAND (arg0, 0));
8886 /* Convert -A - 1 to ~A. */
8887 if (INTEGRAL_TYPE_P (type)
8888 && TREE_CODE (arg0) == NEGATE_EXPR
8889 && integer_onep (arg1))
8890 return fold_build1 (BIT_NOT_EXPR, type,
8891 fold_convert (type, TREE_OPERAND (arg0, 0)));
8893 /* Convert -1 - A to ~A. */
8894 if (INTEGRAL_TYPE_P (type)
8895 && integer_all_onesp (arg0))
8896 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8898 if (! FLOAT_TYPE_P (type))
8900 if (integer_zerop (arg0))
8901 return negate_expr (fold_convert (type, arg1));
8902 if (integer_zerop (arg1))
8903 return non_lvalue (fold_convert (type, arg0));
8905 /* Fold A - (A & B) into ~B & A. */
8906 if (!TREE_SIDE_EFFECTS (arg0)
8907 && TREE_CODE (arg1) == BIT_AND_EXPR)
8909 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8910 return fold_build2 (BIT_AND_EXPR, type,
8911 fold_build1 (BIT_NOT_EXPR, type,
8912 TREE_OPERAND (arg1, 0)),
8913 fold_convert (type, arg0));
8914 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8915 return fold_build2 (BIT_AND_EXPR, type,
8916 fold_build1 (BIT_NOT_EXPR, type,
8917 TREE_OPERAND (arg1, 1)),
8918 fold_convert (type, arg0));
8921 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8922 any power of 2 minus 1. */
8923 if (TREE_CODE (arg0) == BIT_AND_EXPR
8924 && TREE_CODE (arg1) == BIT_AND_EXPR
8925 && operand_equal_p (TREE_OPERAND (arg0, 0),
8926 TREE_OPERAND (arg1, 0), 0))
8928 tree mask0 = TREE_OPERAND (arg0, 1);
8929 tree mask1 = TREE_OPERAND (arg1, 1);
8930 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8932 if (operand_equal_p (tem, mask1, 0))
8934 tem = fold_build2 (BIT_XOR_EXPR, type,
8935 TREE_OPERAND (arg0, 0), mask1);
8936 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8941 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8942 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8943 return non_lvalue (fold_convert (type, arg0));
8945 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8946 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8947 (-ARG1 + ARG0) reduces to -ARG1. */
8948 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8949 return negate_expr (fold_convert (type, arg1));
8951 /* Fold &x - &x. This can happen from &x.foo - &x.
8952 This is unsafe for certain floats even in non-IEEE formats.
8953 In IEEE, it is unsafe because it does wrong for NaNs.
8954 Also note that operand_equal_p is always false if an operand
8955 is volatile. */
8957 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8958 && operand_equal_p (arg0, arg1, 0))
8959 return fold_convert (type, integer_zero_node);
8961 /* A - B -> A + (-B) if B is easily negatable. */
8962 if (negate_expr_p (arg1)
8963 && ((FLOAT_TYPE_P (type)
8964 /* Avoid this transformation if B is a positive REAL_CST. */
8965 && (TREE_CODE (arg1) != REAL_CST
8966 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8967 || INTEGRAL_TYPE_P (type)))
8968 return fold_build2 (PLUS_EXPR, type,
8969 fold_convert (type, arg0),
8970 fold_convert (type, negate_expr (arg1)));
8972 /* Try folding difference of addresses. */
8974 HOST_WIDE_INT diff;
8976 if ((TREE_CODE (arg0) == ADDR_EXPR
8977 || TREE_CODE (arg1) == ADDR_EXPR)
8978 && ptr_difference_const (arg0, arg1, &diff))
8979 return build_int_cst_type (type, diff);
8982 /* Fold &a[i] - &a[j] to i-j. */
8983 if (TREE_CODE (arg0) == ADDR_EXPR
8984 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8985 && TREE_CODE (arg1) == ADDR_EXPR
8986 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8988 tree aref0 = TREE_OPERAND (arg0, 0);
8989 tree aref1 = TREE_OPERAND (arg1, 0);
8990 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8991 TREE_OPERAND (aref1, 0), 0))
8993 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8994 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8995 tree esz = array_ref_element_size (aref0);
8996 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8997 return fold_build2 (MULT_EXPR, type, diff,
8998 fold_convert (type, esz));
9003 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9004 of the array. The loop optimizer sometimes produces this type of
9005 expression. */
9006 if (TREE_CODE (arg0) == ADDR_EXPR)
9008 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9009 if (tem)
9010 return fold_convert (type, tem);
9013 if (flag_unsafe_math_optimizations
9014 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9015 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9016 && (tem = distribute_real_division (code, type, arg0, arg1)))
9017 return tem;
9019 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9020 same or one. */
9021 if ((TREE_CODE (arg0) == MULT_EXPR
9022 || TREE_CODE (arg1) == MULT_EXPR)
9023 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9025 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9026 if (tem)
9027 return tem;
9031 case MULT_EXPR:
9033 /* (-A) * (-B) -> A * B */
9034 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9035 return fold_build2 (MULT_EXPR, type,
9036 fold_convert (type, TREE_OPERAND (arg0, 0)),
9037 fold_convert (type, negate_expr (arg1)));
9038 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9039 return fold_build2 (MULT_EXPR, type,
9040 fold_convert (type, negate_expr (arg0)),
9041 fold_convert (type, TREE_OPERAND (arg1, 0)));
9043 if (! FLOAT_TYPE_P (type))
9045 if (integer_zerop (arg1))
9046 return omit_one_operand (type, arg1, arg0);
9047 if (integer_onep (arg1))
9048 return non_lvalue (fold_convert (type, arg0));
9049 /* Transform x * -1 into -x. */
9050 if (integer_all_onesp (arg1))
9051 return fold_convert (type, negate_expr (arg0));
9052 /* Transform x * -C into -x * C if x is easily negatable. */
9053 if (TREE_CODE (arg1) == INTEGER_CST
9054 && tree_int_cst_sgn (arg1) == -1
9055 && negate_expr_p (arg0)
9056 && (tem = negate_expr (arg1)) != arg1
9057 && !TREE_OVERFLOW (tem))
9058 return fold_build2 (MULT_EXPR, type,
9059 negate_expr (arg0), tem);
9061 /* (a * (1 << b)) is (a << b) */
9062 if (TREE_CODE (arg1) == LSHIFT_EXPR
9063 && integer_onep (TREE_OPERAND (arg1, 0)))
9064 return fold_build2 (LSHIFT_EXPR, type, arg0,
9065 TREE_OPERAND (arg1, 1));
9066 if (TREE_CODE (arg0) == LSHIFT_EXPR
9067 && integer_onep (TREE_OPERAND (arg0, 0)))
9068 return fold_build2 (LSHIFT_EXPR, type, arg1,
9069 TREE_OPERAND (arg0, 1));
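/* Illustrative sketch (editor's addition): "a * (1 << b)" folds to
   "a << b"; e.g. 3 * (1 << 4) == 48 == 3 << 4. */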
9071 if (TREE_CODE (arg1) == INTEGER_CST
9072 && 0 != (tem = extract_muldiv (op0,
9073 fold_convert (type, arg1),
9074 code, NULL_TREE)))
9075 return fold_convert (type, tem);
9077 /* Optimize z * conj(z) for integer complex numbers. */
9078 if (TREE_CODE (arg0) == CONJ_EXPR
9079 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9080 return fold_mult_zconjz (type, arg1);
9081 if (TREE_CODE (arg1) == CONJ_EXPR
9082 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9083 return fold_mult_zconjz (type, arg0);
9087 /* Maybe fold x * 0 to 0. The expressions aren't the same
9088 when x is NaN, since x * 0 is also NaN. Nor are they the
9089 same in modes with signed zeros, since multiplying a
9090 negative value by 0 gives -0, not +0. */
9091 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9092 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9093 && real_zerop (arg1))
9094 return omit_one_operand (type, arg1, arg0);
9095 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9096 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9097 && real_onep (arg1))
9098 return non_lvalue (fold_convert (type, arg0));
9100 /* Transform x * -1.0 into -x. */
9101 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9102 && real_minus_onep (arg1))
9103 return fold_convert (type, negate_expr (arg0));
9105 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9106 if (flag_unsafe_math_optimizations
9107 && TREE_CODE (arg0) == RDIV_EXPR
9108 && TREE_CODE (arg1) == REAL_CST
9109 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9111 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9112 arg1, 0);
9113 if (tem)
9114 return fold_build2 (RDIV_EXPR, type, tem,
9115 TREE_OPERAND (arg0, 1));
9118 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9119 if (operand_equal_p (arg0, arg1, 0))
9121 tree tem = fold_strip_sign_ops (arg0);
9122 if (tem != NULL_TREE)
9124 tem = fold_convert (type, tem);
9125 return fold_build2 (MULT_EXPR, type, tem, tem);
9129 /* Optimize z * conj(z) for floating point complex numbers.
9130 Guarded by flag_unsafe_math_optimizations as non-finite
9131 imaginary components don't produce scalar results. */
9132 if (flag_unsafe_math_optimizations
9133 && TREE_CODE (arg0) == CONJ_EXPR
9134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9135 return fold_mult_zconjz (type, arg1);
9136 if (flag_unsafe_math_optimizations
9137 && TREE_CODE (arg1) == CONJ_EXPR
9138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9139 return fold_mult_zconjz (type, arg0);
9141 if (flag_unsafe_math_optimizations)
9143 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9144 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9146 /* Optimizations of root(...)*root(...). */
9147 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9149 tree rootfn, arg, arglist;
9150 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9151 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9153 /* Optimize sqrt(x)*sqrt(x) as x. */
9154 if (BUILTIN_SQRT_P (fcode0)
9155 && operand_equal_p (arg00, arg10, 0)
9156 && ! HONOR_SNANS (TYPE_MODE (type)))
9157 return arg00;
9159 /* Optimize root(x)*root(y) as root(x*y). */
9160 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9161 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9162 arglist = build_tree_list (NULL_TREE, arg);
9163 return build_function_call_expr (rootfn, arglist);
9166 /* Optimize expN(x)*expN(y) as expN(x+y). */
9167 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9169 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9170 tree arg = fold_build2 (PLUS_EXPR, type,
9171 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9172 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9173 tree arglist = build_tree_list (NULL_TREE, arg);
9174 return build_function_call_expr (expfn, arglist);
9177 /* Optimizations of pow(...)*pow(...). */
9178 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9179 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9180 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9182 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9183 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9185 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9186 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9189 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9190 if (operand_equal_p (arg01, arg11, 0))
9192 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9193 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9194 tree arglist = tree_cons (NULL_TREE, arg,
9195 build_tree_list (NULL_TREE,
9196 arg01));
9197 return build_function_call_expr (powfn, arglist);
9200 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9201 if (operand_equal_p (arg00, arg10, 0))
9203 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9204 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9205 tree arglist = tree_cons (NULL_TREE, arg00,
9206 build_tree_list (NULL_TREE,
9207 arg));
9208 return build_function_call_expr (powfn, arglist);
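/* Illustrative sketch (editor's addition, valid only under
   -funsafe-math-optimizations): "pow (x, 2.5) * pow (x, 0.5)" becomes
   "pow (x, 3.0)", and "pow (x, y) * pow (z, y)" becomes
   "pow (x * z, y)"; each rewrite saves one call. The exponents are
   hypothetical. */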
9212 /* Optimize tan(x)*cos(x) as sin(x). */
9213 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9214 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9215 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9216 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9217 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9218 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9219 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9220 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9222 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9224 if (sinfn != NULL_TREE)
9225 return build_function_call_expr (sinfn,
9226 TREE_OPERAND (arg0, 1));
9229 /* Optimize x*pow(x,c) as pow(x,c+1). */
9230 if (fcode1 == BUILT_IN_POW
9231 || fcode1 == BUILT_IN_POWF
9232 || fcode1 == BUILT_IN_POWL)
9234 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9235 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9237 if (TREE_CODE (arg11) == REAL_CST
9238 && ! TREE_CONSTANT_OVERFLOW (arg11)
9239 && operand_equal_p (arg0, arg10, 0))
9241 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9242 tree arg, arglist;
9243 REAL_VALUE_TYPE c;
9245 c = TREE_REAL_CST (arg11);
9246 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9247 arg = build_real (type, c);
9248 arglist = build_tree_list (NULL_TREE, arg);
9249 arglist = tree_cons (NULL_TREE, arg0, arglist);
9250 return build_function_call_expr (powfn, arglist);
9254 /* Optimize pow(x,c)*x as pow(x,c+1). */
9255 if (fcode0 == BUILT_IN_POW
9256 || fcode0 == BUILT_IN_POWF
9257 || fcode0 == BUILT_IN_POWL)
9259 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9260 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9262 if (TREE_CODE (arg01) == REAL_CST
9263 && ! TREE_CONSTANT_OVERFLOW (arg01)
9264 && operand_equal_p (arg1, arg00, 0))
9266 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9267 tree arg, arglist;
9268 REAL_VALUE_TYPE c;
9270 c = TREE_REAL_CST (arg01);
9271 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9272 arg = build_real (type, c);
9273 arglist = build_tree_list (NULL_TREE, arg);
9274 arglist = tree_cons (NULL_TREE, arg1, arglist);
9275 return build_function_call_expr (powfn, arglist);
9279 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9280 if (! optimize_size
9281 && operand_equal_p (arg0, arg1, 0))
9283 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9285 if (powfn)
9287 tree arg = build_real (type, dconst2);
9288 tree arglist = build_tree_list (NULL_TREE, arg);
9289 arglist = tree_cons (NULL_TREE, arg0, arglist);
9290 return build_function_call_expr (powfn, arglist);
9297 case BIT_IOR_EXPR:
9298 bit_ior:
9299 if (integer_all_onesp (arg1))
9300 return omit_one_operand (type, arg1, arg0);
9301 if (integer_zerop (arg1))
9302 return non_lvalue (fold_convert (type, arg0));
9303 if (operand_equal_p (arg0, arg1, 0))
9304 return non_lvalue (fold_convert (type, arg0));
9306 /* ~X | X is -1. */
9307 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9308 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9310 t1 = build_int_cst (type, -1);
9311 t1 = force_fit_type (t1, 0, false, false);
9312 return omit_one_operand (type, t1, arg1);
9315 /* X | ~X is -1. */
9316 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9317 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9319 t1 = build_int_cst (type, -1);
9320 t1 = force_fit_type (t1, 0, false, false);
9321 return omit_one_operand (type, t1, arg0);
9324 /* Canonicalize (X & C1) | C2. */
9325 if (TREE_CODE (arg0) == BIT_AND_EXPR
9326 && TREE_CODE (arg1) == INTEGER_CST
9327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9329 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9330 int width = TYPE_PRECISION (type);
9331 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9332 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9333 hi2 = TREE_INT_CST_HIGH (arg1);
9334 lo2 = TREE_INT_CST_LOW (arg1);
9336 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9337 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9338 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9340 if (width > HOST_BITS_PER_WIDE_INT)
9342 mhi = (unsigned HOST_WIDE_INT) -1
9343 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9344 mlo = -1;
9346 else
9348 mhi = 0;
9349 mlo = (unsigned HOST_WIDE_INT) -1
9350 >> (HOST_BITS_PER_WIDE_INT - width);
9353 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9354 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9355 return fold_build2 (BIT_IOR_EXPR, type,
9356 TREE_OPERAND (arg0, 0), arg1);
9358 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9361 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9362 return fold_build2 (BIT_IOR_EXPR, type,
9363 fold_build2 (BIT_AND_EXPR, type,
9364 TREE_OPERAND (arg0, 0),
9365 build_int_cst_wide (type,
9366 lo1 & ~lo2,
9367 hi1 & ~hi2)),
9368 arg1);
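/* Illustrative sketch (editor's addition): with the hypothetical
   constants C1 == 0x3C and C2 == 0x0F, (C1 & C2) != C1 and
   (C1 | C2) != ~0, so the canonicalization shrinks C1 to
   C1 & ~C2 == 0x30, turning "(x & 0x3C) | 0x0F" into
   "(x & 0x30) | 0x0F". */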
9371 /* (X & Y) | Y is (X, Y). */
9372 if (TREE_CODE (arg0) == BIT_AND_EXPR
9373 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9374 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9375 /* (X & Y) | X is (Y, X). */
9376 if (TREE_CODE (arg0) == BIT_AND_EXPR
9377 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9378 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9379 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9380 /* X | (X & Y) is (Y, X). */
9381 if (TREE_CODE (arg1) == BIT_AND_EXPR
9382 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9383 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9384 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9385 /* X | (Y & X) is (Y, X). */
9386 if (TREE_CODE (arg1) == BIT_AND_EXPR
9387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9388 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9389 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9391 t1 = distribute_bit_expr (code, type, arg0, arg1);
9392 if (t1 != NULL_TREE)
9393 return t1;
9395 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9397 This results in more efficient code for machines without a NAND
9398 instruction. Combine will canonicalize to the first form
9399 which will allow use of NAND instructions provided by the
9400 backend if they exist. */
9401 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9402 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9404 return fold_build1 (BIT_NOT_EXPR, type,
9405 build2 (BIT_AND_EXPR, type,
9406 TREE_OPERAND (arg0, 0),
9407 TREE_OPERAND (arg1, 0)));
9410 /* See if this can be simplified into a rotate first. If that
9411 is unsuccessful continue in the association code. */
9412 goto bit_rotate;
9414 case BIT_XOR_EXPR:
9415 if (integer_zerop (arg1))
9416 return non_lvalue (fold_convert (type, arg0));
9417 if (integer_all_onesp (arg1))
9418 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9419 if (operand_equal_p (arg0, arg1, 0))
9420 return omit_one_operand (type, integer_zero_node, arg0);
9422 /* ~X ^ X is -1. */
9423 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9426 t1 = build_int_cst (type, -1);
9427 t1 = force_fit_type (t1, 0, false, false);
9428 return omit_one_operand (type, t1, arg1);
9431 /* X ^ ~X is -1. */
9432 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9433 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9435 t1 = build_int_cst (type, -1);
9436 t1 = force_fit_type (t1, 0, false, false);
9437 return omit_one_operand (type, t1, arg0);
9440 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9441 with a constant, and the two constants have no bits in common,
9442 we should treat this as a BIT_IOR_EXPR since this may produce more
9443 simplifications. */
9444 if (TREE_CODE (arg0) == BIT_AND_EXPR
9445 && TREE_CODE (arg1) == BIT_AND_EXPR
9446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9447 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9448 && integer_zerop (const_binop (BIT_AND_EXPR,
9449 TREE_OPERAND (arg0, 1),
9450 TREE_OPERAND (arg1, 1), 0)))
9452 code = BIT_IOR_EXPR;
9453 goto bit_ior;
9456 /* (X | Y) ^ X -> Y & ~X. */
9457 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9460 tree t2 = TREE_OPERAND (arg0, 1);
9461 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9463 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9464 fold_convert (type, t1));
9465 return t1;
9468 /* (Y | X) ^ X -> Y & ~X. */
9469 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9470 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9472 tree t2 = TREE_OPERAND (arg0, 0);
9473 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9475 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9476 fold_convert (type, t1));
9477 return t1;
9480 /* X ^ (X | Y) -> Y & ~X. */
9481 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9482 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9484 tree t2 = TREE_OPERAND (arg1, 1);
9485 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), arg0);
9487 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9488 fold_convert (type, t1));
9489 return t1;
9492 /* X ^ (Y | X) -> Y & ~X. */
9493 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9494 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9496 tree t2 = TREE_OPERAND (arg1, 0);
9497 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), arg0);
9499 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9500 fold_convert (type, t1));
9501 return t1;
9504 /* Convert ~X ^ ~Y to X ^ Y. */
9505 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9506 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9507 return fold_build2 (code, type,
9508 fold_convert (type, TREE_OPERAND (arg0, 0)),
9509 fold_convert (type, TREE_OPERAND (arg1, 0)));
9511 /* Convert ~X ^ C to X ^ ~C. */
9512 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9513 && TREE_CODE (arg1) == INTEGER_CST)
9514 return fold_build2 (code, type,
9515 fold_convert (type, TREE_OPERAND (arg0, 0)),
9516 fold_build1 (BIT_NOT_EXPR, type, arg1));
9518 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9519 if (TREE_CODE (arg0) == BIT_AND_EXPR
9520 && integer_onep (TREE_OPERAND (arg0, 1))
9521 && integer_onep (arg1))
9522 return fold_build2 (EQ_EXPR, type, arg0,
9523 build_int_cst (TREE_TYPE (arg0), 0));
9525 /* Fold (X & Y) ^ Y as ~X & Y. */
9526 if (TREE_CODE (arg0) == BIT_AND_EXPR
9527 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9529 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9530 return fold_build2 (BIT_AND_EXPR, type,
9531 fold_build1 (BIT_NOT_EXPR, type, tem),
9532 fold_convert (type, arg1));
9534 /* Fold (X & Y) ^ X as ~Y & X. */
9535 if (TREE_CODE (arg0) == BIT_AND_EXPR
9536 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9537 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9539 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9540 return fold_build2 (BIT_AND_EXPR, type,
9541 fold_build1 (BIT_NOT_EXPR, type, tem),
9542 fold_convert (type, arg1));
9544 /* Fold X ^ (X & Y) as X & ~Y. */
9545 if (TREE_CODE (arg1) == BIT_AND_EXPR
9546 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9548 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9549 return fold_build2 (BIT_AND_EXPR, type,
9550 fold_convert (type, arg0),
9551 fold_build1 (BIT_NOT_EXPR, type, tem));
9553 /* Fold X ^ (Y & X) as ~Y & X. */
9554 if (TREE_CODE (arg1) == BIT_AND_EXPR
9555 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9556 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9558 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9559 return fold_build2 (BIT_AND_EXPR, type,
9560 fold_build1 (BIT_NOT_EXPR, type, tem),
9561 fold_convert (type, arg0));
9564 /* See if this can be simplified into a rotate first. If that
9565 is unsuccessful continue in the association code. */
9566 goto bit_rotate;
9568 case BIT_AND_EXPR:
9569 if (integer_all_onesp (arg1))
9570 return non_lvalue (fold_convert (type, arg0));
9571 if (integer_zerop (arg1))
9572 return omit_one_operand (type, arg1, arg0);
9573 if (operand_equal_p (arg0, arg1, 0))
9574 return non_lvalue (fold_convert (type, arg0));
9576 /* ~X & X is always zero. */
9577 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9578 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9579 return omit_one_operand (type, integer_zero_node, arg1);
9581 /* X & ~X is always zero. */
9582 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9583 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9584 return omit_one_operand (type, integer_zero_node, arg0);
9586 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9587 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9588 && TREE_CODE (arg1) == INTEGER_CST
9589 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9590 return fold_build2 (BIT_IOR_EXPR, type,
9591 fold_build2 (BIT_AND_EXPR, type,
9592 TREE_OPERAND (arg0, 0), arg1),
9593 fold_build2 (BIT_AND_EXPR, type,
9594 TREE_OPERAND (arg0, 1), arg1));
9596 /* (X | Y) & Y is (X, Y). */
9597 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9598 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9599 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9600 /* (X | Y) & X is (Y, X). */
9601 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9603 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9604 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9605 /* X & (X | Y) is (Y, X). */
9606 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9607 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9608 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9609 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9610 /* X & (Y | X) is (Y, X). */
9611 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9612 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9613 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9614 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9616 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9617 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9618 && integer_onep (TREE_OPERAND (arg0, 1))
9619 && integer_onep (arg1))
9621 tem = TREE_OPERAND (arg0, 0);
9622 return fold_build2 (EQ_EXPR, type,
9623 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9624 build_int_cst (TREE_TYPE (tem), 1)),
9625 build_int_cst (TREE_TYPE (tem), 0));
9627 /* Fold ~X & 1 as (X & 1) == 0. */
9628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9629 && integer_onep (arg1))
9631 tem = TREE_OPERAND (arg0, 0);
9632 return fold_build2 (EQ_EXPR, type,
9633 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9634 build_int_cst (TREE_TYPE (tem), 1)),
9635 build_int_cst (TREE_TYPE (tem), 0));
9638 /* Fold (X ^ Y) & Y as ~X & Y. */
9639 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9640 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9642 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9643 return fold_build2 (BIT_AND_EXPR, type,
9644 fold_build1 (BIT_NOT_EXPR, type, tem),
9645 fold_convert (type, arg1));
9647 /* Fold (X ^ Y) & X as ~Y & X. */
9648 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9649 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9650 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9652 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9653 return fold_build2 (BIT_AND_EXPR, type,
9654 fold_build1 (BIT_NOT_EXPR, type, tem),
9655 fold_convert (type, arg1));
9657 /* Fold X & (X ^ Y) as X & ~Y. */
9658 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9661 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9662 return fold_build2 (BIT_AND_EXPR, type,
9663 fold_convert (type, arg0),
9664 fold_build1 (BIT_NOT_EXPR, type, tem));
9666 /* Fold X & (Y ^ X) as ~Y & X. */
9667 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9668 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9669 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9671 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9672 return fold_build2 (BIT_AND_EXPR, type,
9673 fold_build1 (BIT_NOT_EXPR, type, tem),
9674 fold_convert (type, arg0));
9677 t1 = distribute_bit_expr (code, type, arg0, arg1);
9678 if (t1 != NULL_TREE)
9679 return t1;
9680 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9681 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9682 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9684 unsigned int prec
9685 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9687 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9688 && (~TREE_INT_CST_LOW (arg1)
9689 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9690 return fold_convert (type, TREE_OPERAND (arg0, 0));
9693 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9695 This results in more efficient code for machines without a NOR
9696 instruction. Combine will canonicalize to the first form
9697 which will allow use of NOR instructions provided by the
9698 backend if they exist. */
9699 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9700 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9702 return fold_build1 (BIT_NOT_EXPR, type,
9703 build2 (BIT_IOR_EXPR, type,
9704 TREE_OPERAND (arg0, 0),
9705 TREE_OPERAND (arg1, 0)));
9709 case RDIV_EXPR:
9711 /* Don't touch a floating-point divide by zero unless the mode
9712 of the constant can represent infinity. */
9713 if (TREE_CODE (arg1) == REAL_CST
9714 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9715 && real_zerop (arg1))
9716 return NULL_TREE;
9718 /* Optimize A / A to 1.0 if we don't care about
9719 NaNs or Infinities. Skip the transformation
9720 for non-real operands. */
9721 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9722 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9723 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9724 && operand_equal_p (arg0, arg1, 0))
9726 tree r = build_real (TREE_TYPE (arg0), dconst1);
9728 return omit_two_operands (type, r, arg0, arg1);
9731 /* The complex version of the above A / A optimization. */
9732 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9733 && operand_equal_p (arg0, arg1, 0))
9735 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9736 if (! HONOR_NANS (TYPE_MODE (elem_type))
9737 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9739 tree r = build_real (elem_type, dconst1);
9740 /* omit_two_operands will call fold_convert for us. */
9741 return omit_two_operands (type, r, arg0, arg1);
9745 /* (-A) / (-B) -> A / B */
9746 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9747 return fold_build2 (RDIV_EXPR, type,
9748 TREE_OPERAND (arg0, 0),
9749 negate_expr (arg1));
9750 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9751 return fold_build2 (RDIV_EXPR, type,
9752 negate_expr (arg0),
9753 TREE_OPERAND (arg1, 0));
9755 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9756 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9757 && real_onep (arg1))
9758 return non_lvalue (fold_convert (type, arg0));
9760 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9761 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9762 && real_minus_onep (arg1))
9763 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9765 /* If ARG1 is a constant, we can convert this to a multiply by the
9766 reciprocal. This does not have the same rounding properties,
9767 so only do this if -funsafe-math-optimizations. We can actually
9768 always safely do it if ARG1 is a power of two, but it's hard to
9769 tell if it is or not in a portable manner. */
9770 if (TREE_CODE (arg1) == REAL_CST)
9772 if (flag_unsafe_math_optimizations
9773 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9774 arg1, 0)))
9775 return fold_build2 (MULT_EXPR, type, arg0, tem);
9776 /* Find the reciprocal if optimizing and the result is exact. */
9777 if (optimize)
9779 REAL_VALUE_TYPE r;
9780 r = TREE_REAL_CST (arg1);
9781 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9783 tem = build_real (type, r);
9784 return fold_build2 (MULT_EXPR, type,
9785 fold_convert (type, arg0), tem);
9789 /* Convert A/B/C to A/(B*C). */
9790 if (flag_unsafe_math_optimizations
9791 && TREE_CODE (arg0) == RDIV_EXPR)
9792 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9793 fold_build2 (MULT_EXPR, type,
9794 TREE_OPERAND (arg0, 1), arg1));
9796 /* Convert A/(B/C) to (A/B)*C. */
9797 if (flag_unsafe_math_optimizations
9798 && TREE_CODE (arg1) == RDIV_EXPR)
9799 return fold_build2 (MULT_EXPR, type,
9800 fold_build2 (RDIV_EXPR, type, arg0,
9801 TREE_OPERAND (arg1, 0)),
9802 TREE_OPERAND (arg1, 1));
9804 /* Convert C1/(X*C2) into (C1/C2)/X. */
9805 if (flag_unsafe_math_optimizations
9806 && TREE_CODE (arg1) == MULT_EXPR
9807 && TREE_CODE (arg0) == REAL_CST
9808 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9810 tree tem = const_binop (RDIV_EXPR, arg0,
9811 TREE_OPERAND (arg1, 1), 0);
9812 if (tem)
9813 return fold_build2 (RDIV_EXPR, type, tem,
9814 TREE_OPERAND (arg1, 0));
9817 if (flag_unsafe_math_optimizations)
9819 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9820 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9822 /* Optimize sin(x)/cos(x) as tan(x). */
9823 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9824 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9825 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9826 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9827 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9829 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9831 if (tanfn != NULL_TREE)
9832 return build_function_call_expr (tanfn,
9833 TREE_OPERAND (arg0, 1));
9836 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9837 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9838 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9839 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9840 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9841 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9843 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9845 if (tanfn != NULL_TREE)
9847 tree tmp = TREE_OPERAND (arg0, 1);
9848 tmp = build_function_call_expr (tanfn, tmp);
9849 return fold_build2 (RDIV_EXPR, type,
9850 build_real (type, dconst1), tmp);
9854 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9855 NaNs or Infinities. */
9856 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9857 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9858 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9860 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9861 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9863 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9864 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9865 && operand_equal_p (arg00, arg01, 0))
9867 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9869 if (cosfn != NULL_TREE)
9870 return build_function_call_expr (cosfn,
9871 TREE_OPERAND (arg0, 1));
9875 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9876 NaNs or Infinities. */
9877 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9878 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9879 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9881 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9882 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9884 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9885 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9886 && operand_equal_p (arg00, arg01, 0))
9888 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9890 if (cosfn != NULL_TREE)
9892 tree tmp = TREE_OPERAND (arg0, 1);
9893 tmp = build_function_call_expr (cosfn, tmp);
9894 return fold_build2 (RDIV_EXPR, type,
9895 build_real (type, dconst1),
9896 tmp);
9901 /* Optimize pow(x,c)/x as pow(x,c-1). */
9902 if (fcode0 == BUILT_IN_POW
9903 || fcode0 == BUILT_IN_POWF
9904 || fcode0 == BUILT_IN_POWL)
9906 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9907 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9908 if (TREE_CODE (arg01) == REAL_CST
9909 && ! TREE_CONSTANT_OVERFLOW (arg01)
9910 && operand_equal_p (arg1, arg00, 0))
9912 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9913 tree arg, arglist;
9914 REAL_VALUE_TYPE c;
9916 c = TREE_REAL_CST (arg01);
9917 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9918 arg = build_real (type, c);
9919 arglist = build_tree_list (NULL_TREE, arg);
9920 arglist = tree_cons (NULL_TREE, arg1, arglist);
9921 return build_function_call_expr (powfn, arglist);
9925 /* Optimize x/expN(y) into x*expN(-y). */
9926 if (BUILTIN_EXPONENT_P (fcode1))
9928 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9929 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9930 tree arglist = build_tree_list (NULL_TREE,
9931 fold_convert (type, arg));
9932 arg1 = build_function_call_expr (expfn, arglist);
9933 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9936 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9937 if (fcode1 == BUILT_IN_POW
9938 || fcode1 == BUILT_IN_POWF
9939 || fcode1 == BUILT_IN_POWL)
9941 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9942 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9943 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9944 tree neg11 = fold_convert (type, negate_expr (arg11));
9945 tree arglist = tree_cons (NULL_TREE, arg10,
9946 build_tree_list (NULL_TREE, neg11));
9947 arg1 = build_function_call_expr (powfn, arglist);
9948 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9953 case TRUNC_DIV_EXPR:
9954 case FLOOR_DIV_EXPR:
9955 /* Simplify A / (B << N) where A and B are positive and B is
9956 a power of 2, to A >> (N + log2(B)). */
9957 if (TREE_CODE (arg1) == LSHIFT_EXPR
9958 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9960 tree sval = TREE_OPERAND (arg1, 0);
9961 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9963 tree sh_cnt = TREE_OPERAND (arg1, 1);
9964 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9966 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9967 sh_cnt, build_int_cst (NULL_TREE, pow2));
9968 return fold_build2 (RSHIFT_EXPR, type,
9969 fold_convert (type, arg0), sh_cnt);
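/* Illustrative sketch (editor's addition): for unsigned x,
   "x / (4 << n)" divides by 2**(n + 2) and is folded to
   "x >> (n + 2)"; log2 of the power-of-two factor is simply added to
   the shift count. The names are hypothetical. */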
9974 case ROUND_DIV_EXPR:
9975 case CEIL_DIV_EXPR:
9976 case EXACT_DIV_EXPR:
9977 if (integer_onep (arg1))
9978 return non_lvalue (fold_convert (type, arg0));
9979 if (integer_zerop (arg1))
9980 return NULL_TREE;
9981 /* X / -1 is -X. */
9982 if (!TYPE_UNSIGNED (type)
9983 && TREE_CODE (arg1) == INTEGER_CST
9984 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9985 && TREE_INT_CST_HIGH (arg1) == -1)
9986 return fold_convert (type, negate_expr (arg0));
9988 /* Convert -A / -B to A / B when the type is signed and overflow is
9989 undefined. */
9990 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9991 && TREE_CODE (arg0) == NEGATE_EXPR
9992 && negate_expr_p (arg1))
9993 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9994 negate_expr (arg1));
9995 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9996 && TREE_CODE (arg1) == NEGATE_EXPR
9997 && negate_expr_p (arg0))
9998 return fold_build2 (code, type, negate_expr (arg0),
9999 TREE_OPERAND (arg1, 0));
10001 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10002 operation, EXACT_DIV_EXPR.
10004 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10005 At one time others generated faster code, but it's not clear if they
10006 do after the last round of changes to the DIV code in expmed.c. */
10007 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10008 && multiple_of_p (type, arg0, arg1))
10009 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10011 if (TREE_CODE (arg1) == INTEGER_CST
10012 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10013 return fold_convert (type, tem);
10017 case CEIL_MOD_EXPR:
10018 case FLOOR_MOD_EXPR:
10019 case ROUND_MOD_EXPR:
10020 case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
10023 if (integer_onep (arg1))
10024 return omit_one_operand (type, integer_zero_node, arg0);
10026 /* X % 0, return X % 0 unchanged so that we can get the
10027 proper warnings and errors. */
      if (integer_zerop (arg1))
	return NULL_TREE;
10031 /* 0 % X is always zero, but be sure to preserve any side
10032 effects in X. Place this after checking for X == 0. */
10033 if (integer_zerop (arg0))
10034 return omit_one_operand (type, integer_zero_node, arg1);
10036 /* X % -1 is zero. */
10037 if (!TYPE_UNSIGNED (type)
10038 && TREE_CODE (arg1) == INTEGER_CST
10039 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10040 && TREE_INT_CST_HIGH (arg1) == -1)
10041 return omit_one_operand (type, integer_zero_node, arg0);
10043 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10044 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10045 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
10050 to A & ((C << N) - 1). */
10051 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10052 c = TREE_OPERAND (arg1, 0);
10054 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10056 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10057 arg1, integer_one_node);
10058 return fold_build2 (BIT_AND_EXPR, type,
10059 fold_convert (type, arg0),
10060 fold_convert (type, mask));
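      /* E.g., for unsigned int x, x % 8 becomes x & 7, and
	 x % (2u << n) becomes x & ((2u << n) - 1); the modulus
	 operation disappears entirely.  */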
10064 /* X % -C is the same as X % C. */
10065 if (code == TRUNC_MOD_EXPR
10066 && !TYPE_UNSIGNED (type)
10067 && TREE_CODE (arg1) == INTEGER_CST
10068 && !TREE_CONSTANT_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
10071 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10072 && !sign_bit_p (arg1, arg1))
10073 return fold_build2 (code, type, fold_convert (type, arg0),
10074 fold_convert (type, negate_expr (arg1)));
10076 /* X % -Y is the same as X % Y. */
10077 if (code == TRUNC_MOD_EXPR
10078 && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
10081 return fold_build2 (code, type, fold_convert (type, arg0),
10082 fold_convert (type, TREE_OPERAND (arg1, 0)));
10084 if (TREE_CODE (arg1) == INTEGER_CST
10085 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10086 return fold_convert (type, tem);
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ... */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
10106 if (integer_zerop (arg0))
10107 return omit_one_operand (type, arg0, arg1);
10109 /* Since negative shift count is not well-defined,
10110 don't try to compute it in the compiler. */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;
10114 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10115 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10116 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10117 && host_integerp (TREE_OPERAND (arg0, 1), false)
10118 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10120 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10121 + TREE_INT_CST_LOW (arg1));
10123 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10124 being well defined. */
10125 if (low >= TYPE_PRECISION (type))
10127 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10128 low = low % TYPE_PRECISION (type);
10129 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10130 return build_int_cst (type, 0);
	    else
	      low = TYPE_PRECISION (type) - 1;
10135 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10136 build_int_cst (type, low));
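      /* E.g., (x << 3) << 5 becomes x << 8.  If the combined count
	 reaches the precision, say (x << 20) << 20 for a 32-bit unsigned
	 x, the result is the constant 0; for rotates the combined count
	 simply wraps modulo the precision instead.  */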
10139 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10140 into x & ((unsigned)-1 >> c) for unsigned types. */
10141 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10142 || (TYPE_UNSIGNED (type)
10143 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10144 && host_integerp (arg1, false)
10145 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10146 && host_integerp (TREE_OPERAND (arg0, 1), false)
10147 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10149 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10150 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  if (low0 == low1)
	    {
	      tree lshift;
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
10159 lshift = int_const_binop (code, lshift, arg1, 0);
10161 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10165 /* Rewrite an LROTATE_EXPR by a constant into an
10166 RROTATE_EXPR by a new constant. */
10167 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10169 tree tem = build_int_cst (NULL_TREE,
10170 GET_MODE_BITSIZE (TYPE_MODE (type)));
10171 tem = fold_convert (TREE_TYPE (arg1), tem);
10172 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10173 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
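	  /* E.g., rotating a 32-bit value left by 8 becomes rotating it
	     right by 24, so later code only ever sees one rotate
	     direction.  */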
10176 /* If we have a rotate of a bit operation with the rotate count and
10177 the second operand of the bit operation both constant,
10178 permute the two operations. */
10179 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10180 && (TREE_CODE (arg0) == BIT_AND_EXPR
10181 || TREE_CODE (arg0) == BIT_IOR_EXPR
10182 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10183 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10184 return fold_build2 (TREE_CODE (arg0), type,
10185 fold_build2 (code, type,
10186 TREE_OPERAND (arg0, 0), arg1),
10187 fold_build2 (code, type,
10188 TREE_OPERAND (arg0, 1), arg1));
      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
10192 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10193 && TREE_CODE (arg0) == RROTATE_EXPR
10194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10195 && TREE_INT_CST_HIGH (arg1) == 0
10196 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10197 && ((TREE_INT_CST_LOW (arg1)
10198 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10199 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
10206 return omit_one_operand (type, arg0, arg1);
10207 if (INTEGRAL_TYPE_P (type)
10208 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10209 return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
10217 return omit_one_operand (type, arg0, arg1);
10218 if (INTEGRAL_TYPE_P (type)
10219 && TYPE_MAX_VALUE (type)
10220 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10221 return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
10228 /* Note that the operands of this must be ints
10229 and their values must be 0 or 1.
10230 ("true" is a fixed value perhaps depending on the language.) */
10231 /* If first arg is constant zero, return it. */
10232 if (integer_zerop (arg0))
10233 return fold_convert (type, arg0);
10234 case TRUTH_AND_EXPR:
10235 /* If either arg is constant true, drop it. */
10236 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10237 return non_lvalue (fold_convert (type, arg1));
10238 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10239 /* Preserve sequence points. */
10240 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10241 return non_lvalue (fold_convert (type, arg0));
10242 /* If second arg is constant zero, result is zero, but first arg
10243 must be evaluated. */
10244 if (integer_zerop (arg1))
10245 return omit_one_operand (type, arg1, arg0);
10246 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10247 case will be handled here. */
10248 if (integer_zerop (arg0))
10249 return omit_one_operand (type, arg0, arg1);
10251 /* !X && X is always false. */
10252 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10253 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10254 return omit_one_operand (type, integer_zero_node, arg1);
10255 /* X && !X is always false. */
10256 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10257 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10258 return omit_one_operand (type, integer_zero_node, arg0);
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */
10264 if (!TREE_SIDE_EFFECTS (arg0)
10265 && !TREE_SIDE_EFFECTS (arg1))
10267 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10268 if (tem && !operand_equal_p (tem, arg0, 0))
10269 return fold_build2 (code, type, tem, arg1);
10271 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10272 if (tem && !operand_equal_p (tem, arg1, 0))
10273 return fold_build2 (code, type, arg0, tem);
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;
10281 /* Check for things like (A || B) && (A || C). We can convert this
10282 to A || (B && C). Note that either operator can be any of the four
10283 truth and/or operations and the transformation will still be
10284 valid. Also note that we only care about order for the
10285 ANDIF and ORIF operators. If B contains side effects, this
10286 might change the truth-value of A. */
10287 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10288 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10289 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10290 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10291 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10292 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10294 tree a00 = TREE_OPERAND (arg0, 0);
10295 tree a01 = TREE_OPERAND (arg0, 1);
10296 tree a10 = TREE_OPERAND (arg1, 0);
10297 tree a11 = TREE_OPERAND (arg1, 1);
10298 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10299 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10300 && (code == TRUTH_AND_EXPR
10301 || code == TRUTH_OR_EXPR));
10303 if (operand_equal_p (a00, a10, 0))
10304 return fold_build2 (TREE_CODE (arg0), type, a00,
10305 fold_build2 (code, type, a01, a11));
10306 else if (commutative && operand_equal_p (a00, a11, 0))
10307 return fold_build2 (TREE_CODE (arg0), type, a00,
10308 fold_build2 (code, type, a01, a10));
10309 else if (commutative && operand_equal_p (a01, a10, 0))
10310 return fold_build2 (TREE_CODE (arg0), type, a01,
10311 fold_build2 (code, type, a00, a11));
	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
10323 /* See if we can build a range comparison. */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;
10327 /* Check for the possibility of merging component references. If our
10328 lhs is another similar operation, try to merge its rhs with our
10329 rhs. Then try to merge our lhs and rhs. */
10330 if (TREE_CODE (arg0) == code
10331 && 0 != (tem = fold_truthop (code, type,
10332 TREE_OPERAND (arg0, 1), arg1)))
10333 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
10341 /* Note that the operands of this must be ints
10342 and their values must be 0 or true.
10343 ("true" is a fixed value perhaps depending on the language.) */
10344 /* If first arg is constant true, return it. */
10345 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10346 return fold_convert (type, arg0);
10347 case TRUTH_OR_EXPR:
10348 /* If either arg is constant zero, drop it. */
10349 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10350 return non_lvalue (fold_convert (type, arg1));
10351 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10352 /* Preserve sequence points. */
10353 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10354 return non_lvalue (fold_convert (type, arg0));
10355 /* If second arg is constant true, result is true, but we must
10356 evaluate first arg. */
10357 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10358 return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
10361 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10362 return omit_one_operand (type, arg0, arg1);
10364 /* !X || X is always true. */
10365 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10366 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10367 return omit_one_operand (type, integer_one_node, arg1);
10368 /* X || !X is always true. */
10369 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10370 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10371 return omit_one_operand (type, integer_one_node, arg0);
10375 case TRUTH_XOR_EXPR:
10376 /* If the second arg is constant zero, drop it. */
10377 if (integer_zerop (arg1))
10378 return non_lvalue (fold_convert (type, arg0));
10379 /* If the second arg is constant true, this is a logical inversion. */
10380 if (integer_onep (arg1))
10382 /* Only call invert_truthvalue if operand is a truth value. */
10383 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
10387 return non_lvalue (fold_convert (type, tem));
10389 /* Identical arguments cancel to zero. */
10390 if (operand_equal_p (arg0, arg1, 0))
10391 return omit_one_operand (type, integer_zero_node, arg0);
10393 /* !X ^ X is always true. */
10394 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10396 return omit_one_operand (type, integer_one_node, arg1);
10398 /* X ^ !X is always true. */
10399 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10400 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10401 return omit_one_operand (type, integer_one_node, arg0);
      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
10411 /* bool_var != 0 becomes bool_var. */
10412 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10413 && code == NE_EXPR)
10414 return non_lvalue (fold_convert (type, arg0));
10416 /* bool_var == 1 becomes bool_var. */
10417 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10418 && code == EQ_EXPR)
10419 return non_lvalue (fold_convert (type, arg0));
10421 /* bool_var != 1 becomes !bool_var. */
10422 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10423 && code == NE_EXPR)
10424 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10426 /* bool_var == 0 becomes !bool_var. */
10427 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10428 && code == EQ_EXPR)
10429 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
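      /* To wit, for a boolean-typed variable b:
	   b != 0  =>  b		b == 1  =>  b
	   b != 1  =>  !b		b == 0  =>  !b  */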
10431 /* If this is an equality comparison of the address of a non-weak
10432 object against zero, then we know the result. */
10433 if (TREE_CODE (arg0) == ADDR_EXPR
10434 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10435 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10436 && integer_zerop (arg1))
10437 return constant_boolean_node (code != EQ_EXPR, type);
10439 /* If this is an equality comparison of the address of two non-weak,
10440 unaliased symbols neither of which are extern (since we do not
10441 have access to attributes for externs), then we know the result. */
10442 if (TREE_CODE (arg0) == ADDR_EXPR
10443 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10444 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10445 && ! lookup_attribute ("alias",
10446 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10447 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10448 && TREE_CODE (arg1) == ADDR_EXPR
10449 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10450 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10451 && ! lookup_attribute ("alias",
10452 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10453 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10455 /* We know that we're looking at the address of two
10456 non-weak, unaliased, static _DECL nodes.
10458 It is both wasteful and incorrect to call operand_equal_p
10459 to compare the two ADDR_EXPR nodes. It is wasteful in that
10460 all we need to do is test pointer equality for the arguments
10461 to the two ADDR_EXPR nodes. It is incorrect to use
10462 operand_equal_p as that function is NOT equivalent to a
10463 C equality test. It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
10466 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10467 return constant_boolean_node (equal
					kod == EQ_EXPR ? equal : !equal
10472 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10473 a MINUS_EXPR of a constant, we can convert it into a comparison with
10474 a revised constant as long as no overflow occurs. */
10475 if (TREE_CODE (arg1) == INTEGER_CST
10476 && (TREE_CODE (arg0) == PLUS_EXPR
10477 || TREE_CODE (arg0) == MINUS_EXPR)
10478 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10479 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10480 ? MINUS_EXPR : PLUS_EXPR,
10481 fold_convert (TREE_TYPE (arg0), arg1),
10482 TREE_OPERAND (arg0, 1), 0))
10483 && ! TREE_CONSTANT_OVERFLOW (tem))
10484 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10486 /* Similarly for a NEGATE_EXPR. */
10487 if (TREE_CODE (arg0) == NEGATE_EXPR
10488 && TREE_CODE (arg1) == INTEGER_CST
10489 && 0 != (tem = negate_expr (arg1))
10490 && TREE_CODE (tem) == INTEGER_CST
10491 && ! TREE_CONSTANT_OVERFLOW (tem))
10492 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10494 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10495 for !=. Don't do this for ordered comparisons due to overflow. */
10496 if (TREE_CODE (arg0) == MINUS_EXPR
10497 && integer_zerop (arg1))
10498 return fold_build2 (code, type,
10499 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10501 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10502 if (TREE_CODE (arg0) == ABS_EXPR
10503 && (integer_zerop (arg1) || real_zerop (arg1)))
10504 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10506 /* If this is an EQ or NE comparison with zero and ARG0 is
10507 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10508 two operations, but the latter can be done in one less insn
10509 on machines that have only two-operand insns or on which a
10510 constant cannot be the first operand. */
10511 if (TREE_CODE (arg0) == BIT_AND_EXPR
10512 && integer_zerop (arg1))
10514 tree arg00 = TREE_OPERAND (arg0, 0);
10515 tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					   arg01, TREE_OPERAND (arg00, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					   arg00, TREE_OPERAND (arg01, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
10538 /* If this is an NE or EQ comparison of zero against the result of a
10539 signed MOD operation whose second operand is a power of 2, make
10540 the MOD operation unsigned since it is simpler and equivalent. */
10541 if (integer_zerop (arg1)
10542 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10543 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10544 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10545 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10546 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10547 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10549 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10550 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10551 fold_convert (newtype,
10552 TREE_OPERAND (arg0, 0)),
10553 fold_convert (newtype,
10554 TREE_OPERAND (arg0, 1)));
10556 return fold_build2 (code, type, newmod,
10557 fold_convert (newtype, arg1));
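      /* E.g., for signed int x, x % 4 == 0 becomes
	 (unsigned int) x % 4U == 0, and the unsigned modulus can then be
	 expanded as a simple mask; this is safe because x and
	 (unsigned int) x differ by a multiple of 4.  */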
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
10563 if (TREE_CODE (arg0) == BIT_AND_EXPR
10564 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
10567 && integer_pow2p (TREE_OPERAND (arg0, 1))
10568 && integer_zerop (arg1))
10570 tree itype = TREE_TYPE (arg0);
10571 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10572 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10574 /* Check for a valid shift count. */
10575 if (TREE_INT_CST_HIGH (arg001) == 0
10576 && TREE_INT_CST_LOW (arg001) < prec)
10578 tree arg01 = TREE_OPERAND (arg0, 1);
10579 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10580 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10581 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10582 can be rewritten as (X & (C2 << C1)) != 0. */
10583 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10585 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10586 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10587 return fold_build2 (code, type, tem, arg1);
10589 /* Otherwise, for signed (arithmetic) shifts,
10590 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10591 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10592 else if (!TYPE_UNSIGNED (itype))
10593 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10594 arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
10606 /* If this is an NE comparison of zero with an AND of one, remove the
10607 comparison since the AND will give the correct value. */
10608 if (code == NE_EXPR
10609 && integer_zerop (arg1)
10610 && TREE_CODE (arg0) == BIT_AND_EXPR
10611 && integer_onep (TREE_OPERAND (arg0, 1)))
10612 return fold_convert (type, arg0);
10614 /* If we have (A & C) == C where C is a power of 2, convert this into
10615 (A & C) != 0. Similarly for NE_EXPR. */
10616 if (TREE_CODE (arg0) == BIT_AND_EXPR
10617 && integer_pow2p (TREE_OPERAND (arg0, 1))
10618 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10619 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10620 arg0, fold_convert (TREE_TYPE (arg0),
10621 integer_zero_node));
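      /* E.g., (a & 8) == 8 becomes (a & 8) != 0 and (a & 8) != 8 becomes
	 (a & 8) == 0, feeding the single-bit sign test just below.  */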
10623 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10624 bit, then fold the expression into A < 0 or A >= 0. */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
10629 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10630 Similarly for NE_EXPR. */
10631 if (TREE_CODE (arg0) == BIT_AND_EXPR
10632 && TREE_CODE (arg1) == INTEGER_CST
10633 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10635 tree notc = fold_build1 (BIT_NOT_EXPR,
10636 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10637 TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10641 if (integer_nonzerop (dandnotc))
10642 return omit_one_operand (type, rslt, arg0);
10645 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10646 Similarly for NE_EXPR. */
10647 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10648 && TREE_CODE (arg1) == INTEGER_CST
10649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10651 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10652 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10653 TREE_OPERAND (arg0, 1), notd);
10654 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10655 if (integer_nonzerop (candnotd))
10656 return omit_one_operand (type, rslt, arg0);
10659 /* If this is a comparison of a field, we may be able to simplify it. */
10660 if (((TREE_CODE (arg0) == COMPONENT_REF
10661 && lang_hooks.can_use_bit_fields_p ())
10662 || TREE_CODE (arg0) == BIT_FIELD_REF)
10663 /* Handle the constant case even without -O
10664 to make sure the warnings are given. */
10665 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
10672 /* Optimize comparisons of strlen vs zero to a compare of the
10673 first character of the string vs zero. To wit,
10674 strlen(ptr) == 0 => *ptr == 0
10675 strlen(ptr) != 0 => *ptr != 0
10676 Other cases should reduce to one of these two (or a constant)
10677 due to the return value of strlen being unsigned. */
10678 if (TREE_CODE (arg0) == CALL_EXPR
10679 && integer_zerop (arg1))
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10686 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10687 && (arglist = TREE_OPERAND (arg0, 1))
10688 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10689 && ! TREE_CHAIN (arglist))
10691 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10692 return fold_build2 (code, type, iref,
10693 build_int_cst (TREE_TYPE (iref), 0));
10697 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10698 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10699 if (TREE_CODE (arg0) == RSHIFT_EXPR
10700 && integer_zerop (arg1)
10701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10703 tree arg00 = TREE_OPERAND (arg0, 0);
10704 tree arg01 = TREE_OPERAND (arg0, 1);
10705 tree itype = TREE_TYPE (arg00);
10706 if (TREE_INT_CST_HIGH (arg01) == 0
10707 && TREE_INT_CST_LOW (arg01)
10708 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10710 if (TYPE_UNSIGNED (itype))
10712 itype = lang_hooks.types.signed_type (itype);
10713 arg00 = fold_convert (itype, arg00);
10715 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10716 type, arg00, build_int_cst (itype, 0));
10720 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10721 if (integer_zerop (arg1)
10722 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10723 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10724 TREE_OPERAND (arg0, 1));
10726 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10727 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10728 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10729 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10730 build_int_cst (TREE_TYPE (arg1), 0));
10731 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10733 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10734 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10735 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10736 build_int_cst (TREE_TYPE (arg1), 0));
10738 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10739 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10740 && TREE_CODE (arg1) == INTEGER_CST
10741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10742 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10743 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10744 TREE_OPERAND (arg0, 1), arg1));
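      /* E.g., (x ^ 5) == 7 folds to x == (5 ^ 7), i.e. x == 2.  */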
10746 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10747 (X & C) == 0 when C is a single bit. */
10748 if (TREE_CODE (arg0) == BIT_AND_EXPR
10749 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10750 && integer_zerop (arg1)
10751 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10753 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10754 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10755 TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem,
			      build_int_cst (TREE_TYPE (tem), 0));
10760 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10761 constant C is a power of two, i.e. a single bit. */
10762 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10763 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10764 && integer_zerop (arg1)
10765 && integer_pow2p (TREE_OPERAND (arg0, 1))
10766 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10767 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10769 tree arg00 = TREE_OPERAND (arg0, 0);
10770 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10771 arg00, build_int_cst (TREE_TYPE (arg00), 0));
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
10776 if (TREE_CODE (arg0) == BIT_AND_EXPR
10777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10778 && integer_zerop (arg1)
10779 && integer_pow2p (TREE_OPERAND (arg0, 1))
10780 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10781 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10783 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10784 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10785 arg000, TREE_OPERAND (arg0, 1));
10786 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10787 tem, build_int_cst (TREE_TYPE (tem), 0));
10790 if (integer_zerop (arg1)
10791 && tree_expr_nonzero_p (arg0))
	  tree res = constant_boolean_node (code == NE_EXPR, type);
10794 return omit_one_operand (type, res, arg0);
10797 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10798 if (TREE_CODE (arg0) == NEGATE_EXPR
10799 && TREE_CODE (arg1) == NEGATE_EXPR)
10800 return fold_build2 (code, type,
10801 TREE_OPERAND (arg0, 0),
10802 TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
10814 /* Transform comparisons of the form X +- C CMP X. */
10815 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10817 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10818 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10819 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10820 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10821 && !(flag_wrapv || flag_trapv))))
10823 tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;
10827 if (TREE_CODE (arg01) == REAL_CST)
10828 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);
10832 /* (X - c) > X becomes false. */
10833 if (code == GT_EXPR
10834 && ((code0 == MINUS_EXPR && is_positive >= 0)
10835 || (code0 == PLUS_EXPR && is_positive <= 0)))
10836 return constant_boolean_node (0, type);
10838 /* Likewise (X + c) < X becomes false. */
10839 if (code == LT_EXPR
10840 && ((code0 == PLUS_EXPR && is_positive >= 0)
10841 || (code0 == MINUS_EXPR && is_positive <= 0)))
10842 return constant_boolean_node (0, type);
10844 /* Convert (X - c) <= X to true. */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
10847 && ((code0 == MINUS_EXPR && is_positive >= 0)
10848 || (code0 == PLUS_EXPR && is_positive <= 0)))
10849 return constant_boolean_node (1, type);
10851 /* Convert (X + c) >= X to true. */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
10854 && ((code0 == PLUS_EXPR && is_positive >= 0)
10855 || (code0 == MINUS_EXPR && is_positive <= 0)))
10856 return constant_boolean_node (1, type);
10858 if (TREE_CODE (arg01) == INTEGER_CST)
10860 /* Convert X + c > X and X - c < X to true for integers. */
10861 if (code == GT_EXPR
10862 && ((code0 == PLUS_EXPR && is_positive > 0)
10863 || (code0 == MINUS_EXPR && is_positive < 0)))
10864 return constant_boolean_node (1, type);
10866 if (code == LT_EXPR
10867 && ((code0 == MINUS_EXPR && is_positive > 0)
10868 || (code0 == PLUS_EXPR && is_positive < 0)))
10869 return constant_boolean_node (1, type);
10871 /* Convert X + c <= X and X - c >= X to false for integers. */
10872 if (code == LE_EXPR
10873 && ((code0 == PLUS_EXPR && is_positive > 0)
10874 || (code0 == MINUS_EXPR && is_positive < 0)))
10875 return constant_boolean_node (0, type);
10877 if (code == GE_EXPR
10878 && ((code0 == MINUS_EXPR && is_positive > 0)
10879 || (code0 == PLUS_EXPR && is_positive < 0)))
10880 return constant_boolean_node (0, type);
10884 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10885 This transformation affects the cases which are handled in later
10886 optimizations involving comparisons with non-negative constants. */
10887 if (TREE_CODE (arg1) == INTEGER_CST
10888 && TREE_CODE (arg0) != INTEGER_CST
10889 && tree_int_cst_sgn (arg1) > 0)
10891 if (code == GE_EXPR)
10893 arg1 = const_binop (MINUS_EXPR, arg1,
10894 build_int_cst (TREE_TYPE (arg1), 1), 0);
10895 return fold_build2 (GT_EXPR, type, arg0,
10896 fold_convert (TREE_TYPE (arg0), arg1));
10898 if (code == LT_EXPR)
10900 arg1 = const_binop (MINUS_EXPR, arg1,
10901 build_int_cst (TREE_TYPE (arg1), 1), 0);
10902 return fold_build2 (LE_EXPR, type, arg0,
10903 fold_convert (TREE_TYPE (arg0), arg1));
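      /* E.g., x >= 3 becomes x > 2 and x < 3 becomes x <= 2, so the
	 special-constant checks below mostly need to recognize only the
	 GT/LE forms.  */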
10907 /* Comparisons with the highest or lowest possible integer of
10908 the specified size will have known values. */
10910 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10912 if (TREE_CODE (arg1) == INTEGER_CST
10913 && ! TREE_CONSTANT_OVERFLOW (arg1)
10914 && width <= 2 * HOST_BITS_PER_WIDE_INT
10915 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10916 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10918 HOST_WIDE_INT signed_max_hi;
10919 unsigned HOST_WIDE_INT signed_max_lo;
10920 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10922 if (width <= HOST_BITS_PER_WIDE_INT)
	      signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
			      - 1;
	      signed_max_hi = 0;
	      max_hi = 0;
10929 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		{
		  max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		  min_lo = 0;
		  min_hi = 0;
		}
	      else
		{
		  max_lo = signed_max_lo;
		  min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  min_hi = -1;
		}
	    }
	  else
	    {
10944 width -= HOST_BITS_PER_WIDE_INT;
10945 signed_max_lo = -1;
	      signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
			      - 1;
	      max_lo = -1;
	      min_lo = 0;
10951 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		{
		  max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		  min_hi = 0;
		}
	      else
		{
		  max_hi = signed_max_hi;
		  min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		}
	    }
	  if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
	      && TREE_INT_CST_LOW (arg1) == max_lo)
	    switch (code)
	      {
	      case GT_EXPR:
		return omit_one_operand (type, integer_zero_node, arg0);

	      case GE_EXPR:
		return fold_build2 (EQ_EXPR, type, arg0, arg1);

	      case LE_EXPR:
		return omit_one_operand (type, integer_one_node, arg0);

	      case LT_EXPR:
		return fold_build2 (NE_EXPR, type, arg0, arg1);

	      /* The GE_EXPR and LT_EXPR cases above are not normally
		 reached because of previous transformations.  */

	      default:
		break;
	      }
	  else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		      == max_hi
		   && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	    switch (code)
	      {
	      case GT_EXPR:
		arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (EQ_EXPR, type, arg0, arg1);

	      case LE_EXPR:
		arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (NE_EXPR, type, arg0, arg1);

	      default:
		break;
	      }
	  else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		      == min_hi
		   && TREE_INT_CST_LOW (arg1) == min_lo)
	    switch (code)
	      {
	      case LT_EXPR:
		return omit_one_operand (type, integer_zero_node, arg0);

	      case LE_EXPR:
		return fold_build2 (EQ_EXPR, type, arg0, arg1);

	      case GE_EXPR:
		return omit_one_operand (type, integer_one_node, arg0);

	      case GT_EXPR:
		return fold_build2 (NE_EXPR, type, op0, op1);

	      default:
		break;
	      }
	  else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		      == min_hi
		   && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	    switch (code)
	      {
	      case GE_EXPR:
		arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (NE_EXPR, type, arg0, arg1);

	      case LT_EXPR:
		arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (EQ_EXPR, type, arg0, arg1);

	      default:
		break;
	      }
11034 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11035 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11036 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11037 /* signed_type does not work on pointer types. */
11038 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      /* The following case also applies to X < signed_max+1
		 and X >= signed_max+1 because of previous transformations.  */
11042 if (code == LE_EXPR || code == GT_EXPR)
		{
		  tree st0, st1;
		  st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11046 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11047 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11048 type, fold_convert (st0, arg0),
11049 build_int_cst (st1, 0));
11055 /* If we are comparing an ABS_EXPR with a constant, we can
11056 convert all the cases into explicit comparisons, but they may
11057 well not be faster than doing the ABS and one comparison.
11058 But ABS (X) <= C is a range comparison, which becomes a subtraction
11059 and a comparison, and is probably faster. */
11060 if (code == LE_EXPR
11061 && TREE_CODE (arg1) == INTEGER_CST
11062 && TREE_CODE (arg0) == ABS_EXPR
11063 && ! TREE_SIDE_EFFECTS (arg0)
11064 && (0 != (tem = negate_expr (arg1)))
11065 && TREE_CODE (tem) == INTEGER_CST
11066 && ! TREE_CONSTANT_OVERFLOW (tem))
11067 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11068 build2 (GE_EXPR, type,
11069 TREE_OPERAND (arg0, 0), tem),
11070 build2 (LE_EXPR, type,
11071 TREE_OPERAND (arg0, 0), arg1));
11073 /* Convert ABS_EXPR<x> >= 0 to true. */
11074 if (code == GE_EXPR
11075 && tree_expr_nonnegative_p (arg0)
11076 && (integer_zerop (arg1)
11077 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11078 && real_zerop (arg1))))
11079 return omit_one_operand (type, integer_one_node, arg0);
11081 /* Convert ABS_EXPR<x> < 0 to false. */
11082 if (code == LT_EXPR
11083 && tree_expr_nonnegative_p (arg0)
11084 && (integer_zerop (arg1) || real_zerop (arg1)))
11085 return omit_one_operand (type, integer_zero_node, arg0);
11087 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11088 and similarly for >= into !=. */
11089 if ((code == LT_EXPR || code == GE_EXPR)
11090 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11091 && TREE_CODE (arg1) == LSHIFT_EXPR
11092 && integer_onep (TREE_OPERAND (arg1, 0)))
11093 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11094 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11095 TREE_OPERAND (arg1, 1)),
11096 build_int_cst (TREE_TYPE (arg0), 0));
11098 if ((code == LT_EXPR || code == GE_EXPR)
11099 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11100 && (TREE_CODE (arg1) == NOP_EXPR
11101 || TREE_CODE (arg1) == CONVERT_EXPR)
11102 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11103 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));
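      /* E.g., for unsigned x, x < (1u << y) becomes (x >> y) == 0 and
	 x >= (1u << y) becomes (x >> y) != 0, avoiding materializing the
	 power of two.  */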
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
11122 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11124 t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
11129 /* If the first operand is NaN, the result is constant. */
11130 if (TREE_CODE (arg0) == REAL_CST
11131 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11132 && (code != LTGT_EXPR || ! flag_trapping_math))
11134 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11135 ? integer_zero_node
11136 : integer_one_node;
11137 return omit_one_operand (type, t1, arg1);
11140 /* If the second operand is NaN, the result is constant. */
11141 if (TREE_CODE (arg1) == REAL_CST
11142 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11143 && (code != LTGT_EXPR || ! flag_trapping_math))
11145 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11146 ? integer_zero_node
11147 : integer_one_node;
11148 return omit_one_operand (type, t1, arg0);
11151 /* Simplify unordered comparison of something with itself. */
11152 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11153 && operand_equal_p (arg0, arg1, 0))
11154 return constant_boolean_node (1, type);
11156 if (code == LTGT_EXPR
11157 && !flag_trapping_math
11158 && operand_equal_p (arg0, arg1, 0))
11159 return constant_boolean_node (0, type);
11161 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11163 tree targ0 = strip_float_extensions (arg0);
11164 tree targ1 = strip_float_extensions (arg1);
11165 tree newtype = TREE_TYPE (targ0);
11167 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11168 newtype = TREE_TYPE (targ1);
11170 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11171 return fold_build2 (code, type, fold_convert (newtype, targ0),
11172 fold_convert (newtype, targ1));
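	/* E.g., if f and g are floats, (double) f < (double) g folds to
	   f < g: the widening conversion is exact, so it cannot change
	   the comparison's result.  */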
11177 case COMPOUND_EXPR:
11178 /* When pedantic, a compound expression can be neither an lvalue
11179 nor an integer constant expression. */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
11182 /* Don't let (0, 0) be null pointer constant. */
11183 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11184 : fold_convert (type, arg1);
11185 return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
11189 && TREE_CODE (arg1) == REAL_CST)
11190 || (TREE_CODE (arg0) == INTEGER_CST
11191 && TREE_CODE (arg1) == INTEGER_CST))
11192 return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
11197 gcc_unreachable ();
11201 } /* switch (code) */
/* Callback for walk_tree, looking for LABEL_EXPR.  Returns *TP if it is
   a LABEL_EXPR; otherwise it returns NULL_TREE.  Do not check the
   sub-tree of GOTO_EXPR.  */
static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* ... fall through ... */
    default:
      return NULL_TREE;
    }
}
11225 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11226 accessible from outside the sub-tree. Returns NULL_TREE if no
11227 addressable label is found. */
11230 contains_label_p (tree st)
11232 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11235 /* Fold a ternary expression of code CODE and type TYPE with operands
11236 OP0, OP1, and OP2. Return the folded expression if folding is
11237 successful. Otherwise, return NULL_TREE. */
static tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
11243 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11244 enum tree_code_class kind = TREE_CODE_CLASS (code);
11246 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11247 && TREE_CODE_LENGTH (code) == 3);
11249 /* Strip any conversions that don't change the mode. This is safe
11250 for every expression, except for a comparison expression because
11251 its signedness is derived from its operands. So, in the latter
11252 case, only strip conversions that don't change the signedness.
11254 Note that this is done as an internal manipulation within the
11255 constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
11257 the appropriate type conversions should be put back in the tree
11258 that will get out of the constant folder. */
11273 case COMPONENT_REF:
11274 if (TREE_CODE (arg0) == CONSTRUCTOR
11275 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
11286 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11287 so all simple results must be passed through pedantic_non_lvalue. */
11288 if (TREE_CODE (arg0) == INTEGER_CST)
11290 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11291 tem = integer_zerop (arg0) ? op2 : op1;
11292 /* Only optimize constant conditions when the selected branch
11293 has the same type as the COND_EXPR. This avoids optimizing
11294 away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away the operand that contains a label.  */
11296 if ((!TREE_SIDE_EFFECTS (unused_op)
11297 || !contains_label_p (unused_op))
11298 && (! VOID_TYPE_P (TREE_TYPE (tem))
11299 || VOID_TYPE_P (type)))
11300 return pedantic_non_lvalue (tem);
11303 if (operand_equal_p (arg1, op2, 0))
11304 return pedantic_omit_one_operand (type, arg1, arg0);
11306 /* If we have A op B ? A : C, we may be able to convert this to a
11307 simpler expression, depending on the operation and the values
11308 of B and C. Signed zeros prevent all of these transformations,
11309 for reasons given above each one.
11311 Also try swapping the arguments and inverting the conditional. */
11312 if (COMPARISON_CLASS_P (arg0)
11313 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11314 arg1, TREE_OPERAND (arg0, 1))
11315 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11317 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11322 if (COMPARISON_CLASS_P (arg0)
11323 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11325 TREE_OPERAND (arg0, 1))
11326 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11328 tem = fold_truth_not_expr (arg0);
11329 if (tem && COMPARISON_CLASS_P (tem))
11331 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11337 /* If the second operand is simpler than the third, swap them
11338 since that produces better jump optimization results. */
11339 if (truth_value_p (TREE_CODE (arg0))
11340 && tree_swap_operands_p (op1, op2, false))
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
11350 /* Convert A ? 1 : 0 to simply A. */
11351 if (integer_onep (op1)
11352 && integer_zerop (op2)
11353 /* If we try to convert OP0 to our type, the
11354 call to fold will try to move the conversion inside
11355 a COND, which will recurse. In that case, the COND_EXPR
11356 is probably the best choice, so leave it alone. */
11357 && type == TREE_TYPE (arg0))
11358 return pedantic_non_lvalue (arg0);
11360 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11361 over COND_EXPR in cases such as floating point comparisons. */
11362 if (integer_zerop (op1)
11363 && integer_onep (op2)
11364 && truth_value_p (TREE_CODE (arg0)))
11365 return pedantic_non_lvalue (fold_convert (type,
11366 invert_truthvalue (arg0)));
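      /* E.g., (a == b) ? 0 : 1 folds to a != b, which later passes
	 handle better than a conditional expression.  */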
11368 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11369 if (TREE_CODE (arg0) == LT_EXPR
11370 && integer_zerop (TREE_OPERAND (arg0, 1))
11371 && integer_zerop (op2)
11372 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11374 /* sign_bit_p only checks ARG1 bits within A's precision.
11375 If <sign bit of A> has wider type than A, bits outside
11376 of A's precision in <sign bit of A> need to be checked.
11377 If they are all 0, this optimization needs to be done
11378 in unsigned A's type, if they are all 1 in signed A's type,
11379 otherwise this can't be done. */
11380 if (TYPE_PRECISION (TREE_TYPE (tem))
11381 < TYPE_PRECISION (TREE_TYPE (arg1))
11382 && TYPE_PRECISION (TREE_TYPE (tem))
11383 < TYPE_PRECISION (type))
11385 unsigned HOST_WIDE_INT mask_lo;
11386 HOST_WIDE_INT mask_hi;
11387 int inner_width, outer_width;
11390 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11391 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11392 if (outer_width > TYPE_PRECISION (type))
11393 outer_width = TYPE_PRECISION (type);
11395 if (outer_width > HOST_BITS_PER_WIDE_INT)
11397 mask_hi = ((unsigned HOST_WIDE_INT) -1
11398 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11404 mask_lo = ((unsigned HOST_WIDE_INT) -1
11405 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11407 if (inner_width > HOST_BITS_PER_WIDE_INT)
11409 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11410 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11414 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11415 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11417 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11418 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11420 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11421 tem = fold_convert (tem_type, tem);
11423 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11424 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11426 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11427 tem = fold_convert (tem_type, tem);
11434 return fold_convert (type,
11435 fold_build2 (BIT_AND_EXPR,
11436 TREE_TYPE (tem), tem,
					     fold_convert (TREE_TYPE (tem),
							   arg1)));
11441 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11442 already handled above. */
11443 if (TREE_CODE (arg0) == BIT_AND_EXPR
11444 && integer_onep (TREE_OPERAND (arg0, 1))
11445 && integer_zerop (op2)
11446 && integer_pow2p (arg1))
11448 tree tem = TREE_OPERAND (arg0, 0);
11450 if (TREE_CODE (tem) == RSHIFT_EXPR
11451 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11452 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11453 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11454 return fold_build2 (BIT_AND_EXPR, type,
11455 TREE_OPERAND (tem, 0), arg1);
11458 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11459 is probably obsolete because the first operand should be a
11460 truth value (that's why we have the two cases above), but let's
11461 leave it in until we can confirm this for all front-ends. */
11462 if (integer_zerop (op2)
11463 && TREE_CODE (arg0) == NE_EXPR
11464 && integer_zerop (TREE_OPERAND (arg0, 1))
11465 && integer_pow2p (arg1)
11466 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11467 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11468 arg1, OEP_ONLY_CONST))
11469 return pedantic_non_lvalue (fold_convert (type,
11470 TREE_OPERAND (arg0, 0)));
11472 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11473 if (integer_zerop (op2)
11474 && truth_value_p (TREE_CODE (arg0))
11475 && truth_value_p (TREE_CODE (arg1)))
11476 return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);
11480 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11481 if (integer_onep (op2)
11482 && truth_value_p (TREE_CODE (arg0))
11483 && truth_value_p (TREE_CODE (arg1)))
11485 /* Only perform transformation if ARG0 is easily inverted. */
11486 tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
11493 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11494 if (integer_zerop (arg1)
11495 && truth_value_p (TREE_CODE (arg0))
11496 && truth_value_p (TREE_CODE (op2)))
11498 /* Only perform transformation if ARG0 is easily inverted. */
11499 tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
11506 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11507 if (integer_onep (arg1)
11508 && truth_value_p (TREE_CODE (arg0))
11509 && truth_value_p (TREE_CODE (op2)))
11510 return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);
      return NULL_TREE;

    case CALL_EXPR:
      /* Check for a built-in function.  */
11518 if (TREE_CODE (op0) == ADDR_EXPR
11519 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11520 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11521 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11524 case BIT_FIELD_REF:
11525 if (TREE_CODE (arg0) == VECTOR_CST
11526 && type == TREE_TYPE (TREE_TYPE (arg0))
11527 && host_integerp (arg1, 1)
11528 && host_integerp (op2, 1))
11530 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11531 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11535 && (idx % width) == 0
11536 && (idx = idx / width)
11537 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11539 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11540 while (idx-- > 0 && elements)
11541 elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
11552 } /* switch (code) */
11555 /* Perform constant folding and related simplification of EXPR.
11556 The related simplifications include x*1 => x, x*0 => 0, etc.,
11557 and application of the associative law.
11558 NOP_EXPR conversions may be removed freely (as long as we
11559 are careful not to change the type of the overall expression).
11560 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11561 but we can constant-fold them if they have constant operands. */
11563 #ifdef ENABLE_FOLD_CHECKING
11564 # define fold(x) fold_1 (x)
static tree fold_1 (tree);
#endif

tree
fold (tree expr)
{
  const tree t = expr;
11572 enum tree_code code = TREE_CODE (t);
11573 enum tree_code_class kind = TREE_CODE_CLASS (code);
11576 /* Return right away if a constant. */
  if (kind == tcc_constant)
    return t;
11580 if (IS_EXPR_CODE_CLASS (kind))
11582 tree type = TREE_TYPE (t);
11583 tree op0, op1, op2;
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
11614 } /* switch (code) */
11617 #ifdef ENABLE_FOLD_CHECKING
11620 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11621 static void fold_check_failed (tree, tree);
11622 void print_fold_checksum (tree);
/* When --enable-checking=fold is used, compute a digest of EXPR before
   and after the actual fold call, to verify that fold did not
   accidentally change the original EXPR.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11637 md5_init_ctx (&ctx);
11638 fold_checksum_tree (expr, &ctx, ht);
11639 md5_finish_ctx (&ctx, checksum_before);
11642 ret = fold_1 (expr);
11644 md5_init_ctx (&ctx);
11645 fold_checksum_tree (expr, &ctx, ht);
11646 md5_finish_ctx (&ctx, checksum_after);
  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
11656 print_fold_checksum (tree expr)
11658 struct md5_ctx ctx;
11659 unsigned char checksum[16], cnt;
11662 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11663 md5_init_ctx (&ctx);
11664 fold_checksum_tree (expr, &ctx, ht);
11665 md5_finish_ctx (&ctx, checksum);
11667 for (cnt = 0; cnt < 16; ++cnt)
11668 fprintf (stderr, "%02x", checksum[cnt]);
11669 putc ('\n', stderr);
11673 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11675 internal_error ("fold check: original tree changed by fold");
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
               <= sizeof (struct tree_function_decl))
              && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
        {
          TYPE_CACHED_VALUES_P (expr) = 0;
          TYPE_CACHED_VALUES (expr) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
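/* Usage sketch (illustrative; the operands and type are placeholders):
   callers use the fold_buildN wrappers instead of buildN followed by
   fold, e.g.

     tree sum  = fold_build2 (PLUS_EXPR, type, a, b);
     tree neg  = fold_build1 (NEGATE_EXPR, type, sum);
     tree pick = fold_build3 (COND_EXPR, type, cond, neg, sum);

   Each call returns the simplified tree when fold_unary, fold_binary or
   fold_ternary succeeds, and otherwise a newly built node, so the
   checksums above verify that the operands survive either path.  */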
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
                         tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
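/* Illustrative example (operand names are placeholders): initializers
   are evaluated at compile time, so run-time traps cannot be observed.
   With -ftrapping-math, fold refuses to fold a division by zero such as

     fold_build2 (RDIV_EXPR, double_type_node, one, zero)

   because the division might raise an exception at run time, whereas

     fold_build2_initializer (RDIV_EXPR, double_type_node, one, zero)

   folds it under the temporarily cleared flag_trapping_math above.  */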
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
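/* Worked example (illustrative): asking whether  J << 3  is a multiple
   of 8 hits the LSHIFT_EXPR case above, which forms the constant
   1 << 3 == 8 via const_binop and recurses; the INTEGER_CST case then
   checks 8 % 8 == 0.  Similarly  (A * 8) + (B * 24)  succeeds through
   the PLUS_EXPR case, because each MULT_EXPR operand carries a constant
   factor that is itself a multiple of 8.  */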
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
        return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is smaller than
         that of the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
            CASE_INT_FN (BUILT_IN_FFS):
            CASE_INT_FN (BUILT_IN_PARITY):
            CASE_INT_FN (BUILT_IN_POPCOUNT):
            case BUILT_IN_BSWAP32:
            case BUILT_IN_BSWAP64:
              /* Always true.  */
              return 1;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_ASINH):
            CASE_FLT_FN (BUILT_IN_ATAN):
            CASE_FLT_FN (BUILT_IN_ATANH):
            CASE_FLT_FN (BUILT_IN_CBRT):
            CASE_FLT_FN (BUILT_IN_CEIL):
            CASE_FLT_FN (BUILT_IN_ERF):
            CASE_FLT_FN (BUILT_IN_EXPM1):
            CASE_FLT_FN (BUILT_IN_FLOOR):
            CASE_FLT_FN (BUILT_IN_FMOD):
            CASE_FLT_FN (BUILT_IN_FREXP):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LDEXP):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
            CASE_FLT_FN (BUILT_IN_LLRINT):
            CASE_FLT_FN (BUILT_IN_LLROUND):
            CASE_FLT_FN (BUILT_IN_LRINT):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_MODF):
            CASE_FLT_FN (BUILT_IN_NEARBYINT):
            CASE_FLT_FN (BUILT_IN_RINT):
            CASE_FLT_FN (BUILT_IN_ROUND):
            CASE_FLT_FN (BUILT_IN_SIGNBIT):
            CASE_FLT_FN (BUILT_IN_SINH):
            CASE_FLT_FN (BUILT_IN_TANH):
            CASE_FLT_FN (BUILT_IN_TRUNC):
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_FMAX):
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_FMIN):
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_COPYSIGN):
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer.  */
              if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
                {
                  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
                  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
                    return 1;
                }
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer valued real.  */
              if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
                  n = real_to_integer (&c);
                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return 1;
                    }
                }
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
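/* Worked example (illustrative): for the PLUS_EXPR rule above, take a
   16-bit signed result with both operands zero-extended from 8 bits.
   prec = MAX (8, 8) + 1 = 9 < 16, and indeed the largest possible sum,
   255 + 255 = 510, fits in 9 bits and is far below the signed maximum
   32767, so the sum can never wrap negative.  With 15-bit operands,
   prec = 16 is not < 16, and e.g. 32767 + 32767 = 65534 reads as -2 in
   16 bits, so the rule correctly refuses.  */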
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
         if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* With the presence of negative values it is hard
             to say something.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
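/* Illustrative examples: the ADDR_EXPR case returns true for the address
   of a non-weak declaration, e.g. &x for a local variable, since such an
   address can never compare equal to NULL.  It must return false for the
   address of a weak symbol,

     extern int maybe_there __attribute__ ((weak));
     ... &maybe_there ...

   because an undefined weak symbol resolves to address 0 at link time.  */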
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
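/* Usage sketch (illustrative; operands are placeholders): these entry
   points let a caller ask "does this simplify all the way down to a
   constant?" without committing to a new node on failure:

     tree cst = fold_binary_to_constant (PLUS_EXPR, type, op0, op1);
     if (cst)
       ... use the constant directly ...
     else
       ... keep the original expression ...                          */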
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
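/* Illustrative example: given the C source

     static const char msg[] = "abc";
     ... msg[1] ...

   the ARRAY_REF arm above produces index 1, and the final test folds the
   access to the character constant 'b' converted to TREE_TYPE (exp).  An
   index at or past TREE_STRING_LENGTH, or a non-constant index, yields
   NULL instead.  */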
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
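/* Worked example (illustrative): fold_not_const complements both halves
   of the double-word constant, so for a 32-bit type

     ~5  ->  ~0x00000005 = 0xfffffffa = -6

   and force_fit_type then truncates or sign-extends the double-word
   result back into TYPE's precision.  */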
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
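/* Worked example (illustrative): folding  2 >= 3  (GE_EXPR).  GE is
   neither LE nor GT, so no swap; the NE/GE branch sets invert = 1 and
   rewrites the code to LT_EXPR.  INT_CST_LT (2, 3) yields 1, and
   inverting gives 0, so the result is constant_boolean_node (0, type),
   i.e. false, as 2 >= 3 does not hold.  */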
/* Build an expression for a cleanup point containing EXPR, with type TYPE.
   Don't build a cleanup point expression if EXPR doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return has no side effects, or whether the right-hand side of the modify
     expression inside the return has no side effects.  In either case we
     don't need to wrap the expression in a cleanup point expression.  Note
     we don't check the left-hand side of the modify because it should
     always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
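/* Worked example (illustrative): the power-of-two branch computes
   (VALUE + (DIVISOR - 1)) & -DIVISOR.  For VALUE = 37, DIVISOR = 8:

     37 + 7 = 44 = 0b101100
     44 & -8 = 44 & ...111000 = 40

   which is 37 rounded up to the next multiple of 8.  Exact multiples of
   8 are unchanged, since adding 7 never crosses the next boundary.  */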
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
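/* Illustrative example: for

     char buf[16];
     e1 = &buf[10], e2 = &buf[2];

   both addresses split to the same core (&buf) with constant bit
   positions 80 and 16, so *DIFF receives (80 - 16) / 8 = 8 and the
   function returns true.  If either element access used a variable
   index, only one side would have a NULL offset tree and the function
   would return false.  */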
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case CALL_EXPR:
      /* Strip sign ops from the argument of "odd" math functions.  */
      if (negate_mathfn_p (builtin_mathfn_code (exp)))
        {
          arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
          if (arg0)
            return build_function_call_expr (get_callee_fndecl (exp),
                                             build_tree_list (NULL_TREE, arg0));
        }
      break;

    default:
      break;
    }
  return NULL_TREE;
}