1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
65 #include "langhooks.h"
68 /* Nonzero if we are folding constants inside an initializer; zero
70 int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
/* Forward declarations for the file-local (static) helpers defined below.
   They cover double-word integer arithmetic (encode/decode), negation
   analysis, range/comparison folding, and bit-field optimizations.  */
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (const_tree, int);
118 static tree sign_bit_p (tree, const_tree);
119 static int simple_operand_p (const_tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (const_tree, const_tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
/* Each destination word holds only HOST_BITS_PER_WIDE_INT / 2 bits, stored
   as a positive number, so later multiplications of two words cannot
   overflow (see the LOWPART/HIGHPART/BASE commentary above).  */
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
/* Exact inverse of encode: WORDS[1] and WORDS[3] are shifted back into the
   high half of each piece by multiplying by BASE (== 2**(HOST_BITS_PER_WIDE_INT/2)).  */
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
/* Remember the incoming value so the final test can detect whether
   truncation/extension changed it (the "overflow" result).  */
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
206 int sign_extended_type;
/* Pointer and offset types are handled specially here; NOTE(review): the
   branch body is elided in this listing -- confirm against the full source.  */
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
285 int sign_extended_type;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
/* Truncate/extend LOW:HIGH to TYPE's precision; fit_double_type reports
   whether the value changed (i.e. overflowed).  */
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
300 || (overflowable > 0 && sign_extended_type))
/* Build an unshared INTEGER_CST so TREE_OVERFLOW can be set without
   contaminating the shared constant cache.  */
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 unsigned HOST_WIDE_INT l;
/* (l < l1) is the carry out of the low-word addition: unsigned wraparound
   makes the sum smaller than either operand exactly when a carry occurred.  */
331 h = h1 + h2 + (l < l1);
/* Unsigned overflow: the high word wrapped around.  */
337 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
/* Signed overflow: operands have equal signs but the sum's sign differs
   (see OVERFLOW_SUM_SIGN above).  */
339 return OVERFLOW_SUM_SIGN (h1, h2, h);
342 /* Negate a doubleword integer with doubleword result.
343 Return nonzero if the operation overflows, assuming it's signed.
344 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
345 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
348 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
349 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
/* Overflow iff the sign bit is set in both the input high word and the
   negated high word -- i.e. the input was the most negative value, whose
   two's-complement negation is itself.  */
355 return (*hv & h1) < 0;
365 /* Multiply two doubleword integers with doubleword result.
366 Return nonzero if the operation overflows according to UNSIGNED_P.
367 Each argument is given as two `HOST_WIDE_INT' pieces.
368 One argument is L1 and H1; the other, L2 and H2.
369 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
372 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
373 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
374 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
/* Classic schoolbook multiplication on half-word digits: two 4-digit
   operands yield an 8-digit product, of which the low 4 digits are the
   result and the high 4 digits are used only for overflow detection.  */
377 HOST_WIDE_INT arg1[4];
378 HOST_WIDE_INT arg2[4];
379 HOST_WIDE_INT prod[4 * 2];
380 unsigned HOST_WIDE_INT carry;
382 unsigned HOST_WIDE_INT toplow, neglow;
383 HOST_WIDE_INT tophigh, neghigh;
385 encode (arg1, l1, h1);
386 encode (arg2, l2, h2);
388 memset (prod, 0, sizeof prod);
390 for (i = 0; i < 4; i++)
393 for (j = 0; j < 4; j++)
396 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
397 carry += arg1[i] * arg2[j];
398 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
400 prod[k] = LOWPART (carry);
401 carry = HIGHPART (carry);
406 decode (prod, lv, hv);
407 decode (prod + 4, &toplow, &tophigh);
409 /* Unsigned overflow is immediate. */
/* For unsigned multiply, any nonzero bit in the top half means the true
   product did not fit in two words.  */
411 return (toplow | tophigh) != 0;
413 /* Check for signed overflow by calculating the signed representation of the
414 top half of the result; it should agree with the low half's sign bit. */
/* Subtract the other operand from the top half once for each negative
   input, converting the unsigned top half to its signed value.  */
417 neg_double (l2, h2, &neglow, &neghigh);
418 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 neg_double (l1, h1, &neglow, &neghigh);
423 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is the sign extension of the low half:
   all zeros for a non-negative result, all ones for a negative one.  */
425 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428 /* Shift the doubleword integer in L1, H1 left by COUNT places
429 keeping only PREC bits of result.
430 Shift right if COUNT is negative.
431 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
435 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
436 HOST_WIDE_INT count, unsigned int prec,
437 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
439 unsigned HOST_WIDE_INT signmask;
/* A negative count is a right shift by the absolute value.  */
443 rshift_double (l1, h1, -count, prec, lv, hv, arith);
447 if (SHIFT_COUNT_TRUNCATED)
450 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
452 /* Shifting by the host word size is undefined according to the
453 ANSI standard, so we must handle this as a special case. */
457 else if (count >= HOST_BITS_PER_WIDE_INT)
459 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* The split ">> (w - count - 1) >> 1" avoids a shift by the full word
   width (undefined behavior) when count == 0.  */
464 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
465 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
469 /* Sign extend all bits that are beyond the precision. */
/* signmask is all-ones if the result's sign bit (bit prec-1) is set,
   otherwise zero.  */
471 signmask = -((prec > HOST_BITS_PER_WIDE_INT
472 ? ((unsigned HOST_WIDE_INT) *hv
473 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
474 : (*lv >> (prec - 1))) & 1);
476 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
478 else if (prec >= HOST_BITS_PER_WIDE_INT)
480 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
481 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
486 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
487 *lv |= signmask << prec;
491 /* Shift the doubleword integer in L1, H1 right by COUNT places
492 keeping only PREC bits of result. COUNT must be positive.
493 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
494 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
497 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
498 HOST_WIDE_INT count, unsigned int prec,
499 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502 unsigned HOST_WIDE_INT signmask;
/* For an arithmetic shift, replicate the incoming sign bit of H1.  */
505 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508 if (SHIFT_COUNT_TRUNCATED)
511 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
513 /* Shifting by the host word size is undefined according to the
514 ANSI standard, so we must handle this as a special case. */
518 else if (count >= HOST_BITS_PER_WIDE_INT)
521 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
525 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* The split "<< (w - count - 1) << 1" avoids a shift by the full word
   width (undefined behavior) when count == 0.  */
527 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530 /* Zero / sign extend all bits that are beyond the precision. */
532 if (count >= (HOST_WIDE_INT)prec)
537 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
539 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
541 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
542 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
547 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
548 *lv |= signmask << (prec - count);
552 /* Rotate the doubleword integer in L1, H1 left by COUNT places
553 keeping only PREC bits of result.
554 Rotate right if COUNT is negative.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
/* rotl(x, count) == (x << count) | (x >> (prec - count)); the two logical
   shifts are computed here and combined below (combination elided in this
   listing).  */
569 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
575 /* Rotate the doubleword integer in L1, H1 left by COUNT places
576 keeping only PREC bits of result. COUNT must be positive.
577 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
580 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
581 HOST_WIDE_INT count, unsigned int prec,
582 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
584 unsigned HOST_WIDE_INT s1l, s2l;
585 HOST_WIDE_INT s1h, s2h;
/* rotr(x, count) == (x >> count) | (x << (prec - count)); mirror image of
   lrotate_double above (combination elided in this listing).  */
591 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
592 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
597 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
598 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
599 CODE is a tree code for a kind of division, one of
600 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
602 It controls how the quotient is rounded to an integer.
603 Return nonzero if the operation overflows.
604 UNS nonzero says do unsigned division. */
607 div_and_round_double (enum tree_code code, int uns,
608 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
609 HOST_WIDE_INT hnum_orig,
610 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
611 HOST_WIDE_INT hden_orig,
612 unsigned HOST_WIDE_INT *lquo,
613 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
617 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
618 HOST_WIDE_INT den[4], quo[4];
620 unsigned HOST_WIDE_INT work;
621 unsigned HOST_WIDE_INT carry = 0;
622 unsigned HOST_WIDE_INT lnum = lnum_orig;
623 HOST_WIDE_INT hnum = hnum_orig;
624 unsigned HOST_WIDE_INT lden = lden_orig;
625 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute 1 so the code below
   still produces a well-defined (if meaningless) result.  */
628 if (hden == 0 && lden == 0)
629 overflow = 1, lden = 1;
631 /* Calculate quotient sign and convert operands to unsigned. */
637 /* (minimum integer) / (-1) is the only overflow case. */
638 if (neg_double (lnum, hnum, &lnum, &hnum)
639 && ((HOST_WIDE_INT) lden & hden) == -1)
645 neg_double (lden, hden, &lden, &hden);
649 if (hnum == 0 && hden == 0)
650 { /* single precision */
652 /* This unsigned division rounds toward zero. */
658 { /* trivial case: dividend < divisor */
659 /* hden != 0 already checked. */
666 memset (quo, 0, sizeof quo);
668 memset (num, 0, sizeof num); /* to zero 9th element */
669 memset (den, 0, sizeof den);
671 encode (num, lnum, hnum);
672 encode (den, lden, hden);
674 /* Special code for when the divisor < BASE. */
675 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
677 /* hnum != 0 already checked. */
678 for (i = 4 - 1; i >= 0; i--)
680 work = num[i] + carry * BASE;
681 quo[i] = work / lden;
687 /* Full double precision division,
688 with thanks to Don Knuth's "Seminumerical Algorithms". */
689 int num_hi_sig, den_hi_sig;
690 unsigned HOST_WIDE_INT quo_est, scale;
692 /* Find the highest nonzero divisor digit. */
693 for (i = 4 - 1;; i--)
700 /* Ensure that the first digit of the divisor is at least BASE/2.
701 This is required by the quotient digit estimation algorithm. */
703 scale = BASE / (den[den_hi_sig] + 1);
705 { /* scale divisor and dividend */
707 for (i = 0; i <= 4 - 1; i++)
709 work = (num[i] * scale) + carry;
710 num[i] = LOWPART (work);
711 carry = HIGHPART (work);
716 for (i = 0; i <= 4 - 1; i++)
718 work = (den[i] * scale) + carry;
719 den[i] = LOWPART (work);
720 carry = HIGHPART (work);
721 if (den[i] != 0) den_hi_sig = i;
/* Main loop of Knuth's Algorithm D: produce one quotient digit per
   iteration, from most significant to least.  */
728 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
730 /* Guess the next quotient digit, quo_est, by dividing the first
731 two remaining dividend digits by the high order quotient digit.
732 quo_est is never low and is at most 2 high. */
733 unsigned HOST_WIDE_INT tmp;
735 num_hi_sig = i + den_hi_sig + 1;
736 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
737 if (num[num_hi_sig] != den[den_hi_sig])
738 quo_est = work / den[den_hi_sig];
742 /* Refine quo_est so it's usually correct, and at most one high. */
743 tmp = work - quo_est * den[den_hi_sig];
745 && (den[den_hi_sig - 1] * quo_est
746 > (tmp * BASE + num[num_hi_sig - 2])))
749 /* Try QUO_EST as the quotient digit, by multiplying the
750 divisor by QUO_EST and subtracting from the remaining dividend.
751 Keep in mind that QUO_EST is the I - 1st digit. */
754 for (j = 0; j <= den_hi_sig; j++)
756 work = quo_est * den[j] + carry;
757 carry = HIGHPART (work);
758 work = num[i + j] - LOWPART (work);
759 num[i + j] = LOWPART (work);
760 carry += HIGHPART (work) != 0;
763 /* If quo_est was high by one, then num[i] went negative and
764 we need to correct things. */
765 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
768 carry = 0; /* add divisor back in */
769 for (j = 0; j <= den_hi_sig; j++)
771 work = num[i + j] + den[j] + carry;
772 carry = HIGHPART (work);
773 num[i + j] = LOWPART (work);
776 num [num_hi_sig] += carry;
779 /* Store the quotient digit. */
784 decode (quo, lquo, hquo);
787 /* If result is negative, make it so. */
789 neg_double (*lquo, *hquo, lquo, hquo);
791 /* Compute trial remainder: rem = num - (quo * den) */
792 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
793 neg_double (*lrem, *hrem, lrem, hrem);
794 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust the truncating quotient according to CODE's rounding mode.
   NOTE(review): the switch statement and several case labels are elided
   in this listing.  */
799 case TRUNC_MOD_EXPR: /* round toward zero */
800 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
804 case FLOOR_MOD_EXPR: /* round toward negative infinity */
805 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
808 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
816 case CEIL_MOD_EXPR: /* round toward positive infinity */
817 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827 case ROUND_MOD_EXPR: /* round to closest integer */
829 unsigned HOST_WIDE_INT labs_rem = *lrem;
830 HOST_WIDE_INT habs_rem = *hrem;
831 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
832 HOST_WIDE_INT habs_den = hden, htwice;
834 /* Get absolute values. */
836 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
838 neg_double (lden, hden, &labs_den, &habs_den);
840 /* If (2 * abs (lrem) >= abs (lden)) */
841 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
842 labs_rem, habs_rem, &ltwice, &htwice);
844 if (((unsigned HOST_WIDE_INT) habs_den
845 < (unsigned HOST_WIDE_INT) htwice)
846 || (((unsigned HOST_WIDE_INT) habs_den
847 == (unsigned HOST_WIDE_INT) htwice)
848 && (labs_den < ltwice)))
852 add_double (*lquo, *hquo,
853 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
868 /* Compute true remainder: rem = num - (quo * den) */
869 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
870 neg_double (*lrem, *hrem, lrem, hrem);
871 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
875 /* If ARG2 divides ARG1 with zero remainder, carries out the division
876 of type CODE and returns the quotient.
877 Otherwise returns NULL_TREE. */
880 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
882 unsigned HOST_WIDE_INT int1l, int2l;
883 HOST_WIDE_INT int1h, int2h;
884 unsigned HOST_WIDE_INT quol, reml;
885 HOST_WIDE_INT quoh, remh;
886 tree type = TREE_TYPE (arg1);
887 int uns = TYPE_UNSIGNED (type);
889 int1l = TREE_INT_CST_LOW (arg1);
890 int1h = TREE_INT_CST_HIGH (arg1);
891 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
892 &obj[some_exotic_number]. */
/* Pointer offsets are re-fitted through the corresponding signed type so
   that small negative offsets stay negative.  */
893 if (POINTER_TYPE_P (type))
896 type = signed_type_for (type);
897 fit_double_type (int1l, int1h, &int1l, &int1h,
901 fit_double_type (int1l, int1h, &int1l, &int1h, type);
902 int2l = TREE_INT_CST_LOW (arg2);
903 int2h = TREE_INT_CST_HIGH (arg2);
905 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
906 &quol, &quoh, &reml, &remh);
/* Any nonzero remainder means ARG2 does not evenly divide ARG1;
   the caller gets NULL_TREE in that case (return elided here).  */
907 if (remh != 0 || reml != 0)
910 return build_int_cst_wide (type, quol, quoh);
913 /* This is nonzero if we should defer warnings about undefined
914 overflow. This facility exists because these warnings are a
915 special case. The code to estimate loop iterations does not want
916 to issue any warnings, since it works with expressions which do not
917 occur in user code. Various bits of cleanup code call fold(), but
918 only use the result if it has certain characteristics (e.g., is a
919 constant); that code only wants to issue a warning if the result is
/* Nesting counter: incremented by fold_defer_overflow_warnings and
   decremented by fold_undefer_overflow_warnings.  */
922 static int fold_deferring_overflow_warnings;
924 /* If a warning about undefined overflow is deferred, this is the
925 warning. Note that this may cause us to turn two warnings into
926 one, but that is fine since it is sufficient to only give one
927 warning per expression. */
929 static const char* fold_deferred_overflow_warning;
931 /* If a warning about undefined overflow is deferred, this is the
932 level at which the warning should be emitted. */
934 static enum warn_strict_overflow_code fold_deferred_overflow_code;
936 /* Start deferring overflow warnings. We could use a stack here to
937 permit nested calls, but at present it is not necessary. */
940 fold_defer_overflow_warnings (void)
/* Paired with fold_undefer_overflow_warnings, which decrements and
   asserts the counter stays non-negative.  */
942 ++fold_deferring_overflow_warnings;
945 /* Stop deferring overflow warnings. If there is a pending warning,
946 and ISSUE is true, then issue the warning if appropriate. STMT is
947 the statement with which the warning should be associated (used for
948 location information); STMT may be NULL. CODE is the level of the
949 warning--a warn_strict_overflow_code value. This function will use
950 the smaller of CODE and the deferred code when deciding whether to
951 issue the warning. CODE may be zero to mean to always use the
955 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
960 gcc_assert (fold_deferring_overflow_warnings > 0);
961 --fold_deferring_overflow_warnings;
/* Still nested inside another defer/undefer pair: keep the warning
   pending, only lower the recorded severity level.  */
962 if (fold_deferring_overflow_warnings > 0)
964 if (fold_deferred_overflow_warning != NULL
966 && code < (int) fold_deferred_overflow_code)
967 fold_deferred_overflow_code = code;
/* Outermost undefer: consume the pending warning.  */
971 warnmsg = fold_deferred_overflow_warning;
972 fold_deferred_overflow_warning = NULL;
974 if (!issue || warnmsg == NULL)
977 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
980 /* Use the smallest code level when deciding to issue the
982 if (code == 0 || code > (int) fold_deferred_overflow_code)
983 code = fold_deferred_overflow_code;
985 if (!issue_strict_overflow_warning (code))
988 if (stmt == NULL_TREE || !expr_has_location (stmt))
989 locus = input_location;
991 locus = expr_location (stmt);
992 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
995 /* Stop deferring overflow warnings, ignoring any deferred
999 fold_undefer_and_ignore_overflow_warnings (void)
/* Convenience wrapper: issue == false discards any pending warning.  */
1001 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1004 /* Whether we are deferring overflow warnings. */
1007 fold_deferring_overflow_warnings_p (void)
/* True while at least one fold_defer_overflow_warnings call is active.  */
1009 return fold_deferring_overflow_warnings > 0;
1012 /* This is called when we fold something based on the fact that signed
1013 overflow is undefined. */
1016 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
/* Such folds are only valid when wrapping and trapping overflow are both
   disabled; anything else is a caller bug.  */
1018 gcc_assert (!flag_wrapv && !flag_trapv);
1019 if (fold_deferring_overflow_warnings > 0)
/* Deferred mode: record only the most severe (lowest-numbered) pending
   warning; one warning per expression is sufficient.  */
1021 if (fold_deferred_overflow_warning == NULL
1022 || wc < fold_deferred_overflow_code)
1024 fold_deferred_overflow_warning = gmsgid;
1025 fold_deferred_overflow_code = wc;
/* Immediate mode: emit now if -Wstrict-overflow is active at level WC.  */
1028 else if (issue_strict_overflow_warning (wc))
1029 warning (OPT_Wstrict_overflow, gmsgid);
1032 /* Return true if the built-in mathematical function specified by CODE
1033 is odd, i.e. -f(x) == f(-x). */
1036 negate_mathfn_p (enum built_in_function code)
/* Unconditionally odd functions.  */
1040 CASE_FLT_FN (BUILT_IN_ASIN):
1041 CASE_FLT_FN (BUILT_IN_ASINH):
1042 CASE_FLT_FN (BUILT_IN_ATAN):
1043 CASE_FLT_FN (BUILT_IN_ATANH):
1044 CASE_FLT_FN (BUILT_IN_CASIN):
1045 CASE_FLT_FN (BUILT_IN_CASINH):
1046 CASE_FLT_FN (BUILT_IN_CATAN):
1047 CASE_FLT_FN (BUILT_IN_CATANH):
1048 CASE_FLT_FN (BUILT_IN_CBRT):
1049 CASE_FLT_FN (BUILT_IN_CPROJ):
1050 CASE_FLT_FN (BUILT_IN_CSIN):
1051 CASE_FLT_FN (BUILT_IN_CSINH):
1052 CASE_FLT_FN (BUILT_IN_CTAN):
1053 CASE_FLT_FN (BUILT_IN_CTANH):
1054 CASE_FLT_FN (BUILT_IN_ERF):
1055 CASE_FLT_FN (BUILT_IN_LLROUND):
1056 CASE_FLT_FN (BUILT_IN_LROUND):
1057 CASE_FLT_FN (BUILT_IN_ROUND):
1058 CASE_FLT_FN (BUILT_IN_SIN):
1059 CASE_FLT_FN (BUILT_IN_SINH):
1060 CASE_FLT_FN (BUILT_IN_TAN):
1061 CASE_FLT_FN (BUILT_IN_TANH):
1062 CASE_FLT_FN (BUILT_IN_TRUNC):
/* The rint family rounds according to the current rounding mode, so it
   is only odd when -frounding-math is not in effect.  */
1065 CASE_FLT_FN (BUILT_IN_LLRINT):
1066 CASE_FLT_FN (BUILT_IN_LRINT):
1067 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1068 CASE_FLT_FN (BUILT_IN_RINT):
1069 return !flag_rounding_math;
1077 /* Check whether we may negate an integer constant T without causing
1081 may_negate_without_overflow_p (const_tree t)
1083 unsigned HOST_WIDE_INT val;
1087 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1089 type = TREE_TYPE (t);
1090 if (TYPE_UNSIGNED (type))
1093 prec = TYPE_PRECISION (type);
/* For wide types, only the high word can hold the most-negative value's
   sign bit; a nonzero low word rules that value out.  */
1094 if (prec > HOST_BITS_PER_WIDE_INT)
1096 if (TREE_INT_CST_LOW (t) != 0)
1098 prec -= HOST_BITS_PER_WIDE_INT;
1099 val = TREE_INT_CST_HIGH (t);
1102 val = TREE_INT_CST_LOW (t);
1103 if (prec < HOST_BITS_PER_WIDE_INT)
1104 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows only for the most negative value, 1 << (prec-1).  */
1105 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1108 /* Determine whether an expression T can be cheaply negated using
1109 the function negate_expr without introducing undefined overflow. */
1112 negate_expr_p (tree t)
1119 type = TREE_TYPE (t);
1121 STRIP_SIGN_NOPS (t);
/* Dispatch on the tree code of T.  NOTE(review): several case labels are
   elided in this listing; the comments below name the likely operand kind
   of each visible fragment -- confirm against the full source.  */
1122 switch (TREE_CODE (t))
/* Integer constant: wrapping types always negate safely; otherwise the
   constant must not be the most negative value.  */
1125 if (TYPE_OVERFLOW_WRAPS (type))
1128 /* Check that -CST will not overflow type. */
1129 return may_negate_without_overflow_p (t);
1131 return (INTEGRAL_TYPE_P (type)
1132 && TYPE_OVERFLOW_WRAPS (type));
/* Complex constant/expression: both parts must be negatable.  */
1140 return negate_expr_p (TREE_REALPART (t))
1141 && negate_expr_p (TREE_IMAGPART (t));
1144 return negate_expr_p (TREE_OPERAND (t, 0))
1145 && negate_expr_p (TREE_OPERAND (t, 1));
1148 return negate_expr_p (TREE_OPERAND (t, 0));
/* Addition: cannot redistribute the negation when sign-dependent
   rounding or signed zeros must be honored.  */
1151 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1152 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1154 /* -(A + B) -> (-B) - A. */
1155 if (negate_expr_p (TREE_OPERAND (t, 1))
1156 && reorder_operands_p (TREE_OPERAND (t, 0),
1157 TREE_OPERAND (t, 1)))
1159 /* -(A + B) -> (-A) - B. */
1160 return negate_expr_p (TREE_OPERAND (t, 0));
1163 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1164 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1165 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1166 && reorder_operands_p (TREE_OPERAND (t, 0),
1167 TREE_OPERAND (t, 1));
1170 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* Multiplication: negate either factor, -(A*B) == (-A)*B == A*(-B).  */
1176 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1177 return negate_expr_p (TREE_OPERAND (t, 1))
1178 || negate_expr_p (TREE_OPERAND (t, 0));
1181 case TRUNC_DIV_EXPR:
1182 case ROUND_DIV_EXPR:
1183 case FLOOR_DIV_EXPR:
1185 case EXACT_DIV_EXPR:
1186 /* In general we can't negate A / B, because if A is INT_MIN and
1187 B is 1, we may turn this into INT_MIN / -1 which is undefined
1188 and actually traps on some architectures. But if overflow is
1189 undefined, we can negate, because - (INT_MIN / 1) is an
1191 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1192 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1194 return negate_expr_p (TREE_OPERAND (t, 1))
1195 || negate_expr_p (TREE_OPERAND (t, 0));
1198 /* Negate -((double)float) as (double)(-float). */
1199 if (TREE_CODE (type) == REAL_TYPE)
1201 tree tem = strip_float_extensions (t);
1203 return negate_expr_p (tem);
1208 /* Negate -f(x) as f(-x). */
1209 if (negate_mathfn_p (builtin_mathfn_code (t)))
1210 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1214 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
/* The transformation is only valid when the shift count is exactly
   precision - 1, i.e. the shift isolates the sign bit.  */
1215 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1217 tree op1 = TREE_OPERAND (t, 1);
1218 if (TREE_INT_CST_HIGH (op1) == 0
1219 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1220 == TREE_INT_CST_LOW (op1))
1231 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1232 simplification is possible.
1233 If negate_expr_p would return true for T, NULL_TREE will never be
/* Dispatches on TREE_CODE (t).  Each case either constructs the negated
   tree directly or gives up (NULL_TREE).  NOTE(review): several case
   labels/braces are elided from this view — confirm against the full file.  */
1237 fold_negate_expr (tree t)
1239 tree type = TREE_TYPE (t);
1242 switch (TREE_CODE (t))
1244 /* Convert - (~A) to A + 1. */
1246 if (INTEGRAL_TYPE_P (type))
1247 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1248 build_int_cst (type, 1));
/* Constant cases: fold to a negated constant, but only when doing so
   does not introduce a trapping overflow.  */
1252 tem = fold_negate_const (t, type);
1253 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1254 || !TYPE_OVERFLOW_TRAPS (type))
1259 tem = fold_negate_const (t, type)
1260 /* Two's complement FP formats, such as c4x, may overflow. */
1261 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1266 tem = fold_negate_const (t, type);
/* Complex constant: negate real and imaginary parts; only rebuild a
   COMPLEX_CST when both parts folded back to constants.  */
1271 tree rpart = negate_expr (TREE_REALPART (t));
1272 tree ipart = negate_expr (TREE_IMAGPART (t));
1274 if ((TREE_CODE (rpart) == REAL_CST
1275 && TREE_CODE (ipart) == REAL_CST)
1276 || (TREE_CODE (rpart) == INTEGER_CST
1277 && TREE_CODE (ipart) == INTEGER_CST))
1278 return build_complex (type, rpart, ipart);
1283 if (negate_expr_p (t))
1284 return fold_build2 (COMPLEX_EXPR, type,
1285 fold_negate_expr (TREE_OPERAND (t, 0)),
1286 fold_negate_expr (TREE_OPERAND (t, 1)));
1290 if (negate_expr_p (t))
1291 return fold_build1 (CONJ_EXPR, type,
1292 fold_negate_expr (TREE_OPERAND (t, 0)));
/* - (- A) -> A: a double negation cancels.  */
1296 return TREE_OPERAND (t, 0);
/* PLUS_EXPR: only safe when sign-dependent rounding and signed zeros
   need not be honored in this mode.  */
1299 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1300 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1302 /* -(A + B) -> (-B) - A. */
1303 if (negate_expr_p (TREE_OPERAND (t, 1))
1304 && reorder_operands_p (TREE_OPERAND (t, 0),
1305 TREE_OPERAND (t, 1)))
1307 tem = negate_expr (TREE_OPERAND (t, 1));
1308 return fold_build2 (MINUS_EXPR, type,
1309 tem, TREE_OPERAND (t, 0));
1312 /* -(A + B) -> (-A) - B. */
1313 if (negate_expr_p (TREE_OPERAND (t, 0)))
1315 tem = negate_expr (TREE_OPERAND (t, 0));
1316 return fold_build2 (MINUS_EXPR, type,
1317 tem, TREE_OPERAND (t, 1));
1323 /* - (A - B) -> B - A */
1324 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1325 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1326 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1327 return fold_build2 (MINUS_EXPR, type,
1328 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1332 if (TYPE_UNSIGNED (type))
/* Push the negation into whichever multiplication operand can absorb it.  */
1338 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1340 tem = TREE_OPERAND (t, 1);
1341 if (negate_expr_p (tem))
1342 return fold_build2 (TREE_CODE (t), type,
1343 TREE_OPERAND (t, 0), negate_expr (tem));
1344 tem = TREE_OPERAND (t, 0);
1345 if (negate_expr_p (tem))
1346 return fold_build2 (TREE_CODE (t), type,
1347 negate_expr (tem), TREE_OPERAND (t, 1));
1351 case TRUNC_DIV_EXPR:
1352 case ROUND_DIV_EXPR:
1353 case FLOOR_DIV_EXPR:
1355 case EXACT_DIV_EXPR:
1356 /* In general we can't negate A / B, because if A is INT_MIN and
1357 B is 1, we may turn this into INT_MIN / -1 which is undefined
1358 and actually traps on some architectures. But if overflow is
1359 undefined, we can negate, because - (INT_MIN / 1) is an
1361 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1363 const char * const warnmsg = G_("assuming signed overflow does not "
1364 "occur when negating a division");
1365 tem = TREE_OPERAND (t, 1);
1366 if (negate_expr_p (tem))
/* Warn only when the transformation actually relies on overflow being
   undefined (non-constant divisor, or divisor 1).  */
1368 if (INTEGRAL_TYPE_P (type)
1369 && (TREE_CODE (tem) != INTEGER_CST
1370 || integer_onep (tem)))
1371 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1372 return fold_build2 (TREE_CODE (t), type,
1373 TREE_OPERAND (t, 0), negate_expr (tem));
1375 tem = TREE_OPERAND (t, 0);
1376 if (negate_expr_p (tem))
1378 if (INTEGRAL_TYPE_P (type)
1379 && (TREE_CODE (tem) != INTEGER_CST
1380 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1381 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1382 return fold_build2 (TREE_CODE (t), type,
1383 negate_expr (tem), TREE_OPERAND (t, 1));
1389 /* Convert -((double)float) into (double)(-float). */
1390 if (TREE_CODE (type) == REAL_TYPE)
1392 tem = strip_float_extensions (t);
1393 if (tem != t && negate_expr_p (tem))
1394 return fold_convert (type, negate_expr (tem));
1399 /* Negate -f(x) as f(-x). */
1400 if (negate_mathfn_p (builtin_mathfn_code (t))
1401 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1405 fndecl = get_callee_fndecl (t);
1406 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1407 return build_call_expr (fndecl, 1, arg);
1412 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1413 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1415 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count equals precision - 1, i.e. the shift
   extracts the sign bit; flipping signedness then negates the result.  */
1416 if (TREE_INT_CST_HIGH (op1) == 0
1417 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1418 == TREE_INT_CST_LOW (op1))
1420 tree ntype = TYPE_UNSIGNED (type)
1421 ? signed_type_for (type)
1422 : unsigned_type_for (type);
1423 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1424 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1425 return fold_convert (type, temp);
1437 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1438 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1439 return NULL_TREE. */
1442 negate_expr (tree t)
/* Remember the original type before stripping sign-preserving
   conversions, so the result can be converted back to it.  */
1449 type = TREE_TYPE (t);
1450 STRIP_SIGN_NOPS (t);
1452 tem = fold_negate_expr (t);
/* Fall back to an explicit NEGATE_EXPR when no simplification applied.  */
1454 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1455 return fold_convert (type, tem);
1458 /* Split a tree IN into a constant, literal and variable parts that could be
1459 combined with CODE to make IN. "constant" means an expression with
1460 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1461 commutative arithmetic operation. Store the constant part into *CONP,
1462 the literal in *LITP and return the variable part. If a part isn't
1463 present, set it to null. If the tree does not decompose in this way,
1464 return the entire tree as the variable part and the other parts as null.
1466 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1467 case, we negate an operand that was subtracted. Except if it is a
1468 literal for which we use *MINUS_LITP instead.
1470 If NEGATE_P is true, we are negating all of IN, again except a literal
1471 for which we use *MINUS_LITP instead.
1473 If IN is itself a literal or constant, return it as appropriate.
1475 Note that we do not guarantee that any of the three values will be the
1476 same type as IN, but they will have the same signedness and mode. */
1479 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1480 tree *minus_litp, int negate_p)
1488 /* Strip any conversions that don't change the machine mode or signedness. */
1489 STRIP_SIGN_NOPS (in);
/* Case 1: IN is itself a literal constant.  */
1491 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1492 || TREE_CODE (in) == FIXED_CST)
/* Case 2: IN is a binary expression we can decompose.  Mixing PLUS and
   MINUS is allowed only for non-float, non-saturating types.  */
1494 else if (TREE_CODE (in) == code
1495 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1496 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1497 /* We can associate addition and subtraction together (even
1498 though the C standard doesn't say so) for integers because
1499 the value is not affected. For reals, the value might be
1500 affected, so we can't. */
1501 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1502 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1504 tree op0 = TREE_OPERAND (in, 0);
1505 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 is effectively subtracted when IN is a MINUS_EXPR.  */
1506 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1507 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1509 /* First see if either of the operands is a literal, then a constant. */
1510 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1511 || TREE_CODE (op0) == FIXED_CST)
1512 *litp = op0, op0 = 0;
1513 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1514 || TREE_CODE (op1) == FIXED_CST)
1515 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1517 if (op0 != 0 && TREE_CONSTANT (op0))
1518 *conp = op0, op0 = 0;
1519 else if (op1 != 0 && TREE_CONSTANT (op1))
1520 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1522 /* If we haven't dealt with either operand, this is not a case we can
1523 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1524 if (op0 != 0 && op1 != 0)
1529 var = op1, neg_var_p = neg1_p;
1531 /* Now do any needed negations. */
/* A subtracted literal goes into *MINUS_LITP rather than being negated.  */
1533 *minus_litp = *litp, *litp = 0;
1535 *conp = negate_expr (*conp);
1537 var = negate_expr (var);
/* Case 3: IN is TREE_CONSTANT but not a literal — whole thing is *CONP.  */
1539 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: swap the literal between *LITP and *MINUS_LITP and
   negate the other parts.  */
1547 *minus_litp = *litp, *litp = 0;
1548 else if (*minus_litp)
1549 *litp = *minus_litp, *minus_litp = 0;
1550 *conp = negate_expr (*conp);
1551 var = negate_expr (var);
1557 /* Re-associate trees split by the above function. T1 and T2 are either
1558 expressions to associate or null. Return the new expression, if any. If
1559 we build an operation, do it in TYPE and with CODE. */
1562 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1569 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1570 try to fold this since we will have infinite recursion. But do
1571 deal with any NEGATE_EXPRs. */
1572 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1573 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1575 if (code == PLUS_EXPR)
/* t1 + (-x) -> t1 - x, and (-x) + t2 -> t2 - x; adding zero is dropped.  */
1577 if (TREE_CODE (t1) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1579 fold_convert (type, TREE_OPERAND (t1, 0)));
1580 else if (TREE_CODE (t2) == NEGATE_EXPR)
1581 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1582 fold_convert (type, TREE_OPERAND (t2, 0)));
1583 else if (integer_zerop (t2))
1584 return fold_convert (type, t1);
1586 else if (code == MINUS_EXPR)
1588 if (integer_zerop (t2))
1589 return fold_convert (type, t1);
/* build2 (not fold_build2) here on purpose: folding would recurse.  */
1592 return build2 (code, type, fold_convert (type, t1),
1593 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1596 return fold_build2 (code, type, fold_convert (type, t1),
1597 fold_convert (type, t2));
1600 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1601 for use in int_const_binop, size_binop and size_diffop. */
1604 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
/* Both types must be integer or pointer types to be comparable at all.  */
1606 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1608 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Equivalence requires matching signedness, precision and machine mode.  */
1623 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1624 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1625 && TYPE_MODE (type1) == TYPE_MODE (type2);
1629 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1630 to produce a new constant. Return NULL_TREE if we don't know how
1631 to evaluate CODE at compile-time.
1633 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1636 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
/* Double-word arithmetic: each constant is a (low, high) word pair.  */
1638 unsigned HOST_WIDE_INT int1l, int2l;
1639 HOST_WIDE_INT int1h, int2h;
1640 unsigned HOST_WIDE_INT low;
1642 unsigned HOST_WIDE_INT garbagel;
1643 HOST_WIDE_INT garbageh;
1645 tree type = TREE_TYPE (arg1);
1646 int uns = TYPE_UNSIGNED (type);
/* sizetype overflow is tracked even though the type is unsigned.  */
1648 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1651 int1l = TREE_INT_CST_LOW (arg1);
1652 int1h = TREE_INT_CST_HIGH (arg1);
1653 int2l = TREE_INT_CST_LOW (arg2);
1654 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops operate word-wise and cannot overflow.  */
1659 low = int1l | int2l, hi = int1h | int2h;
1663 low = int1l ^ int2l, hi = int1h ^ int2h;
1667 low = int1l & int2l, hi = int1h & int2h;
1673 /* It's unclear from the C standard whether shifts can overflow.
1674 The following code ignores overflow; perhaps a C standard
1675 interpretation ruling is needed. */
1676 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1683 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1688 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction as addition of the negation; overflow deduced from the
   sign relationship of the inputs and the result.  */
1692 neg_double (int2l, int2h, &low, &hi);
1693 add_double (int1l, int1h, low, hi, &low, &hi);
1694 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1698 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1701 case TRUNC_DIV_EXPR:
1702 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1703 case EXACT_DIV_EXPR:
1704 /* This is a shortcut for a common special case. */
1705 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1706 && !TREE_OVERFLOW (arg1)
1707 && !TREE_OVERFLOW (arg2)
1708 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1710 if (code == CEIL_DIV_EXPR)
1713 low = int1l / int2l, hi = 0;
1717 /* ... fall through ... */
1719 case ROUND_DIV_EXPR:
/* Division by zero: not folded.  */
1720 if (int2h == 0 && int2l == 0)
/* Division by one is the identity.  */
1722 if (int2h == 0 && int2l == 1)
1724 low = int1l, hi = int1h;
/* Dividing a nonzero value by itself yields one.  */
1727 if (int1l == int2l && int1h == int2h
1728 && ! (int1l == 0 && int1h == 0))
1733 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1734 &low, &hi, &garbagel, &garbageh);
1737 case TRUNC_MOD_EXPR:
1738 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1739 /* This is a shortcut for a common special case. */
1740 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1741 && !TREE_OVERFLOW (arg1)
1742 && !TREE_OVERFLOW (arg2)
1743 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1745 if (code == CEIL_MOD_EXPR)
1747 low = int1l % int2l, hi = 0;
1751 /* ... fall through ... */
1753 case ROUND_MOD_EXPR:
1754 if (int2h == 0 && int2l == 0)
/* For MOD the quotient words are discarded; remainder lands in low/hi.  */
1756 overflow = div_and_round_double (code, uns,
1757 int1l, int1h, int2l, int2h,
1758 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare unsigned or signed depending on the type.  */
1764 low = (((unsigned HOST_WIDE_INT) int1h
1765 < (unsigned HOST_WIDE_INT) int2h)
1766 || (((unsigned HOST_WIDE_INT) int1h
1767 == (unsigned HOST_WIDE_INT) int2h)
1770 low = (int1h < int2h
1771 || (int1h == int2h && int1l < int2l));
1773 if (low == (code == MIN_EXPR))
1774 low = int1l, hi = int1h;
1776 low = int2l, hi = int2h;
1785 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1787 /* Propagate overflow flags ourselves. */
/* Overflow is significant for signed types and for sizetype.  */
1788 if (((!uns || is_sizetype) && overflow)
1789 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1792 TREE_OVERFLOW (t) = 1;
/* Otherwise let force_fit_type_double truncate and flag overflow.  */
1796 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1797 ((!uns || is_sizetype) && overflow)
1798 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1803 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1804 constant. We assume ARG1 and ARG2 have the same data type, or at least
1805 are the same kind of constant and the same machine mode. Return zero if
1806 combining the constants is not allowed in the current operating mode.
1808 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1811 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1813 /* Sanity check for the recursive cases. */
/* Integer constants are delegated to int_const_binop.  */
1820 if (TREE_CODE (arg1) == INTEGER_CST)
1821 return int_const_binop (code, arg1, arg2, notrunc);
/* Floating-point constants: fold via real_arithmetic, guarding all the
   cases where folding would change observable FP behavior.  */
1823 if (TREE_CODE (arg1) == REAL_CST)
1825 enum machine_mode mode;
1828 REAL_VALUE_TYPE value;
1829 REAL_VALUE_TYPE result;
1833 /* The following codes are handled by real_arithmetic. */
1848 d1 = TREE_REAL_CST (arg1);
1849 d2 = TREE_REAL_CST (arg2);
1851 type = TREE_TYPE (arg1);
1852 mode = TYPE_MODE (type);
1854 /* Don't perform operation if we honor signaling NaNs and
1855 either operand is a NaN. */
1856 if (HONOR_SNANS (mode)
1857 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1860 /* Don't perform operation if it would raise a division
1861 by zero exception. */
1862 if (code == RDIV_EXPR
1863 && REAL_VALUES_EQUAL (d2, dconst0)
1864 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1867 /* If either operand is a NaN, just return it. Otherwise, set up
1868 for floating-point trap; we return an overflow. */
1869 if (REAL_VALUE_ISNAN (d1))
1871 else if (REAL_VALUE_ISNAN (d2))
1874 inexact = real_arithmetic (&value, code, &d1, &d2);
1875 real_convert (&result, mode, &value);
1877 /* Don't constant fold this floating point operation if
1878 the result has overflowed and flag_trapping_math. */
1879 if (flag_trapping_math
1880 && MODE_HAS_INFINITIES (mode)
1881 && REAL_VALUE_ISINF (result)
1882 && !REAL_VALUE_ISINF (d1)
1883 && !REAL_VALUE_ISINF (d2))
1886 /* Don't constant fold this floating point operation if the
1887 result may dependent upon the run-time rounding mode and
1888 flag_rounding_math is set, or if GCC's software emulation
1889 is unable to accurately represent the result. */
1890 if ((flag_rounding_math
1891 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1892 && !flag_unsafe_math_optimizations))
1893 && (inexact || !real_identical (&result, &value)))
1896 t = build_real (type, result);
1898 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
/* Fixed-point constants: fold via fixed_arithmetic.  */
1902 if (TREE_CODE (arg1) == FIXED_CST)
1904 FIXED_VALUE_TYPE f1;
1905 FIXED_VALUE_TYPE f2;
1906 FIXED_VALUE_TYPE result;
1911 /* The following codes are handled by fixed_arithmetic. */
1917 case TRUNC_DIV_EXPR:
1918 f2 = TREE_FIXED_CST (arg2);
/* Division by an INTEGER_CST: reuse its words as the second operand.  */
1923 f2.data.high = TREE_INT_CST_HIGH (arg2);
1924 f2.data.low = TREE_INT_CST_LOW (arg2);
1932 f1 = TREE_FIXED_CST (arg1);
1933 type = TREE_TYPE (arg1);
1934 sat_p = TYPE_SATURATING (type);
1935 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1936 t = build_fixed (type, result);
1937 /* Propagate overflow flags. */
1938 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1940 TREE_OVERFLOW (t) = 1;
1941 TREE_CONSTANT_OVERFLOW (t) = 1;
1943 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1944 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Complex constants: recurse on the real/imaginary parts.  */
1948 if (TREE_CODE (arg1) == COMPLEX_CST)
1950 tree type = TREE_TYPE (arg1);
1951 tree r1 = TREE_REALPART (arg1);
1952 tree i1 = TREE_IMAGPART (arg1);
1953 tree r2 = TREE_REALPART (arg2);
1954 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction fold component-wise.  */
1961 real = const_binop (code, r1, r2, notrunc);
1962 imag = const_binop (code, i1, i2, notrunc);
/* Multiplication: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.  */
1966 real = const_binop (MINUS_EXPR,
1967 const_binop (MULT_EXPR, r1, r2, notrunc),
1968 const_binop (MULT_EXPR, i1, i2, notrunc),
1970 imag = const_binop (PLUS_EXPR,
1971 const_binop (MULT_EXPR, r1, i2, notrunc),
1972 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Division: multiply by the conjugate and divide by |z2|^2.  */
1979 = const_binop (PLUS_EXPR,
1980 const_binop (MULT_EXPR, r2, r2, notrunc),
1981 const_binop (MULT_EXPR, i2, i2, notrunc),
1984 = const_binop (PLUS_EXPR,
1985 const_binop (MULT_EXPR, r1, r2, notrunc),
1986 const_binop (MULT_EXPR, i1, i2, notrunc),
1989 = const_binop (MINUS_EXPR,
1990 const_binop (MULT_EXPR, i1, r2, notrunc),
1991 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integer complex division truncates.  */
1994 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1995 code = TRUNC_DIV_EXPR;
1997 real = const_binop (code, t1, magsquared, notrunc);
1998 imag = const_binop (code, t2, magsquared, notrunc);
2007 return build_complex (type, real, imag);
2013 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2014 indicates which particular sizetype to create. */
2017 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes sizetype_tab to pick e.g. sizetype vs. bitsizetype.  */
2019 return build_int_cst (sizetype_tab[(int) kind], number);
2022 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2023 is a tree code. The type of the result is taken from the operands.
2024 Both must be equivalent integer types, ala int_binop_types_match_p.
2025 If the operands are constant, so is the result. */
2028 size_binop (enum tree_code code, tree arg0, tree arg1)
2030 tree type = TREE_TYPE (arg0);
2032 if (arg0 == error_mark_node || arg1 == error_mark_node)
2033 return error_mark_node;
/* Callers must pass equivalent integer types; enforced here.  */
2035 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2038 /* Handle the special case of two integer constants faster. */
2039 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2041 /* And some specific cases even faster than that. */
/* Identity shortcuts (x + 0, x - 0, 1 * x) avoid building a new node,
   but only when the kept operand carries no overflow flag.  */
2042 if (code == PLUS_EXPR)
2044 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2046 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2049 else if (code == MINUS_EXPR)
2051 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2054 else if (code == MULT_EXPR)
2056 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2060 /* Handle general case of two integer constants. */
2061 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a (possibly folded) expression tree.  */
2064 return fold_build2 (code, type, arg0, arg1);
2067 /* Given two values, either both of sizetype or both of bitsizetype,
2068 compute the difference between the two values. Return the value
2069 in signed type corresponding to the type of the operands. */
2072 size_diffop (tree arg0, tree arg1)
2074 tree type = TREE_TYPE (arg0);
2077 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2080 /* If the type is already signed, just do the simple thing. */
2081 if (!TYPE_UNSIGNED (type))
2082 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the unsigned size type.  */
2084 if (type == sizetype)
2086 else if (type == bitsizetype)
2087 ctype = sbitsizetype;
2089 ctype = signed_type_for (type);
2091 /* If either operand is not a constant, do the conversions to the signed
2092 type and subtract. The hardware will do the right thing with any
2093 overflow in the subtraction. */
2094 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2095 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2096 fold_convert (ctype, arg1));
2098 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2099 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2100 overflow) and negate (which can't either). Special-case a result
2101 of zero while we're here. */
2102 if (tree_int_cst_equal (arg0, arg1))
2103 return build_int_cst (ctype, 0);
2104 else if (tree_int_cst_lt (arg1, arg0))
2105 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) in the signed type.  */
2107 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2108 fold_convert (ctype, size_binop (MINUS_EXPR,
2112 /* A subroutine of fold_convert_const handling conversions of an
2113 INTEGER_CST to another integer type. */
2116 fold_convert_const_int_from_int (tree type, const_tree arg1)
2120 /* Given an integer constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
/* The third argument of force_fit_type_double (overflowable) suppresses
   overflow detection for pointer sources and same-precision sizetype
   conversions; the fourth ORs in any pre-existing overflow plus the
   sign-change case (negative value converted unsigned -> signed).  */
2122 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2123 TREE_INT_CST_HIGH (arg1),
2124 /* Don't set the overflow when
2125 converting from a pointer, */
2126 !POINTER_TYPE_P (TREE_TYPE (arg1))
2127 /* or to a sizetype with same signedness
2128 and the precision is unchanged.
2129 ??? sizetype is always sign-extended,
2130 but its signedness depends on the
2131 frontend. Thus we see spurious overflows
2132 here if we do not check this. */
2133 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2134 == TYPE_PRECISION (type))
2135 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2136 == TYPE_UNSIGNED (type))
2137 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2138 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2139 || (TREE_CODE (type) == INTEGER_TYPE
2140 && TYPE_IS_SIZETYPE (type)))),
2141 (TREE_INT_CST_HIGH (arg1) < 0
2142 && (TYPE_UNSIGNED (type)
2143 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2144 | TREE_OVERFLOW (arg1));
2149 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2150 to an integer type. */
2153 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2158 /* The following code implements the floating point to integer
2159 conversion rules required by the Java Language Specification,
2160 that IEEE NaNs are mapped to zero and values that overflow
2161 the target precision saturate, i.e. values greater than
2162 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2163 are mapped to INT_MIN. These semantics are allowed by the
2164 C and C++ standards that simply state that the behavior of
2165 FP-to-integer conversion is unspecified upon overflow. */
2167 HOST_WIDE_INT high, low;
2169 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* CODE selects the rounding style; FIX_TRUNC_EXPR truncates toward zero.  */
2173 case FIX_TRUNC_EXPR:
2174 real_trunc (&r, VOIDmode, &x);
2181 /* If R is NaN, return zero and show we have an overflow. */
2182 if (REAL_VALUE_ISNAN (r))
2189 /* See if R is less than the lower bound or greater than the
/* Below TYPE_MIN_VALUE: saturate to the minimum.  */
2194 tree lt = TYPE_MIN_VALUE (type);
2195 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2196 if (REAL_VALUES_LESS (r, l))
2199 high = TREE_INT_CST_HIGH (lt);
2200 low = TREE_INT_CST_LOW (lt);
/* Above TYPE_MAX_VALUE: saturate to the maximum.  */
2206 tree ut = TYPE_MAX_VALUE (type);
2209 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2210 if (REAL_VALUES_LESS (u, r))
2213 high = TREE_INT_CST_HIGH (ut);
2214 low = TREE_INT_CST_LOW (ut);
/* In range: convert exactly.  */
2220 REAL_VALUE_TO_INT (&low, &high, r);
2222 t = force_fit_type_double (type, low, high, -1,
2223 overflow | TREE_OVERFLOW (arg1));
2227 /* A subroutine of fold_convert_const handling conversions of a
2228 FIXED_CST to an integer type. */
2231 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2234 double_int temp, temp_trunc;
2237 /* Right shift FIXED_CST to temp by fbit. */
2238 temp = TREE_FIXED_CST (arg1).data;
2239 mode = TREE_FIXED_CST (arg1).mode;
2240 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
/* A negative shift count to lshift_double performs a right shift,
   discarding the fractional bits.  */
2242 lshift_double (temp.low, temp.high,
2243 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2244 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2246 /* Left shift temp to temp_trunc by fbit. */
/* temp_trunc reconstructs the value with fraction bits zeroed, so it can
   be compared against the original to detect a nonzero fraction.  */
2247 lshift_double (temp.low, temp.high,
2248 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2249 &temp_trunc.low, &temp_trunc.high,
2250 SIGNED_FIXED_POINT_MODE_P (mode));
2257 temp_trunc.high = 0;
2260 /* If FIXED_CST is negative, we need to round the value toward 0.
2261 By checking if the fractional bits are not zero to add 1 to temp. */
2262 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2263 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2268 temp = double_int_add (temp, one);
2271 /* Given a fixed-point constant, make new constant with new type,
2272 appropriately sign-extended or truncated. */
2273 t = force_fit_type_double (type, temp.low, temp.high, -1,
2275 && (TYPE_UNSIGNED (type)
2276 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2277 | TREE_OVERFLOW (arg1));
2282 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2283 to another floating point type. */
2286 fold_convert_const_real_from_real (tree type, const_tree arg1)
2288 REAL_VALUE_TYPE value;
/* real_convert rounds the value to the target mode's format.  */
2291 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2292 t = build_real (type, value);
/* Carry the source's overflow flag across the conversion.  */
2294 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2298 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2299 to a floating point type. */
2302 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2304 REAL_VALUE_TYPE value;
2307 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2308 t = build_real (type, value);
/* Propagate overflow and constant-overflow flags from the source.  */
2310 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2311 TREE_CONSTANT_OVERFLOW (t)
2312 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2316 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2317 to another fixed-point type. */
2320 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2322 FIXED_VALUE_TYPE value;
/* fixed_convert reports overflow; saturation behavior follows the
   target type's TYPE_SATURATING flag.  */
2326 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2327 TYPE_SATURATING (type));
2328 t = build_fixed (type, value);
2330 /* Propagate overflow flags. */
2331 if (overflow_p | TREE_OVERFLOW (arg1))
2333 TREE_OVERFLOW (t) = 1;
2334 TREE_CONSTANT_OVERFLOW (t) = 1;
2336 else if (TREE_CONSTANT_OVERFLOW (arg1))
2337 TREE_CONSTANT_OVERFLOW (t) = 1;
2341 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2342 to a fixed-point type. */
2345 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2347 FIXED_VALUE_TYPE value;
/* The source's signedness and the target's saturating flag are passed
   through so fixed_convert_from_int can extend and clamp correctly.  */
2351 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2352 TREE_INT_CST (arg1),
2353 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2354 TYPE_SATURATING (type));
2355 t = build_fixed (type, value);
2357 /* Propagate overflow flags. */
2358 if (overflow_p | TREE_OVERFLOW (arg1))
2360 TREE_OVERFLOW (t) = 1;
2361 TREE_CONSTANT_OVERFLOW (t) = 1;
2363 else if (TREE_CONSTANT_OVERFLOW (arg1))
2364 TREE_CONSTANT_OVERFLOW (t) = 1;
2368 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2369 to a fixed-point type. */
2372 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2374 FIXED_VALUE_TYPE value;
/* Saturation on overflow follows the target type's TYPE_SATURATING flag.  */
2378 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2379 &TREE_REAL_CST (arg1),
2380 TYPE_SATURATING (type));
2381 t = build_fixed (type, value);
2383 /* Propagate overflow flags. */
2384 if (overflow_p | TREE_OVERFLOW (arg1))
2386 TREE_OVERFLOW (t) = 1;
2387 TREE_CONSTANT_OVERFLOW (t) = 1;
2389 else if (TREE_CONSTANT_OVERFLOW (arg1))
2390 TREE_CONSTANT_OVERFLOW (t) = 1;
2394 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2395 type TYPE. If no simplification can be done return NULL_TREE. */
2398 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Identity conversion: nothing to do.  */
2400 if (TREE_TYPE (arg1) == type)
/* Dispatch on target-type kind x source-constant kind to the dedicated
   helper for each combination; unhandled pairs return NULL_TREE.  */
2403 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2405 if (TREE_CODE (arg1) == INTEGER_CST)
2406 return fold_convert_const_int_from_int (type, arg1);
2407 else if (TREE_CODE (arg1) == REAL_CST)
2408 return fold_convert_const_int_from_real (code, type, arg1);
2409 else if (TREE_CODE (arg1) == FIXED_CST)
2410 return fold_convert_const_int_from_fixed (type, arg1);
2412 else if (TREE_CODE (type) == REAL_TYPE)
2414 if (TREE_CODE (arg1) == INTEGER_CST)
2415 return build_real_from_int_cst (type, arg1);
2416 else if (TREE_CODE (arg1) == REAL_CST)
2417 return fold_convert_const_real_from_real (type, arg1);
2418 else if (TREE_CODE (arg1) == FIXED_CST)
2419 return fold_convert_const_real_from_fixed (type, arg1);
2421 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2423 if (TREE_CODE (arg1) == FIXED_CST)
2424 return fold_convert_const_fixed_from_fixed (type, arg1);
2425 else if (TREE_CODE (arg1) == INTEGER_CST)
2426 return fold_convert_const_fixed_from_int (type, arg1);
2427 else if (TREE_CODE (arg1) == REAL_CST)
2428 return fold_convert_const_fixed_from_real (type, arg1);
2433 /* Construct a vector of zero elements of vector type TYPE. */
2436 build_zero_vector (tree type)
/* Build one zero of the element type, then replicate it across all
   TYPE_VECTOR_SUBPARTS lanes via a tree_cons list.  */
2441 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2442 units = TYPE_VECTOR_SUBPARTS (type);
2445 for (i = 0; i < units; i++)
2446 list = tree_cons (NULL_TREE, elem, list);
2447 return build_vector (type, list);
2450 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2453 fold_convertible_p (const_tree type, const_tree arg)
2455 tree orig = TREE_TYPE (arg);
/* Error nodes are never convertible.  */
2460 if (TREE_CODE (arg) == ERROR_MARK
2461 || TREE_CODE (type) == ERROR_MARK
2462 || TREE_CODE (orig) == ERROR_MARK)
/* Same main variant: trivially convertible.  */
2465 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2468 switch (TREE_CODE (type))
2470 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2471 case POINTER_TYPE: case REFERENCE_TYPE:
2473 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2474 || TREE_CODE (orig) == OFFSET_TYPE)
/* Vector conversions require equal total size.  */
2476 return (TREE_CODE (orig) == VECTOR_TYPE
2477 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
/* Other kinds: convertible only to the same kind of type.  */
2480 return TREE_CODE (type) == TREE_CODE (orig);
2484 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2485 simple conversions in preference to calling the front-end's convert.  */
/* NOTE(review): excerpt — embedded line numbers are non-contiguous, so
   braces, case labels and some returns of the original are missing.  */
2488 fold_convert (tree type, tree arg)
2490 tree orig = TREE_TYPE (arg);
2496 if (TREE_CODE (arg) == ERROR_MARK
2497 || TREE_CODE (type) == ERROR_MARK
2498 || TREE_CODE (orig) == ERROR_MARK)
2499 return error_mark_node;
/* Same main variant: a plain NOP conversion suffices.  */
2501 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2502 return fold_build1 (NOP_EXPR, type, arg);
2504 switch (TREE_CODE (type))
2506 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2507 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constants are folded directly via fold_convert_const when possible.  */
2509 if (TREE_CODE (arg) == INTEGER_CST)
2511 tem = fold_convert_const (NOP_EXPR, type, arg);
2512 if (tem != NULL_TREE)
2515 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2516 || TREE_CODE (orig) == OFFSET_TYPE)
2517 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: convert its real part.  */
2518 if (TREE_CODE (orig) == COMPLEX_TYPE)
2520 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2521 return fold_convert (type, tem);
2523 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2524 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2525 return fold_build1 (NOP_EXPR, type, arg);
2528 if (TREE_CODE (arg) == INTEGER_CST)
2530 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2531 if (tem != NULL_TREE)
2534 else if (TREE_CODE (arg) == REAL_CST)
2536 tem = fold_convert_const (NOP_EXPR, type, arg);
2537 if (tem != NULL_TREE)
2540 else if (TREE_CODE (arg) == FIXED_CST)
2542 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2543 if (tem != NULL_TREE)
2547 switch (TREE_CODE (orig))
2550 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2551 case POINTER_TYPE: case REFERENCE_TYPE:
2552 return fold_build1 (FLOAT_EXPR, type, arg);
2555 return fold_build1 (NOP_EXPR, type, arg);
2557 case FIXED_POINT_TYPE:
2558 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2561 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2562 return fold_convert (type, tem);
2568 case FIXED_POINT_TYPE:
2569 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2570 || TREE_CODE (arg) == REAL_CST)
2572 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2573 if (tem != NULL_TREE)
2577 switch (TREE_CODE (orig))
2579 case FIXED_POINT_TYPE:
2584 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2587 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2588 return fold_convert (type, tem);
2595 switch (TREE_CODE (orig))
2598 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2599 case POINTER_TYPE: case REFERENCE_TYPE:
2601 case FIXED_POINT_TYPE:
/* Scalar -> complex: build (value, 0) with both parts converted.  */
2602 return build2 (COMPLEX_EXPR, type,
2603 fold_convert (TREE_TYPE (type), arg),
2604 fold_convert (TREE_TYPE (type), integer_zero_node))\u003b
2609 if (TREE_CODE (arg) == COMPLEX_EXPR)
2611 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2612 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2613 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Otherwise evaluate ARG once (save_expr) and convert each part.  */
2616 arg = save_expr (arg);
2617 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2618 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2619 rpart = fold_convert (TREE_TYPE (type), rpart);
2620 ipart = fold_convert (TREE_TYPE (type), ipart);
2621 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2629 if (integer_zerop (arg))
2630 return build_zero_vector (type);
2631 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2632 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2633 || TREE_CODE (orig) == VECTOR_TYPE);
2634 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2637 tem = fold_ignored_result (arg);
2638 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2640 return fold_build1 (NOP_EXPR, type, tem);
2647 /* Return false if expr can be assumed not to be an lvalue, true
/* NOTE(review): excerpt — many case labels of this switch were dropped
   by the extraction; only a subset is visible below.  */
2651 maybe_lvalue_p (const_tree x)
2653 /* We only need to wrap lvalue tree codes.  */
2654 switch (TREE_CODE (x))
2665 case ALIGN_INDIRECT_REF:
2666 case MISALIGNED_INDIRECT_REF:
2668 case ARRAY_RANGE_REF:
2674 case PREINCREMENT_EXPR:
2675 case PREDECREMENT_EXPR:
2677 case TRY_CATCH_EXPR:
2678 case WITH_CLEANUP_EXPR:
2681 case GIMPLE_MODIFY_STMT:
2690 /* Assume the worst for front-end tree codes.  */
2691 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2699 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2704 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X cannot be an lvalue anyway, return it unwrapped (the return
   statement for this branch is not visible in this excerpt).  */
2709 if (! maybe_lvalue_p (x))
2711 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2714 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2715 Zero means allow extended lvalues.  */
2717 int pedantic_lvalues;
2719 /* When pedantic, return an expr equal to X but certainly not valid as a
2720 pedantic lvalue.  Otherwise, return X.  */
2723 pedantic_non_lvalue (tree x)
/* Only wrap in NON_LVALUE_EXPR when pedantic lvalue rules are active.  */
2725 if (pedantic_lvalues)
2726 return non_lvalue (x);
2731 /* Given a tree comparison code, return the code that is the logical inverse
2732 of the given code.  It is not safe to do this for floating-point
2733 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2734 as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
/* NOTE(review): excerpt — several case labels of the switch are not
   visible; only the UN*-producing arms and the ORDERED pair remain.  */
2737 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs honored, inversion may change trap
   behavior (the bail-out statement is not visible in this excerpt).  */
2739 if (honor_nans && flag_trapping_math)
2749 return honor_nans ? UNLE_EXPR : LE_EXPR;
2751 return honor_nans ? UNLT_EXPR : LT_EXPR;
2753 return honor_nans ? UNGE_EXPR : GE_EXPR;
2755 return honor_nans ? UNGT_EXPR : GT_EXPR;
2769 return UNORDERED_EXPR;
2770 case UNORDERED_EXPR:
2771 return ORDERED_EXPR;
2777 /* Similar, but return the comparison that results if the operands are
2778 swapped.  This is safe for floating-point.  */
/* NOTE(review): excerpt — the switch body is almost entirely missing
   from this listing.  */
2781 swap_tree_comparison (enum tree_code code)
2788 case UNORDERED_EXPR:
2814 /* Convert a comparison tree code from an enum tree_code representation
2815 into a compcode bit-based encoding.  This function is the inverse of
2816 compcode_to_comparison.  */
/* NOTE(review): excerpt — the case labels pairing with several of the
   returns below were dropped by the extraction.  */
2818 static enum comparison_code
2819 comparison_to_compcode (enum tree_code code)
2836 return COMPCODE_ORD;
2837 case UNORDERED_EXPR:
2838 return COMPCODE_UNORD;
2840 return COMPCODE_UNLT;
2842 return COMPCODE_UNEQ;
2844 return COMPCODE_UNLE;
2846 return COMPCODE_UNGT;
2848 return COMPCODE_LTGT;
2850 return COMPCODE_UNGE;
2856 /* Convert a compcode bit-based encoding of a comparison operator back
2857 to GCC's enum tree_code representation.  This function is the
2858 inverse of comparison_to_compcode.  */
/* NOTE(review): excerpt — most case labels of the switch are missing;
   only the ORD/UNORD pair is visible.  */
2860 static enum tree_code
2861 compcode_to_comparison (enum comparison_code code)
2878 return ORDERED_EXPR;
2879 case COMPCODE_UNORD:
2880 return UNORDERED_EXPR;
2898 /* Return a tree for the comparison which is the combination of
2899 doing the AND or OR (depending on CODE) of the two operations LCODE
2900 and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2901 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2902 if this makes the transformation invalid.  */
/* NOTE(review): excerpt — embedded line numbers skip, so some braces
   and statements of the original are missing from this listing.  */
2905 combine_comparisons (enum tree_code code, enum tree_code lcode,
2906 enum tree_code rcode, tree truth_type,
2907 tree ll_arg, tree lr_arg)
2909 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2910 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2911 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2912 enum comparison_code compcode;
/* AND of comparisons intersects the compcode bits; OR unions them.  */
2916 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2917 compcode = lcompcode & rcompcode;
2920 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2921 compcode = lcompcode | rcompcode;
2930 /* Eliminate unordered comparisons, as well as LTGT and ORD
2931 which are not used unless the mode has NaNs.  */
2932 compcode &= ~COMPCODE_UNORD;
2933 if (compcode == COMPCODE_LTGT)
2934 compcode = COMPCODE_NE;
2935 else if (compcode == COMPCODE_ORD)
2936 compcode = COMPCODE_TRUE;
2938 else if (flag_trapping_math)
2940 /* Check that the original operation and the optimized ones will trap
2941 under the same condition.  */
2942 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2943 && (lcompcode != COMPCODE_EQ)
2944 && (lcompcode != COMPCODE_ORD);
2945 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2946 && (rcompcode != COMPCODE_EQ)
2947 && (rcompcode != COMPCODE_ORD);
2948 bool trap = (compcode & COMPCODE_UNORD) == 0
2949 && (compcode != COMPCODE_EQ)
2950 && (compcode != COMPCODE_ORD);
2952 /* In a short-circuited boolean expression the LHS might be
2953 such that the RHS, if evaluated, will never trap.  For
2954 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2955 if neither x nor y is NaN.  (This is a mixed blessing: for
2956 example, the expression above will never trap, hence
2957 optimizing it to x < y would be invalid).  */
2958 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2959 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2962 /* If the comparison was short-circuited, and only the RHS
2963 trapped, we may now generate a spurious trap.  */
2965 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2968 /* If we changed the conditions that cause a trap, we lose.  */
2969 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants.  */
2973 if (compcode == COMPCODE_TRUE)
2974 return constant_boolean_node (true, truth_type);
2975 else if (compcode == COMPCODE_FALSE)
2976 return constant_boolean_node (false, truth_type);
2978 return fold_build2 (compcode_to_comparison (compcode),
2979 truth_type, ll_arg, lr_arg);
2982 /* Return nonzero if CODE is a tree code that represents a truth value.  */
/* True for any comparison-class code or a logical TRUTH_* operator.  */
2985 truth_value_p (enum tree_code code)
2987 return (TREE_CODE_CLASS (code) == tcc_comparison
2988 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2989 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2990 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2993 /* Return nonzero if two operands (typically of the same tree node)
2994 are necessarily equal.  If either argument has side-effects this
2995 function returns zero.  FLAGS modifies behavior as follows:
2997 If OEP_ONLY_CONST is set, only return nonzero for constants.
2998 This function tests whether the operands are indistinguishable;
2999 it does not test whether they are equal using C's == operation.
3000 The distinction is important for IEEE floating point, because
3001 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3002 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3004 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3005 even though it may hold multiple values during a function.
3006 This is because a GCC tree node guarantees that nothing else is
3007 executed between the evaluation of its "operands" (which may often
3008 be evaluated in arbitrary order).  Hence if the operands themselves
3009 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3010 same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
3011 unset means assuming isochronic (or instantaneous) tree equivalence.
3012 Unless comparing arbitrary expression trees, such as from different
3013 statements, this flag can usually be left unset.
3015 If OEP_PURE_SAME is set, then pure functions with identical arguments
3016 are considered the same.  It is used when the caller has other ways
3017 to ensure that global memory is unchanged in between.  */
/* NOTE(review): excerpt — embedded line numbers skip, so braces and
   some statements of the original are missing from this listing.  */
3020 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3022 /* If either is ERROR_MARK, they aren't equal.  */
3023 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3026 /* If both types don't have the same signedness, then we can't consider
3027 them equal.  We must check this before the STRIP_NOPS calls
3028 because they may change the signedness of the arguments.  */
3029 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3032 /* If both types don't have the same precision, then it is not safe
3034 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3040 /* In case both args are comparisons but with different comparison
3041 code, try to swap the comparison operands of one arg to produce
3042 a match and compare that variant.  */
3043 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3044 && COMPARISON_CLASS_P (arg0)
3045 && COMPARISON_CLASS_P (arg1))
3047 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3049 if (TREE_CODE (arg0) == swap_code)
3050 return operand_equal_p (TREE_OPERAND (arg0, 0),
3051 TREE_OPERAND (arg1, 1), flags)
3052 && operand_equal_p (TREE_OPERAND (arg0, 1),
3053 TREE_OPERAND (arg1, 0), flags);
3056 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3057 /* This is needed for conversions and for COMPONENT_REF.
3058 Might as well play it safe and always test this.  */
3059 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3060 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3061 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3064 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3065 We don't care about side effects in that case because the SAVE_EXPR
3066 takes care of that for us. In all other cases, two expressions are
3067 equal if they have no side effects.  If we have two identical
3068 expressions with side effects that should be treated the same due
3069 to the only side effects being identical SAVE_EXPR's, that will
3070 be detected in the recursive calls below.  */
3071 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3072 && (TREE_CODE (arg0) == SAVE_EXPR
3073 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3076 /* Next handle constant cases, those for which we can return 1 even
3077 if ONLY_CONST is set.  */
3078 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3079 switch (TREE_CODE (arg0))
3082 return tree_int_cst_equal (arg0, arg1);
3085 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3086 TREE_FIXED_CST (arg1));
3089 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3090 TREE_REAL_CST (arg1)))
3094 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3096 /* If we do not distinguish between signed and unsigned zero,
3097 consider them equal.  */
3098 if (real_zerop (arg0) && real_zerop (arg1))
/* Vector constants: compare the element chains pairwise.  */
3107 v1 = TREE_VECTOR_CST_ELTS (arg0);
3108 v2 = TREE_VECTOR_CST_ELTS (arg1);
3111 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3114 v1 = TREE_CHAIN (v1);
3115 v2 = TREE_CHAIN (v2);
3122 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3124 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3128 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3129 && ! memcmp (TREE_STRING_POINTER (arg0),
3130 TREE_STRING_POINTER (arg1),
3131 TREE_STRING_LENGTH (arg0)));
3134 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3140 if (flags & OEP_ONLY_CONST)
3143 /* Define macros to test an operand from arg0 and arg1 for equality and a
3144 variant that allows null and views null as being different from any
3145 non-null value.  In the latter case, if either is null, then both
3146 must be; otherwise, do the normal comparison.  */
3147 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3148 TREE_OPERAND (arg1, N), flags)
3150 #define OP_SAME_WITH_NULL(N) \
3151 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3152 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3154 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3157 /* Two conversions are equal only if signedness and modes match.  */
3158 switch (TREE_CODE (arg0))
3162 case FIX_TRUNC_EXPR:
3163 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3164 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3174 case tcc_comparison:
3176 if (OP_SAME (0) && OP_SAME (1))
3179 /* For commutative ops, allow the other order.  */
3180 return (commutative_tree_code (TREE_CODE (arg0))
3181 && operand_equal_p (TREE_OPERAND (arg0, 0),
3182 TREE_OPERAND (arg1, 1), flags)
3183 && operand_equal_p (TREE_OPERAND (arg0, 1),
3184 TREE_OPERAND (arg1, 0), flags));
3187 /* If either of the pointer (or reference) expressions we are
3188 dereferencing contain a side effect, these cannot be equal.  */
3189 if (TREE_SIDE_EFFECTS (arg0)
3190 || TREE_SIDE_EFFECTS (arg1))
3193 switch (TREE_CODE (arg0))
3196 case ALIGN_INDIRECT_REF:
3197 case MISALIGNED_INDIRECT_REF:
3203 case ARRAY_RANGE_REF:
3204 /* Operands 2 and 3 may be null.
3205 Compare the array index by value if it is constant first as we
3206 may have different types but same value here.  */
3208 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3209 TREE_OPERAND (arg1, 1))
3211 && OP_SAME_WITH_NULL (2)
3212 && OP_SAME_WITH_NULL (3));
3215 /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3216 may be NULL when we're called to compare MEM_EXPRs.  */
3217 return OP_SAME_WITH_NULL (0)
3219 && OP_SAME_WITH_NULL (2);
3222 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3228 case tcc_expression:
3229 switch (TREE_CODE (arg0))
3232 case TRUTH_NOT_EXPR:
3235 case TRUTH_ANDIF_EXPR:
3236 case TRUTH_ORIF_EXPR:
3237 return OP_SAME (0) && OP_SAME (1);
3239 case TRUTH_AND_EXPR:
3241 case TRUTH_XOR_EXPR:
3242 if (OP_SAME (0) && OP_SAME (1))
3245 /* Otherwise take into account this is a commutative operation.  */
3246 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3247 TREE_OPERAND (arg1, 1), flags)
3248 && operand_equal_p (TREE_OPERAND (arg0, 1),
3249 TREE_OPERAND (arg1, 0), flags));
3256 switch (TREE_CODE (arg0))
3259 /* If the CALL_EXPRs call different functions, then they
3260 clearly can not be equal.  */
3261 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3266 unsigned int cef = call_expr_flags (arg0);
3267 if (flags & OEP_PURE_SAME)
3268 cef &= ECF_CONST | ECF_PURE;
3275 /* Now see if all the arguments are the same.  */
3277 const_call_expr_arg_iterator iter0, iter1;
3279 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3280 a1 = first_const_call_expr_arg (arg1, &iter1);
3282 a0 = next_const_call_expr_arg (&iter0),
3283 a1 = next_const_call_expr_arg (&iter1))
3284 if (! operand_equal_p (a0, a1, flags))
3287 /* If we get here and both argument lists are exhausted
3288 then the CALL_EXPRs are equal.  */
3289 return ! (a0 || a1);
3295 case tcc_declaration:
3296 /* Consider __builtin_sqrt equal to sqrt.  */
3297 return (TREE_CODE (arg0) == FUNCTION_DECL
3298 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3299 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3300 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3307 #undef OP_SAME_WITH_NULL
3310 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3311 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3313 When in doubt, return 0.  */
/* NOTE(review): excerpt — embedded line numbers skip, so braces and
   returns of the original are missing from this listing.  */
3316 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3318 int unsignedp1, unsignedpo;
3319 tree primarg0, primarg1, primother;
3320 unsigned int correct_width;
/* Exact operand equality is sufficient.  */
3322 if (operand_equal_p (arg0, arg1, 0))
3325 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3326 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3329 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3330 and see if the inner values are the same.  This removes any
3331 signedness comparison, which doesn't matter here.  */
3332 primarg0 = arg0, primarg1 = arg1;
3333 STRIP_NOPS (primarg0);
3334 STRIP_NOPS (primarg1);
3335 if (operand_equal_p (primarg0, primarg1, 0))
3338 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3339 actual comparison operand, ARG0.
3341 First throw away any conversions to wider types
3342 already present in the operands.  */
3344 primarg1 = get_narrower (arg1, &unsignedp1);
3345 primother = get_narrower (other, &unsignedpo);
3347 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3348 if (unsignedp1 == unsignedpo
3349 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3350 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3352 tree type = TREE_TYPE (arg0);
3354 /* Make sure shorter operand is extended the right way
3355 to match the longer operand.  */
3356 primarg1 = fold_convert (signed_or_unsigned_type_for
3357 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3359 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3366 /* See if ARG is an expression that is either a comparison or is performing
3367 arithmetic on comparisons.  The comparisons must only be comparing
3368 two different values, which will be stored in *CVAL1 and *CVAL2; if
3369 they are nonzero it means that some operands have already been found.
3370 No variables may be used anywhere else in the expression except in the
3371 comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3372 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3374 If this is true, return 1.  Otherwise, return zero.  */
/* NOTE(review): excerpt — embedded line numbers skip, so braces,
   case labels and some statements of the original are missing.  */
3377 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3379 enum tree_code code = TREE_CODE (arg);
3380 enum tree_code_class class = TREE_CODE_CLASS (code);
3382 /* We can handle some of the tcc_expression cases here.  */
3383 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3385 else if (class == tcc_expression
3386 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3387 || code == COMPOUND_EXPR))
3390 else if (class == tcc_expression && code == SAVE_EXPR
3391 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3393 /* If we've already found a CVAL1 or CVAL2, this expression is
3394 too complex to handle.  */
3395 if (*cval1 || *cval2)
/* Unary: recurse into the single operand.  */
3405 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary: both operands must qualify.  */
3408 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3409 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3410 cval1, cval2, save_p));
3415 case tcc_expression:
3416 if (code == COND_EXPR)
3417 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3418 cval1, cval2, save_p)
3419 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3420 cval1, cval2, save_p)
3421 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3422 cval1, cval2, save_p));
3425 case tcc_comparison:
3426 /* First see if we can handle the first operand, then the second.  For
3427 the second operand, we know *CVAL1 can't be zero.  It must be that
3428 one side of the comparison is each of the values; test for the
3429 case where this isn't true by failing if the two operands
3432 if (operand_equal_p (TREE_OPERAND (arg, 0),
3433 TREE_OPERAND (arg, 1), 0))
3437 *cval1 = TREE_OPERAND (arg, 0);
3438 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3440 else if (*cval2 == 0)
3441 *cval2 = TREE_OPERAND (arg, 0);
3442 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3447 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3449 else if (*cval2 == 0)
3450 *cval2 = TREE_OPERAND (arg, 1);
3451 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3463 /* ARG is a tree that is known to contain just arithmetic operations and
3464 comparisons.  Evaluate the operations in the tree substituting NEW0 for
3465 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): excerpt — embedded line numbers skip; braces, case
   labels and some statements of the original are missing here.  */
3469 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3471 tree type = TREE_TYPE (arg);
3472 enum tree_code code = TREE_CODE (arg);
3473 enum tree_code_class class = TREE_CODE_CLASS (code);
3475 /* We can handle some of the tcc_expression cases here.  */
3476 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3478 else if (class == tcc_expression
3479 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild the node with a substituted operand.  */
3485 return fold_build1 (code, type,
3486 eval_subst (TREE_OPERAND (arg, 0),
3487 old0, new0, old1, new1));
3490 return fold_build2 (code, type,
3491 eval_subst (TREE_OPERAND (arg, 0),
3492 old0, new0, old1, new1),
3493 eval_subst (TREE_OPERAND (arg, 1),
3494 old0, new0, old1, new1));
3496 case tcc_expression:
3500 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3503 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3506 return fold_build3 (code, type,
3507 eval_subst (TREE_OPERAND (arg, 0),
3508 old0, new0, old1, new1),
3509 eval_subst (TREE_OPERAND (arg, 1),
3510 old0, new0, old1, new1),
3511 eval_subst (TREE_OPERAND (arg, 2),
3512 old0, new0, old1, new1));
3516 /* Fall through - ???  */
3518 case tcc_comparison:
3520 tree arg0 = TREE_OPERAND (arg, 0);
3521 tree arg1 = TREE_OPERAND (arg, 1);
3523 /* We need to check both for exact equality and tree equality.  The
3524 former will be true if the operand has a side-effect.  In that
3525 case, we know the operand occurred exactly once.  */
3527 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3529 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3532 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3534 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3537 return fold_build2 (code, type, arg0, arg1);
3545 /* Return a tree for the case when the result of an expression is RESULT
3546 converted to TYPE and OMITTED was previously an operand of the expression
3547 but is now not needed (e.g., we folded OMITTED * 0).
3549 If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3550 the conversion of RESULT to TYPE.  */
3553 omit_one_operand (tree type, tree result, tree omitted)
3555 tree t = fold_convert (type, result);
3557 /* If the resulting operand is an empty statement, just return the omitted
3558 statement casted to void.  */
3559 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3560 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Keep OMITTED's side effects by sequencing it before the result.  */
3562 if (TREE_SIDE_EFFECTS (omitted))
3563 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3565 return non_lvalue (t);
3568 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3571 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3573 tree t = fold_convert (type, result);
3575 /* If the resulting operand is an empty statement, just return the omitted
3576 statement casted to void.  */
3577 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3578 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Keep OMITTED's side effects by sequencing it before the result.  */
3580 if (TREE_SIDE_EFFECTS (omitted))
3581 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3583 return pedantic_non_lvalue (t);
3586 /* Return a tree for the case when the result of an expression is RESULT
3587 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3588 of the expression but are now not needed.
3590 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3591 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3592 evaluated before OMITTED2.  Otherwise, if neither has side effects,
3593 just do the conversion of RESULT to TYPE.  */
3596 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3598 tree t = fold_convert (type, result);
/* Build inside out so OMITTED1 ends up outermost, i.e. evaluated first.  */
3600 if (TREE_SIDE_EFFECTS (omitted2))
3601 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3602 if (TREE_SIDE_EFFECTS (omitted1))
3603 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3605 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3609 /* Return a simplified tree node for the truth-negation of ARG.  This
3610 never alters ARG itself.  We assume that ARG is an operation that
3611 returns a truth value (0 or 1).
3613 FIXME: one would think we would fold the result, but it causes
3614 problems with the dominator optimizer.  */
/* NOTE(review): excerpt — embedded line numbers skip, so braces and
   several case labels of the original switch are missing here.  */
3617 fold_truth_not_expr (tree arg)
3619 tree type = TREE_TYPE (arg);
3620 enum tree_code code = TREE_CODE (arg);
3622 /* If this is a comparison, we can simply invert it, except for
3623 floating-point non-equality comparisons, in which case we just
3624 enclose a TRUTH_NOT_EXPR around what we have.  */
3626 if (TREE_CODE_CLASS (code) == tcc_comparison)
3628 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3629 if (FLOAT_TYPE_P (op_type)
3630 && flag_trapping_math
3631 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3632 && code != NE_EXPR && code != EQ_EXPR)
3636 code = invert_tree_comparison (code,
3637 HONOR_NANS (TYPE_MODE (op_type)));
3638 if (code == ERROR_MARK)
3641 return build2 (code, type,
3642 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3649 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a && b) == !a || !b, and dually for OR.  */
3651 case TRUTH_AND_EXPR:
3652 return build2 (TRUTH_OR_EXPR, type,
3653 invert_truthvalue (TREE_OPERAND (arg, 0)),
3654 invert_truthvalue (TREE_OPERAND (arg, 1)));
3657 return build2 (TRUTH_AND_EXPR, type,
3658 invert_truthvalue (TREE_OPERAND (arg, 0)),
3659 invert_truthvalue (TREE_OPERAND (arg, 1)));
3661 case TRUTH_XOR_EXPR:
3662 /* Here we can invert either operand.  We invert the first operand
3663 unless the second operand is a TRUTH_NOT_EXPR in which case our
3664 result is the XOR of the first operand with the inside of the
3665 negation of the second operand.  */
3667 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3668 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3669 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3671 return build2 (TRUTH_XOR_EXPR, type,
3672 invert_truthvalue (TREE_OPERAND (arg, 0)),
3673 TREE_OPERAND (arg, 1));
3675 case TRUTH_ANDIF_EXPR:
3676 return build2 (TRUTH_ORIF_EXPR, type,
3677 invert_truthvalue (TREE_OPERAND (arg, 0)),
3678 invert_truthvalue (TREE_OPERAND (arg, 1)));
3680 case TRUTH_ORIF_EXPR:
3681 return build2 (TRUTH_ANDIF_EXPR, type,
3682 invert_truthvalue (TREE_OPERAND (arg, 0)),
3683 invert_truthvalue (TREE_OPERAND (arg, 1)));
3685 case TRUTH_NOT_EXPR:
3686 return TREE_OPERAND (arg, 0);
3690 tree arg1 = TREE_OPERAND (arg, 1);
3691 tree arg2 = TREE_OPERAND (arg, 2);
3692 /* A COND_EXPR may have a throw as one operand, which
3693 then has void type.  Just leave void operands
3695 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3696 VOID_TYPE_P (TREE_TYPE (arg1))
3697 ? arg1 : invert_truthvalue (arg1),
3698 VOID_TYPE_P (TREE_TYPE (arg2))
3699 ? arg2 : invert_truthvalue (arg2));
3703 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3704 invert_truthvalue (TREE_OPERAND (arg, 1)));
3706 case NON_LVALUE_EXPR:
3707 return invert_truthvalue (TREE_OPERAND (arg, 0));
3710 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3711 return build1 (TRUTH_NOT_EXPR, type, arg);
3715 return build1 (TREE_CODE (arg), type,
3716 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) inverts to (x == 0) only for a mask of exactly 1.  */
3719 if (!integer_onep (TREE_OPERAND (arg, 1)))
3721 return build2 (EQ_EXPR, type, arg,
3722 build_int_cst (type, 0));
3725 return build1 (TRUTH_NOT_EXPR, type, arg);
3727 case CLEANUP_POINT_EXPR:
3728 return build1 (CLEANUP_POINT_EXPR, type,
3729 invert_truthvalue (TREE_OPERAND (arg, 0)));
3738 /* Return a simplified tree node for the truth-negation of ARG.  This
3739 never alters ARG itself.  We assume that ARG is an operation that
3740 returns a truth value (0 or 1).
3742 FIXME: one would think we would fold the result, but it causes
3743 problems with the dominator optimizer.  */
3746 invert_truthvalue (tree arg)
3750 if (TREE_CODE (arg) == ERROR_MARK)
/* Try the real work in fold_truth_not_expr; wrap in TRUTH_NOT_EXPR
   when it declines (the intermediate check is not visible here).  */
3753 tem = fold_truth_not_expr (arg);
3755 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3760 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3761 operands are another bit-wise operation with a common input.  If so,
3762 distribute the bit operations to save an operation and possibly two if
3763 constants are involved.  For example, convert
3764 (A | B) & (A | C) into A | (B & C)
3765 Further simplification will occur if B and C are constants.
3767 If this optimization cannot be done, 0 will be returned.  */
3770 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same AND/IOR code, different from CODE.  */
3775 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3776 || TREE_CODE (arg0) == code
3777 || (TREE_CODE (arg0) != BIT_AND_EXPR
3778 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the common operand among the four position combinations.  */
3781 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3783 common = TREE_OPERAND (arg0, 0);
3784 left = TREE_OPERAND (arg0, 1);
3785 right = TREE_OPERAND (arg1, 1);
3787 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3789 common = TREE_OPERAND (arg0, 0);
3790 left = TREE_OPERAND (arg0, 1);
3791 right = TREE_OPERAND (arg1, 0);
3793 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3795 common = TREE_OPERAND (arg0, 1);
3796 left = TREE_OPERAND (arg0, 0);
3797 right = TREE_OPERAND (arg1, 1);
3799 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3801 common = TREE_OPERAND (arg0, 1);
3802 left = TREE_OPERAND (arg0, 0);
3803 right = TREE_OPERAND (arg1, 0);
3808 return fold_build2 (TREE_CODE (arg0), type, common,
3809 fold_build2 (code, type, left, right));
3812 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3813 with code CODE.  This optimization is unsafe.  */
/* NOTE(review): "unsafe" here is the original author's warning — the
   transformation changes floating-point rounding behavior.  */
3815 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3817 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3818 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3820 /* (A / C) +- (B / C) -> (A +- B) / C.  */
3822 && operand_equal_p (TREE_OPERAND (arg0, 1),
3823 TREE_OPERAND (arg1, 1), 0))
3824 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3825 fold_build2 (code, type,
3826 TREE_OPERAND (arg0, 0),
3827 TREE_OPERAND (arg1, 0)),
3828 TREE_OPERAND (arg0, 1));
3830 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3831 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3832 TREE_OPERAND (arg1, 0), 0)
3833 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3834 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3836 REAL_VALUE_TYPE r0, r1;
3837 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3838 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Take reciprocals of the constants, then combine with CODE.  */
3840 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3842 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3843 real_arithmetic (&r0, code, &r0, &r1);
3844 return fold_build2 (MULT_EXPR, type,
3845 TREE_OPERAND (arg0, 0),
3846 build_real (type, r0));
3852 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3853 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3856 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3863 tree size = TYPE_SIZE (TREE_TYPE (inner));
/* Fast path: if the reference covers the whole of an integral or pointer
   object, a plain conversion suffices -- no BIT_FIELD_REF is needed.  */
3864 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3865 || POINTER_TYPE_P (TREE_TYPE (inner)))
3866 && host_integerp (size, 0)
3867 && tree_low_cst (size, 0) == bitsize)
3868 return fold_convert (type, inner);
/* Otherwise build the explicit bit-field reference and record its
   signedness.  NOTE(review): lossy extraction -- the "return result"
   line is missing from view.  */
3871 result = build3 (BIT_FIELD_REF, type, inner,
3872 size_int (bitsize), bitsize_int (bitpos));
3874 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3879 /* Optimize a bit-field compare.
3881 There are two cases: First is a compare against a constant and the
3882 second is a comparison of two items where the fields are at the same
3883 bit position relative to the start of a chunk (byte, halfword, word)
3884 large enough to contain it. In these cases we can avoid the shift
3885 implicit in bitfield extractions.
3887 For constants, we emit a compare of the shifted constant with the
3888 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3889 compared. For two fields at the same position, we do the ANDs with the
3890 similar mask and compare the result of the ANDs.
3892 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3893 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3894 are the left and right operands of the comparison, respectively.
3896 If the optimization described above can be done, we return the resulting
3897 tree. Otherwise we return zero. */
/* NOTE(review): lossy extraction throughout this function -- the
   signature's LHS/RHS parameter line, OFFSET/MASK declarations, braces
   and several "return 0" exits are absent from view.  */
3900 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3903 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3904 tree type = TREE_TYPE (lhs);
3905 tree signed_type, unsigned_type;
3906 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3907 enum machine_mode lmode, rmode, nmode;
3908 int lunsignedp, runsignedp;
3909 int lvolatilep = 0, rvolatilep = 0;
3910 tree linner, rinner = NULL_TREE;
3914 /* Get all the information about the extractions being done. If the bit size
3915 if the same as the size of the underlying object, we aren't doing an
3916 extraction at all and so can do nothing. We also don't want to
3917 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3918 then will no longer be able to replace it. */
3919 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3920 &lunsignedp, &lvolatilep, false);
3921 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3922 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3927 /* If this is not a constant, we can only do something if bit positions,
3928 sizes, and signedness are the same. */
3929 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3930 &runsignedp, &rvolatilep, false);
3932 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3933 || lunsignedp != runsignedp || offset != 0
3934 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3938 /* See if we can find a mode to refer to this field. We should be able to,
3939 but fail if we can't. */
3940 nmode = get_best_mode (lbitsize, lbitpos,
3941 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3942 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3943 TYPE_ALIGN (TREE_TYPE (rinner))),
3944 word_mode, lvolatilep || rvolatilep);
3945 if (nmode == VOIDmode)
3948 /* Set signed and unsigned types of the precision of this mode for the
3950 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3951 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3953 /* Compute the bit position and size for the new reference and our offset
3954 within it. If the new reference is the same size as the original, we
3955 won't optimize anything, so return zero. */
3956 nbitsize = GET_MODE_BITSIZE (nmode);
3957 nbitpos = lbitpos & ~ (nbitsize - 1);
3959 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 is the most significant bit, so the
   field position counts from the other end of the word.  */
3962 if (BYTES_BIG_ENDIAN)
3963 lbitpos = nbitsize - lbitsize - lbitpos;
3965 /* Make the mask to be used against the extracted field. */
3966 mask = build_int_cst_type (unsigned_type, -1)
3967 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3968 mask = const_binop (RSHIFT_EXPR, mask,
3969 size_int (nbitsize - lbitsize - lbitpos), 0);
3972 /* If not comparing with constant, just rework the comparison
3974 return fold_build2 (code, compare_type,
3975 fold_build2 (BIT_AND_EXPR, unsigned_type,
3976 make_bit_field_ref (linner,
3981 fold_build2 (BIT_AND_EXPR, unsigned_type,
3982 make_bit_field_ref (rinner,
3988 /* Otherwise, we are handling the constant case. See if the constant is too
3989 big for the field. Warn and return a tree of for 0 (false) if so. We do
3990 this not only for its own sake, but to avoid having to test for this
3991 error case below. If we didn't, we might generate wrong code.
3993 For unsigned fields, the constant shifted right by the field length should
3994 be all zero. For signed fields, the high-order bits should agree with
3999 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4000 fold_convert (unsigned_type, rhs),
4001 size_int (lbitsize), 0)))
4003 warning (0, "comparison is always %d due to width of bit-field",
4005 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: the bits above the field must be a sign extension of
   the field's own sign bit -- all zeros or all ones.  */
4010 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4011 size_int (lbitsize - 1), 0);
4012 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4014 warning (0, "comparison is always %d due to width of bit-field",
4016 return constant_boolean_node (code == NE_EXPR, compare_type);
4020 /* Single-bit compares should always be against zero. */
4021 if (lbitsize == 1 && ! integer_zerop (rhs))
4023 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4024 rhs = build_int_cst (type, 0);
4027 /* Make a new bitfield reference, shift the constant over the
4028 appropriate number of bits and mask it with the computed mask
4029 (in case this was a signed field). If we changed it, make a new one. */
4030 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4033 TREE_SIDE_EFFECTS (lhs) = 1;
4034 TREE_THIS_VOLATILE (lhs) = 1;
4037 rhs = const_binop (BIT_AND_EXPR,
4038 const_binop (LSHIFT_EXPR,
4039 fold_convert (unsigned_type, rhs),
4040 size_int (lbitpos), 0),
4043 return build2 (code, compare_type,
4044 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4048 /* Subroutine for fold_truthop: decode a field reference.
4050 If EXP is a comparison reference, we return the innermost reference.
4052 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4053 set to the starting bit number.
4055 If the innermost field can be completely contained in a mode-sized
4056 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4058 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4059 otherwise it is not changed.
4061 *PUNSIGNEDP is set to the signedness of the field.
4063 *PMASK is set to the mask used. This is either contained in a
4064 BIT_AND_EXPR or derived from the width of the field.
4066 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4068 Return 0 if this is not a component reference or is one that we can't
4069 do anything with. */
/* NOTE(review): lossy extraction -- declarations of AND_MASK and
   UNSIGNED_TYPE, several braces and "return 0" exits, and the final
   "return inner" are absent from view.  */
4072 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4073 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4074 int *punsignedp, int *pvolatilep,
4075 tree *pmask, tree *pand_mask)
4077 tree outer_type = 0;
4079 tree mask, inner, offset;
4081 unsigned int precision;
4083 /* All the optimizations using this function assume integer fields.
4084 There are problems with FP fields since the type_for_size call
4085 below can fail for, e.g., XFmode. */
4086 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4089 /* We are interested in the bare arrangement of bits, so strip everything
4090 that doesn't affect the machine mode. However, record the type of the
4091 outermost expression if it may matter below. */
4092 if (TREE_CODE (exp) == NOP_EXPR
4093 || TREE_CODE (exp) == CONVERT_EXPR
4094 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4095 outer_type = TREE_TYPE (exp);
/* Peel off an explicit AND mask, if present, and remember it.  */
4098 if (TREE_CODE (exp) == BIT_AND_EXPR)
4100 and_mask = TREE_OPERAND (exp, 1);
4101 exp = TREE_OPERAND (exp, 0);
4102 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4103 if (TREE_CODE (and_mask) != INTEGER_CST)
4107 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4108 punsignedp, pvolatilep, false);
4109 if ((inner == exp && and_mask == 0)
4110 || *pbitsize < 0 || offset != 0
4111 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4114 /* If the number of bits in the reference is the same as the bitsize of
4115 the outer type, then the outer type gives the signedness. Otherwise
4116 (in case of a small bitfield) the signedness is unchanged. */
4117 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4118 *punsignedp = TYPE_UNSIGNED (outer_type);
4120 /* Compute the mask to access the bitfield. */
4121 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4122 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value shifted left then right leaves exactly *PBITSIZE
   low-order one bits.  */
4124 mask = build_int_cst_type (unsigned_type, -1);
4126 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4127 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4129 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4131 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4132 fold_convert (unsigned_type, and_mask), mask);
4135 *pand_mask = and_mask;
4139 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* NOTE(review): lossy extraction -- the rest of this comment, the
   function's return type line, and the shift count argument on line
   4155's const_binop are absent from view.  */
4143 all_ones_mask_p (const_tree mask, int size)
4145 tree type = TREE_TYPE (mask);
4146 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the signed variant of TYPE, then check
   that MASK equals that value shifted left then right by
   PRECISION - SIZE, i.e. exactly SIZE low-order one bits.  */
4149 tmask = build_int_cst_type (signed_type_for (type), -1);
4152 tree_int_cst_equal (mask,
4153 const_binop (RSHIFT_EXPR,
4154 const_binop (LSHIFT_EXPR, tmask,
4155 size_int (precision - size),
4157 size_int (precision - size), 0));
4160 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4161 represents the sign bit of EXP's type. If EXP represents a sign
4162 or zero extension, also test VAL against the unextended type.
4163 The return value is the (sub)expression whose sign bit is VAL,
4164 or NULL_TREE otherwise. */
/* NOTE(review): lossy extraction -- WIDTH's declaration, several zero
   assignments to the hi/lo halves, and the "return exp" /
   "return NULL_TREE" lines are absent from view.  */
4167 sign_bit_p (tree exp, const_tree val)
4169 unsigned HOST_WIDE_INT mask_lo, lo;
4170 HOST_WIDE_INT mask_hi, hi;
4174 /* Tree EXP must have an integral type. */
4175 t = TREE_TYPE (exp);
4176 if (! INTEGRAL_TYPE_P (t))
4179 /* Tree VAL must be an integer constant. */
4180 if (TREE_CODE (val) != INTEGER_CST
4181 || TREE_OVERFLOW (val))
4184 width = TYPE_PRECISION (t);
/* The constant is represented as a HIGH:LOW pair of HOST_WIDE_INTs;
   place the single sign bit and the valid-bits mask in whichever half
   the type's precision reaches.  */
4185 if (width > HOST_BITS_PER_WIDE_INT)
4187 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4190 mask_hi = ((unsigned HOST_WIDE_INT) -1
4191 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4197 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4200 mask_lo = ((unsigned HOST_WIDE_INT) -1
4201 >> (HOST_BITS_PER_WIDE_INT - width));
4204 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4205 treat VAL as if it were unsigned. */
4206 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4207 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4210 /* Handle extension from a narrower type. */
4211 if (TREE_CODE (exp) == NOP_EXPR
4212 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4213 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4218 /* Subroutine for fold_truthop: determine if an operand is simple enough
4219 to be evaluated unconditionally. */
4222 simple_operand_p (const_tree exp)
4224 /* Strip any conversions that don't change the machine mode. */
/* NOTE(review): lossy extraction -- the STRIP_NOPS call and the DECL_P
   test that opens the parenthesized clause below are absent from view.  */
4227 return (CONSTANT_CLASS_P (exp)
4228 || TREE_CODE (exp) == SSA_NAME
4230 && ! TREE_ADDRESSABLE (exp)
4231 && ! TREE_THIS_VOLATILE (exp)
4232 && ! DECL_NONLOCAL (exp)
4233 /* Don't regard global variables as simple. They may be
4234 allocated in ways unknown to the compiler (shared memory,
4235 #pragma weak, etc). */
4236 && ! TREE_PUBLIC (exp)
4237 && ! DECL_EXTERNAL (exp)
4238 /* Loading a static variable is unduly expensive, but global
4239 registers aren't expensive. */
4240 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4243 /* The following functions are subroutines to fold_range_test and allow it to
4244 try to change a logical combination of comparisons into a range test.
4247 X == 2 || X == 3 || X == 4 || X == 5
4251 (unsigned) (X - 2) <= 3
4253 We describe each set of comparisons as being either inside or outside
4254 a range, using a variable named like IN_P, and then describe the
4255 range with a lower and upper bound. If one of the bounds is omitted,
4256 it represents either the highest or lowest value of the type.
4258 In the comments below, we represent a range by two numbers in brackets
4259 preceded by a "+" to designate being inside that range, or a "-" to
4260 designate being outside that range, so the condition can be inverted by
4261 flipping the prefix. An omitted bound is represented by a "-". For
4262 example, "- [-, 10]" means being outside the range starting at the lowest
4263 possible value and ending at 10, in other words, being greater than 10.
4264 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4267 We set up things so that the missing bounds are handled in a consistent
4268 manner so neither a missing bound nor "true" and "false" need to be
4269 handled using a special case. */
4271 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4272 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4273 and UPPER1_P are nonzero if the respective argument is an upper bound
4274 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4275 must be specified for a comparison. ARG1 will be converted to ARG0's
4276 type if both are specified. */
/* NOTE(review): lossy extraction -- local declarations (TEM, SGN0, SGN1,
   RESULT), the switch statement's "case" keywords for the comparison
   codes, and "break"s are absent from view.  */
4279 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4280 tree arg1, int upper1_p)
4286 /* If neither arg represents infinity, do the normal operation.
4287 Else, if not a comparison, return infinity. Else handle the special
4288 comparison rules. Note that most of the cases below won't occur, but
4289 are handled for consistency. */
4291 if (arg0 != 0 && arg1 != 0)
4293 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4294 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4296 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4299 if (TREE_CODE_CLASS (code) != tcc_comparison)
4302 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4303 for neither. In real maths, we cannot assume open ended ranges are
4304 the same. But, this is computer arithmetic, where numbers are finite.
4305 We can therefore make the transformation of any unbounded range with
4306 the value Z, Z being greater than any representable number. This permits
4307 us to treat unbounded ranges as equal. */
4308 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4309 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the two signs per the comparison code (the surrounding
   switch cases are among the missing lines).  */
4313 result = sgn0 == sgn1;
4316 result = sgn0 != sgn1;
4319 result = sgn0 < sgn1;
4322 result = sgn0 <= sgn1;
4325 result = sgn0 > sgn1;
4328 result = sgn0 >= sgn1;
4334 return constant_boolean_node (result, type);
4337 /* Given EXP, a logical expression, set the range it is testing into
4338 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4339 actually being tested. *PLOW and *PHIGH will be made of the same
4340 type as the returned expression. If EXP is not a comparison, we
4341 will most likely not be returning a useful value and range. Set
4342 *STRICT_OVERFLOW_P to true if the return value is only valid
4343 because signed overflow is undefined; otherwise, do not change
4344 *STRICT_OVERFLOW_P. */
/* NOTE(review): lossy extraction -- the enclosing "while (1)" loop, many
   braces, "continue"/"break" statements, case labels and some local
   declarations (IN_P, N_IN_P, HIGH_POSITIVE, EQUIV_TYPE) are absent
   from view.  */
4347 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4348 bool *strict_overflow_p)
4350 enum tree_code code;
4351 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4352 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4354 tree low, high, n_low, n_high;
4356 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4357 and see if we can refine the range. Some of the cases below may not
4358 happen, but it doesn't seem worth worrying about this. We "continue"
4359 the outer loop when we've changed something; otherwise we "break"
4360 the switch, which will "break" the while. */
4363 low = high = build_int_cst (TREE_TYPE (exp), 0);
4367 code = TREE_CODE (exp);
4368 exp_type = TREE_TYPE (exp);
/* Pick up the operands that the various cases below examine, but only
   for expression classes that actually have them.  */
4370 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4372 if (TREE_OPERAND_LENGTH (exp) > 0)
4373 arg0 = TREE_OPERAND (exp, 0);
4374 if (TREE_CODE_CLASS (code) == tcc_comparison
4375 || TREE_CODE_CLASS (code) == tcc_unary
4376 || TREE_CODE_CLASS (code) == tcc_binary)
4377 arg0_type = TREE_TYPE (arg0);
4378 if (TREE_CODE_CLASS (code) == tcc_binary
4379 || TREE_CODE_CLASS (code) == tcc_comparison
4380 || (TREE_CODE_CLASS (code) == tcc_expression
4381 && TREE_OPERAND_LENGTH (exp) > 1))
4382 arg1 = TREE_OPERAND (exp, 1);
4387 case TRUTH_NOT_EXPR:
4388 in_p = ! in_p, exp = arg0;
4391 case EQ_EXPR: case NE_EXPR:
4392 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4393 /* We can only do something if the range is testing for zero
4394 and if the second operand is an integer constant. Note that
4395 saying something is "in" the range we make is done by
4396 complementing IN_P since it will set in the initial case of
4397 being not equal to zero; "out" is leaving it alone. */
4398 if (low == 0 || high == 0
4399 || ! integer_zerop (low) || ! integer_zerop (high)
4400 || TREE_CODE (arg1) != INTEGER_CST)
4405 case NE_EXPR: /* - [c, c] */
4408 case EQ_EXPR: /* + [c, c] */
4409 in_p = ! in_p, low = high = arg1;
4411 case GT_EXPR: /* - [-, c] */
4412 low = 0, high = arg1;
4414 case GE_EXPR: /* + [c, -] */
4415 in_p = ! in_p, low = arg1, high = 0;
4417 case LT_EXPR: /* - [c, -] */
4418 low = arg1, high = 0;
4420 case LE_EXPR: /* + [-, c] */
4421 in_p = ! in_p, low = 0, high = arg1;
4427 /* If this is an unsigned comparison, we also know that EXP is
4428 greater than or equal to zero. We base the range tests we make
4429 on that fact, so we record it here so we can parse existing
4430 range tests. We test arg0_type since often the return type
4431 of, e.g. EQ_EXPR, is boolean. */
4432 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4434 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4436 build_int_cst (arg0_type, 0),
4440 in_p = n_in_p, low = n_low, high = n_high;
4442 /* If the high bound is missing, but we have a nonzero low
4443 bound, reverse the range so it goes from zero to the low bound
4445 if (high == 0 && low && ! integer_zerop (low))
4448 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4449 integer_one_node, 0);
4450 low = build_int_cst (arg0_type, 0);
4458 /* (-x) IN [a,b] -> x in [-b, -a] */
4459 n_low = range_binop (MINUS_EXPR, exp_type,
4460 build_int_cst (exp_type, 0),
4462 n_high = range_binop (MINUS_EXPR, exp_type,
4463 build_int_cst (exp_type, 0),
4465 low = n_low, high = n_high;
/* BIT_NOT_EXPR: rewrite ~X as -X - 1 and iterate.  */
4471 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4472 build_int_cst (exp_type, 1));
4475 case PLUS_EXPR: case MINUS_EXPR:
4476 if (TREE_CODE (arg1) != INTEGER_CST)
4479 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4480 move a constant to the other side. */
4481 if (!TYPE_UNSIGNED (arg0_type)
4482 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4485 /* If EXP is signed, any overflow in the computation is undefined,
4486 so we don't worry about it so long as our computations on
4487 the bounds don't overflow. For unsigned, overflow is defined
4488 and this is exactly the right thing. */
4489 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4490 arg0_type, low, 0, arg1, 0);
4491 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4492 arg0_type, high, 1, arg1, 0);
4493 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4494 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4497 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4498 *strict_overflow_p = true;
4500 /* Check for an unsigned range which has wrapped around the maximum
4501 value thus making n_high < n_low, and normalize it. */
4502 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4504 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4505 integer_one_node, 0);
4506 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4507 integer_one_node, 0);
4509 /* If the range is of the form +/- [ x+1, x ], we won't
4510 be able to normalize it. But then, it represents the
4511 whole range or the empty set, so make it
4513 if (tree_int_cst_equal (n_low, low)
4514 && tree_int_cst_equal (n_high, high))
4520 low = n_low, high = n_high;
4525 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4526 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4529 if (! INTEGRAL_TYPE_P (arg0_type)
4530 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4531 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4534 n_low = low, n_high = high;
4537 n_low = fold_convert (arg0_type, n_low);
4540 n_high = fold_convert (arg0_type, n_high);
4543 /* If we're converting arg0 from an unsigned type, to exp,
4544 a signed type, we will be doing the comparison as unsigned.
4545 The tests above have already verified that LOW and HIGH
4548 So we have to ensure that we will handle large unsigned
4549 values the same way that the current signed bounds treat
4552 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4556 /* For fixed-point modes, we need to pass the saturating flag
4557 as the 2nd parameter. */
4558 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4559 equiv_type = lang_hooks.types.type_for_mode
4560 (TYPE_MODE (arg0_type),
4561 TYPE_SATURATING (arg0_type));
4563 equiv_type = lang_hooks.types.type_for_mode
4564 (TYPE_MODE (arg0_type), 1);
4566 /* A range without an upper bound is, naturally, unbounded.
4567 Since convert would have cropped a very large value, use
4568 the max value for the destination type. */
4570 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4571 : TYPE_MAX_VALUE (arg0_type);
4573 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4574 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4575 fold_convert (arg0_type,
4577 build_int_cst (arg0_type, 1));
4579 /* If the low bound is specified, "and" the range with the
4580 range for which the original unsigned value will be
4584 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4585 1, n_low, n_high, 1,
4586 fold_convert (arg0_type,
4591 in_p = (n_in_p == in_p);
4595 /* Otherwise, "or" the range with the range of the input
4596 that will be interpreted as negative. */
4597 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4598 0, n_low, n_high, 1,
4599 fold_convert (arg0_type,
4604 in_p = (in_p != n_in_p);
4609 low = n_low, high = n_high;
4619 /* If EXP is a constant, we can evaluate whether this is true or false. */
4620 if (TREE_CODE (exp) == INTEGER_CST)
4622 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4624 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4630 *pin_p = in_p, *plow = low, *phigh = high;
4634 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4635 type, TYPE, return an expression to test if EXP is in (or out of, depending
4636 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): lossy extraction -- the signature line, declarations of
   VALUE/PREC/HI, several guards (e.g. the "if (! in_p)" wrapping the
   invert path, the "low == 0" / "high == 0" single-bound tests), case
   labels of the switch, and braces are absent from view.  */
4639 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4641 tree etype = TREE_TYPE (exp);
4644 #ifdef HAVE_canonicalize_funcptr_for_compare
4645 /* Disable this optimization for function pointer expressions
4646 on targets that require function pointer canonicalization. */
4647 if (HAVE_canonicalize_funcptr_for_compare
4648 && TREE_CODE (etype) == POINTER_TYPE
4649 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* "Out of range" is built as the inversion of the "in range" test.  */
4655 value = build_range_check (type, exp, 1, low, high);
4657 return invert_truthvalue (value);
4662 if (low == 0 && high == 0)
4663 return build_int_cst (type, 1);
4666 return fold_build2 (LE_EXPR, type, exp,
4667 fold_convert (etype, high));
4670 return fold_build2 (GE_EXPR, type, exp,
4671 fold_convert (etype, low));
4673 if (operand_equal_p (low, high, 0))
4674 return fold_build2 (EQ_EXPR, type, exp,
4675 fold_convert (etype, low));
4677 if (integer_zerop (low))
4679 if (! TYPE_UNSIGNED (etype))
4681 etype = unsigned_type_for (etype);
4682 high = fold_convert (etype, high);
4683 exp = fold_convert (etype, exp);
4685 return build_range_check (type, exp, 1, 0, high);
4688 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4689 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4691 unsigned HOST_WIDE_INT lo;
4695 prec = TYPE_PRECISION (etype);
4696 if (prec <= HOST_BITS_PER_WIDE_INT)
4699 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4703 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4704 lo = (unsigned HOST_WIDE_INT) -1;
/* HIGH must equal the signed maximum of ETYPE for this transform.  */
4707 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4709 if (TYPE_UNSIGNED (etype))
4711 etype = signed_type_for (etype);
4712 exp = fold_convert (etype, exp);
4714 return fold_build2 (GT_EXPR, type, exp,
4715 build_int_cst (etype, 0));
4719 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4720 This requires wrap-around arithmetics for the type of the expression. */
4721 switch (TREE_CODE (etype))
4724 /* There is no requirement that LOW be within the range of ETYPE
4725 if the latter is a subtype. It must, however, be within the base
4726 type of ETYPE. So be sure we do the subtraction in that type. */
4727 if (TREE_TYPE (etype))
4728 etype = TREE_TYPE (etype);
4733 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4734 TYPE_UNSIGNED (etype));
4741 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4742 if (TREE_CODE (etype) == INTEGER_TYPE
4743 && !TYPE_OVERFLOW_WRAPS (etype))
4745 tree utype, minv, maxv;
4747 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4748 for the type in question, as we rely on this here. */
4749 utype = unsigned_type_for (etype);
4750 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4751 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4752 integer_one_node, 1);
4753 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4755 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4762 high = fold_convert (etype, high);
4763 low = fold_convert (etype, low);
4764 exp = fold_convert (etype, exp);
4766 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointers must use POINTER_PLUS_EXPR with a sizetype offset, so
   negate LOW and add rather than subtracting.  */
4769 if (POINTER_TYPE_P (etype))
4771 if (value != 0 && !TREE_OVERFLOW (value))
4773 low = fold_convert (sizetype, low);
4774 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4775 return build_range_check (type,
4776 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4777 1, build_int_cst (etype, 0), value);
4782 if (value != 0 && !TREE_OVERFLOW (value))
4783 return build_range_check (type,
4784 fold_build2 (MINUS_EXPR, etype, exp, low),
4785 1, build_int_cst (etype, 0), value);
4790 /* Return the predecessor of VAL in its type, handling the infinite case. */
4793 range_predecessor (tree val)
4795 tree type = TREE_TYPE (val);
/* At the type's minimum there is no predecessor; the (missing) line
   here returns 0 to represent the unbounded case.  */
4797 if (INTEGRAL_TYPE_P (type)
4798 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4801 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4804 /* Return the successor of VAL in its type, handling the infinite case. */
4807 range_successor (tree val)
4809 tree type = TREE_TYPE (val);
/* At the type's maximum there is no successor; the (missing) line here
   returns 0 to represent the unbounded case.  */
4811 if (INTEGRAL_TYPE_P (type)
4812 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4815 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4818 /* Given two ranges, see if we can merge them into one. Return 1 if we
4819 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): lossy extraction -- local declarations (IN_P, LOW, HIGH,
   TEM, TEMP, NO_OVERLAP, SUBSET), many braces, "return 0"/"return 1"
   statements and some case labels are absent from view.  */
4822 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4823 tree high0, int in1_p, tree low1, tree high1)
4831 int lowequal = ((low0 == 0 && low1 == 0)
4832 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4833 low0, 0, low1, 0)));
4834 int highequal = ((high0 == 0 && high1 == 0)
4835 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4836 high0, 1, high1, 1)));
4838 /* Make range 0 be the range that starts first, or ends last if they
4839 start at the same value. Swap them if it isn't. */
4840 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4843 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4844 high1, 1, high0, 1))))
4846 temp = in0_p, in0_p = in1_p, in1_p = temp;
4847 tem = low0, low0 = low1, low1 = tem;
4848 tem = high0, high0 = high1, high1 = tem;
4851 /* Now flag two cases, whether the ranges are disjoint or whether the
4852 second range is totally subsumed in the first. Note that the tests
4853 below are simplified by the ones above. */
4854 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4855 high0, 1, low1, 0));
4856 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4857 high1, 1, high0, 1));
4859 /* We now have four cases, depending on whether we are including or
4860 excluding the two ranges. */
4863 /* If they don't overlap, the result is false. If the second range
4864 is a subset it is the result. Otherwise, the range is from the start
4865 of the second to the end of the first. */
4867 in_p = 0, low = high = 0;
4869 in_p = 1, low = low1, high = high1;
4871 in_p = 1, low = low1, high = high0;
4874 else if (in0_p && ! in1_p)
4876 /* If they don't overlap, the result is the first range. If they are
4877 equal, the result is false. If the second range is a subset of the
4878 first, and the ranges begin at the same place, we go from just after
4879 the end of the second range to the end of the first. If the second
4880 range is not a subset of the first, or if it is a subset and both
4881 ranges end at the same place, the range starts at the start of the
4882 first range and ends just before the second range.
4883 Otherwise, we can't describe this as a single range. */
4885 in_p = 1, low = low0, high = high0;
4886 else if (lowequal && highequal)
4887 in_p = 0, low = high = 0;
4888 else if (subset && lowequal)
4890 low = range_successor (high1);
4895 /* We are in the weird situation where high0 > high1 but
4896 high1 has no successor. Punt. */
4900 else if (! subset || highequal)
4903 high = range_predecessor (low1);
4907 /* low0 < low1 but low1 has no predecessor. Punt. */
4915 else if (! in0_p && in1_p)
4917 /* If they don't overlap, the result is the second range. If the second
4918 is a subset of the first, the result is false. Otherwise,
4919 the range starts just after the first range and ends at the
4920 end of the second. */
4922 in_p = 1, low = low1, high = high1;
4923 else if (subset || highequal)
4924 in_p = 0, low = high = 0;
4927 low = range_successor (high0);
4932 /* high1 > high0 but high0 has no successor. Punt. */
4940 /* The case where we are excluding both ranges. Here the complex case
4941 is if they don't overlap. In that case, the only time we have a
4942 range is if they are adjacent. If the second is a subset of the
4943 first, the result is the first. Otherwise, the range to exclude
4944 starts at the beginning of the first range and ends at the end of the
4948 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4949 range_successor (high0),
4951 in_p = 0, low = low0, high = high1;
4954 /* Canonicalize - [min, x] into - [-, x]. */
4955 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4956 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only canonicalize when the type's precision fills its mode, so
   MIN/MAX values are the true extremes.  */
4959 if (TYPE_PRECISION (TREE_TYPE (low0))
4960 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4964 if (tree_int_cst_equal (low0,
4965 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4969 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4970 && integer_zerop (low0))
4977 /* Canonicalize - [x, max] into - [x, -]. */
4978 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4979 switch (TREE_CODE (TREE_TYPE (high1)))
4982 if (TYPE_PRECISION (TREE_TYPE (high1))
4983 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4987 if (tree_int_cst_equal (high1,
4988 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4992 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4993 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4995 integer_one_node, 1)))
5002 /* The ranges might be also adjacent between the maximum and
5003 minimum values of the given type. For
5004 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5005 return + [x + 1, y - 1]. */
5006 if (low0 == 0 && high1 == 0)
5008 low = range_successor (high0);
5009 high = range_predecessor (low1);
5010 if (low == 0 || high == 0)
5020 in_p = 0, low = low0, high = high0;
5022 in_p = 0, low = low0, high = high1;
5025 *pin_p = in_p, *plow = low, *phigh = high;
/* NOTE(review): this extract is elided — the leading numbers on each line are
   the original file's line numbers, and gaps in that numbering mean the
   return type, braces, switch/case labels and several conditions of this
   function are missing here.  Code below is kept byte-identical; only
   comments were added.  Verify against the full fold-const.c before use.  */
5030 /* Subroutine of fold, looking inside expressions of the form
5031 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5032 of the COND_EXPR. This function is being used also to optimize
5033 A op B ? C : A, by reversing the comparison first.
5035 Return a folded expression whose code is not a COND_EXPR
5036 anymore, or NULL_TREE if no folding opportunity is found. */
5039 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5041 enum tree_code comp_code = TREE_CODE (arg0);
5042 tree arg00 = TREE_OPERAND (arg0, 0);
5043 tree arg01 = TREE_OPERAND (arg0, 1);
5044 tree arg1_type = TREE_TYPE (arg1);
5050 /* If we have A op 0 ? A : -A, consider applying the following
5053 A == 0? A : -A same as -A
5054 A != 0? A : -A same as A
5055 A >= 0? A : -A same as abs (A)
5056 A > 0? A : -A same as abs (A)
5057 A <= 0? A : -A same as -abs (A)
5058 A < 0? A : -A same as -abs (A)
5060 None of these transformations work for modes with signed
5061 zeros. If A is +/-0, the first two transformations will
5062 change the sign of the result (from +0 to -0, or vice
5063 versa). The last four will fix the sign of the result,
5064 even though the original expressions could be positive or
5065 negative, depending on the sign of A.
5067 Note that all these transformations are correct if A is
5068 NaN, since the two alternatives (A and -A) are also NaNs. */
5069 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
5070 ? real_zerop (arg01)
5071 : integer_zerop (arg01))
5072 && ((TREE_CODE (arg2) == NEGATE_EXPR
5073 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5074 /* In the case that A is of the form X-Y, '-A' (arg2) may
5075 have already been folded to Y-X, check for that. */
5076 || (TREE_CODE (arg1) == MINUS_EXPR
5077 && TREE_CODE (arg2) == MINUS_EXPR
5078 && operand_equal_p (TREE_OPERAND (arg1, 0),
5079 TREE_OPERAND (arg2, 1), 0)
5080 && operand_equal_p (TREE_OPERAND (arg1, 1),
5081 TREE_OPERAND (arg2, 0), 0))))
/* NOTE(review): a switch over comp_code is elided here — the returns below
   presumably correspond to the EQ/NE/GE-GT/LE-LT cases listed in the table
   above; confirm the case labels in the original file.  */
5086 tem = fold_convert (arg1_type, arg1);
5087 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5090 return pedantic_non_lvalue (fold_convert (type, arg1));
5093 if (flag_trapping_math)
5098 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5099 arg1 = fold_convert (signed_type_for
5100 (TREE_TYPE (arg1)), arg1);
5101 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5102 return pedantic_non_lvalue (fold_convert (type, tem));
5105 if (flag_trapping_math)
5109 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5110 arg1 = fold_convert (signed_type_for
5111 (TREE_TYPE (arg1)), arg1);
5112 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5113 return negate_expr (fold_convert (type, tem));
5115 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5119 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5120 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5121 both transformations are correct when A is NaN: A != 0
5122 is then true, and A == 0 is false. */
5124 if (integer_zerop (arg01) && integer_zerop (arg2))
5126 if (comp_code == NE_EXPR)
5127 return pedantic_non_lvalue (fold_convert (type, arg1));
5128 else if (comp_code == EQ_EXPR)
5129 return build_int_cst (type, 0);
5132 /* Try some transformations of A op B ? A : B.
5134 A == B? A : B same as B
5135 A != B? A : B same as A
5136 A >= B? A : B same as max (A, B)
5137 A > B? A : B same as max (B, A)
5138 A <= B? A : B same as min (A, B)
5139 A < B? A : B same as min (B, A)
5141 As above, these transformations don't work in the presence
5142 of signed zeros. For example, if A and B are zeros of
5143 opposite sign, the first two transformations will change
5144 the sign of the result. In the last four, the original
5145 expressions give different results for (A=+0, B=-0) and
5146 (A=-0, B=+0), but the transformed expressions do not.
5148 The first two transformations are correct if either A or B
5149 is a NaN. In the first transformation, the condition will
5150 be false, and B will indeed be chosen. In the case of the
5151 second transformation, the condition A != B will be true,
5152 and A will be chosen.
5154 The conversions to max() and min() are not correct if B is
5155 a number and A is not. The conditions in the original
5156 expressions will be false, so all four give B. The min()
5157 and max() versions would give a NaN instead. */
5158 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
5159 /* Avoid these transformations if the COND_EXPR may be used
5160 as an lvalue in the C++ front-end. PR c++/19199. */
5162 || (strcmp (lang_hooks.name, "GNU C++") != 0
5163 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5164 || ! maybe_lvalue_p (arg1)
5165 || ! maybe_lvalue_p (arg2)))
5167 tree comp_op0 = arg00;
5168 tree comp_op1 = arg01;
5169 tree comp_type = TREE_TYPE (comp_op0);
5171 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5172 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* NOTE(review): another comp_code switch is elided below (EQ/NE, then
   LE/LT/UNLE/UNLT mapping to MIN_EXPR, GE/GT/UNGE/UNGT mapping to
   MAX_EXPR); only the case bodies survive in this extract.  */
5182 return pedantic_non_lvalue (fold_convert (type, arg2));
5184 return pedantic_non_lvalue (fold_convert (type, arg1));
5189 /* In C++ a ?: expression can be an lvalue, so put the
5190 operand which will be used if they are equal first
5191 so that we can convert this back to the
5192 corresponding COND_EXPR. */
5193 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5195 comp_op0 = fold_convert (comp_type, comp_op0);
5196 comp_op1 = fold_convert (comp_type, comp_op1);
5197 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5198 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5199 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5200 return pedantic_non_lvalue (fold_convert (type, tem));
5207 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5209 comp_op0 = fold_convert (comp_type, comp_op0);
5210 comp_op1 = fold_convert (comp_type, comp_op1);
5211 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5212 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5213 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5214 return pedantic_non_lvalue (fold_convert (type, tem));
5218 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5219 return pedantic_non_lvalue (fold_convert (type, arg2));
5222 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5223 return pedantic_non_lvalue (fold_convert (type, arg1));
5226 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5231 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5232 we might still be able to simplify this. For example,
5233 if C1 is one less or one more than C2, this might have started
5234 out as a MIN or MAX and been transformed by this function.
5235 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5237 if (INTEGRAL_TYPE_P (type)
5238 && TREE_CODE (arg01) == INTEGER_CST
5239 && TREE_CODE (arg2) == INTEGER_CST)
/* NOTE(review): elided switch over comp_code (EQ, LT/LE, GT/GE cases);
   each branch below recovers a MIN_EXPR or MAX_EXPR from the C1/C2
   relationship, as the comments state.  */
5243 /* We can replace A with C1 in this case. */
5244 arg1 = fold_convert (type, arg01);
5245 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5248 /* If C1 is C2 + 1, this is min(A, C2). */
5249 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5251 && operand_equal_p (arg01,
5252 const_binop (PLUS_EXPR, arg2,
5253 build_int_cst (type, 1), 0),
5255 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5257 fold_convert (type, arg1),
5262 /* If C1 is C2 - 1, this is min(A, C2). */
5263 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5265 && operand_equal_p (arg01,
5266 const_binop (MINUS_EXPR, arg2,
5267 build_int_cst (type, 1), 0),
5269 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5271 fold_convert (type, arg1),
5276 /* If C1 is C2 - 1, this is max(A, C2). */
5277 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5279 && operand_equal_p (arg01,
5280 const_binop (MINUS_EXPR, arg2,
5281 build_int_cst (type, 1), 0),
5283 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5285 fold_convert (type, arg1),
5290 /* If C1 is C2 + 1, this is max(A, C2). */
5291 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5293 && operand_equal_p (arg01,
5294 const_binop (PLUS_EXPR, arg2,
5295 build_int_cst (type, 1), 0),
5297 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5299 fold_convert (type, arg1),
/* Default: merge short-circuit logical ops into non-short-circuit ones
   when branches are expensive (BRANCH_COST >= 2); targets may override.  */
5313 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5314 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
/* NOTE(review): this extract is elided — gaps in the embedded original
   line numbers mean the return type, braces, and parts of some conditions
   are missing.  Code kept byte-identical; only comments added.  */
5317 /* EXP is some logical combination of boolean tests. See if we can
5318 merge it into some range test. Return the new tree if so. */
5321 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5323 int or_op = (code == TRUTH_ORIF_EXPR
5324 || code == TRUTH_OR_EXPR);
5325 int in0_p, in1_p, in_p;
5326 tree low0, low1, low, high0, high1, high;
5327 bool strict_overflow_p = false;
5328 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5329 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5331 const char * const warnmsg = G_("assuming signed overflow does not occur "
5332 "when simplifying range test");
5334 /* If this is an OR operation, invert both sides; we will invert
5335 again at the end. */
5337 in0_p = ! in0_p, in1_p = ! in1_p;
5339 /* If both expressions are the same, if we can merge the ranges, and we
5340 can build the range test, return it or it inverted. If one of the
5341 ranges is always true or always false, consider it to be the same
5342 expression as the other. */
5343 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5344 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5346 && 0 != (tem = (build_range_check (type,
5348 : rhs != 0 ? rhs : integer_zero_node,
5351 if (strict_overflow_p)
5352 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON)
5353 return or_op ? invert_truthvalue (tem) : tem;
5356 /* On machines where the branch cost is expensive, if this is a
5357 short-circuited branch and the underlying object on both sides
5358 is the same, make a non-short-circuit operation. */
5359 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5360 && lhs != 0 && rhs != 0
5361 && (code == TRUTH_ANDIF_EXPR
5362 || code == TRUTH_ORIF_EXPR)
5363 && operand_equal_p (lhs, rhs, 0))
5365 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5366 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5367 which cases we can't do this. */
5368 if (simple_operand_p (lhs))
5369 return build2 (code == TRUTH_ANDIF_EXPR
5370 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5373 else if (lang_hooks.decls.global_bindings_p () == 0
5374 && ! CONTAINS_PLACEHOLDER_P (lhs))
5376 tree common = save_expr (lhs);
5378 if (0 != (lhs = build_range_check (type, common,
5379 or_op ? ! in0_p : in0_p,
5381 && (0 != (rhs = build_range_check (type, common,
5382 or_op ? ! in1_p : in1_p,
5385 if (strict_overflow_p)
5386 fold_overflow_warning (warnmsg,
5387 WARN_STRICT_OVERFLOW_COMPARISON);
5388 return build2 (code == TRUTH_ANDIF_EXPR
5389 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
/* NOTE(review): elided extract (embedded original line numbers have gaps);
   the return type, braces, and the early-return/NULL-mask guards are
   missing here.  Code kept byte-identical; only comments added.  */
5398 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5399 bit value. Arrange things so the extra bits will be set to zero if and
5400 only if C is signed-extended to its full width. If MASK is nonzero,
5401 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5404 unextend (tree c, int p, int unsignedp, tree mask)
5406 tree type = TREE_TYPE (c);
5407 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the full mode or is unsigned
   (the elided line presumably returns C unchanged here — confirm).  */
5410 if (p == modesize || unsignedp)
5413 /* We work by getting just the sign bit into the low-order bit, then
5414 into the high-order bit, then sign-extend. We then XOR that value
5416 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5417 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5419 /* We must use a signed type in order to get an arithmetic right shift.
5420 However, we must also avoid introducing accidental overflows, so that
5421 a subsequent call to integer_zerop will work. Hence we must
5422 do the type conversion here. At this point, the constant is either
5423 zero or one, and the conversion to a signed type can never overflow.
5424 We could get an overflow if this conversion is done anywhere else. */
5425 if (TYPE_UNSIGNED (type))
5426 temp = fold_convert (signed_type_for (type), temp);
5428 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5429 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* The `if (mask != 0)` guard for this AND appears to be elided.  */
5431 temp = const_binop (BIT_AND_EXPR, temp,
5432 fold_convert (TREE_TYPE (c), mask), 0);
5433 /* If necessary, convert the type back to match the type of C. */
5434 if (TYPE_UNSIGNED (type))
5435 temp = fold_convert (type, temp);
5437 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* NOTE(review): heavily elided extract — the embedded original line numbers
   skip, so the return type, many braces, `goto`/label scaffolding, and some
   statements of this function are missing.  Code kept byte-identical; only
   comments added.  Verify structure against the full fold-const.c.  */
5440 /* Find ways of folding logical expressions of LHS and RHS:
5441 Try to merge two comparisons to the same innermost item.
5442 Look for range tests like "ch >= '0' && ch <= '9'".
5443 Look for combinations of simple terms on machines with expensive branches
5444 and evaluate the RHS unconditionally.
5446 For example, if we have p->a == 2 && p->b == 4 and we can make an
5447 object large enough to span both A and B, we can do this with a comparison
5448 against the object ANDed with the a mask.
5450 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5451 operations to do this with one comparison.
5453 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5454 function and the one above.
5456 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5457 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5459 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5462 We return the simplified tree or 0 if no optimization is possible. */
5465 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5467 /* If this is the "or" of two comparisons, we can do something if
5468 the comparisons are NE_EXPR. If this is the "and", we can do something
5469 if the comparisons are EQ_EXPR. I.e.,
5470 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5472 WANTED_CODE is this operation code. For single bit fields, we can
5473 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5474 comparison for one-bit fields. */
5476 enum tree_code wanted_code;
5477 enum tree_code lcode, rcode;
5478 tree ll_arg, lr_arg, rl_arg, rr_arg;
5479 tree ll_inner, lr_inner, rl_inner, rr_inner;
5480 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5481 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5482 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5483 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5484 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5485 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5486 enum machine_mode lnmode, rnmode;
5487 tree ll_mask, lr_mask, rl_mask, rr_mask;
5488 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5489 tree l_const, r_const;
5490 tree lntype, rntype, result;
5491 int first_bit, end_bit;
5493 tree orig_lhs = lhs, orig_rhs = rhs;
5494 enum tree_code orig_code = code;
5496 /* Start by getting the comparison codes. Fail if anything is volatile.
5497 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5498 it were surrounded with a NE_EXPR. */
5500 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5503 lcode = TREE_CODE (lhs);
5504 rcode = TREE_CODE (rhs);
5506 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5508 lhs = build2 (NE_EXPR, truth_type, lhs,
5509 build_int_cst (TREE_TYPE (lhs), 0));
5513 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5515 rhs = build2 (NE_EXPR, truth_type, rhs,
5516 build_int_cst (TREE_TYPE (rhs), 0));
5520 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5521 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5524 ll_arg = TREE_OPERAND (lhs, 0);
5525 lr_arg = TREE_OPERAND (lhs, 1);
5526 rl_arg = TREE_OPERAND (rhs, 0);
5527 rr_arg = TREE_OPERAND (rhs, 1);
5529 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5530 if (simple_operand_p (ll_arg)
5531 && simple_operand_p (lr_arg))
5534 if (operand_equal_p (ll_arg, rl_arg, 0)
5535 && operand_equal_p (lr_arg, rr_arg, 0))
5537 result = combine_comparisons (code, lcode, rcode,
5538 truth_type, ll_arg, lr_arg);
5542 else if (operand_equal_p (ll_arg, rr_arg, 0)
5543 && operand_equal_p (lr_arg, rl_arg, 0))
5545 result = combine_comparisons (code, lcode,
5546 swap_tree_comparison (rcode),
5547 truth_type, ll_arg, lr_arg);
/* Normalize the short-circuit codes to their non-short-circuit forms
   for the rest of the analysis.  */
5553 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5554 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5556 /* If the RHS can be evaluated unconditionally and its operands are
5557 simple, it wins to evaluate the RHS unconditionally on machines
5558 with expensive branches. In this case, this isn't a comparison
5559 that can be merged. Avoid doing this if the RHS is a floating-point
5560 comparison since those can trap. */
5562 if (BRANCH_COST >= 2
5563 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5564 && simple_operand_p (rl_arg)
5565 && simple_operand_p (rr_arg))
5567 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5568 if (code == TRUTH_OR_EXPR
5569 && lcode == NE_EXPR && integer_zerop (lr_arg)
5570 && rcode == NE_EXPR && integer_zerop (rr_arg)
5571 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5572 return build2 (NE_EXPR, truth_type,
5573 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5575 build_int_cst (TREE_TYPE (ll_arg), 0));
5577 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5578 if (code == TRUTH_AND_EXPR
5579 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5580 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5581 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5582 return build2 (EQ_EXPR, truth_type,
5583 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5585 build_int_cst (TREE_TYPE (ll_arg), 0));
5587 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5589 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5590 return build2 (code, truth_type, lhs, rhs);
5595 /* See if the comparisons can be merged. Then get all the parameters for
5598 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5599 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decode the four comparison operands into inner object, bit position/
   size, mode, signedness, and mask.  */
5603 ll_inner = decode_field_reference (ll_arg,
5604 &ll_bitsize, &ll_bitpos, &ll_mode,
5605 &ll_unsignedp, &volatilep, &ll_mask,
5607 lr_inner = decode_field_reference (lr_arg,
5608 &lr_bitsize, &lr_bitpos, &lr_mode,
5609 &lr_unsignedp, &volatilep, &lr_mask,
5611 rl_inner = decode_field_reference (rl_arg,
5612 &rl_bitsize, &rl_bitpos, &rl_mode,
5613 &rl_unsignedp, &volatilep, &rl_mask,
5615 rr_inner = decode_field_reference (rr_arg,
5616 &rr_bitsize, &rr_bitpos, &rr_mode,
5617 &rr_unsignedp, &volatilep, &rr_mask,
5620 /* It must be true that the inner operation on the lhs of each
5621 comparison must be the same if we are to be able to do anything.
5622 Then see if we have constants. If not, the same must be true for
5624 if (volatilep || ll_inner == 0 || rl_inner == 0
5625 || ! operand_equal_p (ll_inner, rl_inner, 0))
5628 if (TREE_CODE (lr_arg) == INTEGER_CST
5629 && TREE_CODE (rr_arg) == INTEGER_CST)
5630 l_const = lr_arg, r_const = rr_arg;
5631 else if (lr_inner == 0 || rr_inner == 0
5632 || ! operand_equal_p (lr_inner, rr_inner, 0))
5635 l_const = r_const = 0;
5637 /* If either comparison code is not correct for our logical operation,
5638 fail. However, we can convert a one-bit comparison against zero into
5639 the opposite comparison against that bit being set in the field. */
5641 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5642 if (lcode != wanted_code)
5644 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5646 /* Make the left operand unsigned, since we are only interested
5647 in the value of one bit. Otherwise we are doing the wrong
/* NOTE(review): the lines that flip l_const/ll_unsignedp for the one-bit
   case are elided here.  */
5656 /* This is analogous to the code for l_const above. */
5657 if (rcode != wanted_code)
5659 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5668 /* See if we can find a mode that contains both fields being compared on
5669 the left. If we can't, fail. Otherwise, update all constants and masks
5670 to be relative to a field of that size. */
5671 first_bit = MIN (ll_bitpos, rl_bitpos);
5672 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5673 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5674 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5676 if (lnmode == VOIDmode)
5679 lnbitsize = GET_MODE_BITSIZE (lnmode);
5680 lnbitpos = first_bit & ~ (lnbitsize - 1);
5681 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5682 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5684 if (BYTES_BIG_ENDIAN)
5686 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5687 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5690 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5691 size_int (xll_bitpos), 0);
5692 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5693 size_int (xrl_bitpos), 0);
/* If the left constant exists, sign-adjust and position it; detect
   always-true/always-false comparisons caused by stray bits.  */
5697 l_const = fold_convert (lntype, l_const);
5698 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5699 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5700 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5701 fold_build1 (BIT_NOT_EXPR,
5705 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5707 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5712 r_const = fold_convert (lntype, r_const);
5713 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5714 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5715 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5716 fold_build1 (BIT_NOT_EXPR,
5720 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5722 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5726 /* If the right sides are not constant, do the same for it. Also,
5727 disallow this optimization if a size or signedness mismatch occurs
5728 between the left and right sides. */
5731 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5732 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5733 /* Make sure the two fields on the right
5734 correspond to the left without being swapped. */
5735 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5738 first_bit = MIN (lr_bitpos, rr_bitpos);
5739 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5740 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5741 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5743 if (rnmode == VOIDmode)
5746 rnbitsize = GET_MODE_BITSIZE (rnmode);
5747 rnbitpos = first_bit & ~ (rnbitsize - 1);
5748 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5749 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5751 if (BYTES_BIG_ENDIAN)
5753 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5754 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5757 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5758 size_int (xlr_bitpos), 0);
5759 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5760 size_int (xrr_bitpos), 0);
5762 /* Make a mask that corresponds to both fields being compared.
5763 Do this for both items being compared. If the operands are the
5764 same size and the bits being compared are in the same position
5765 then we can do this by masking both and comparing the masked
5767 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5768 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5769 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5771 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5772 ll_unsignedp || rl_unsignedp);
5773 if (! all_ones_mask_p (ll_mask, lnbitsize))
5774 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5776 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5777 lr_unsignedp || rr_unsignedp);
5778 if (! all_ones_mask_p (lr_mask, rnbitsize))
5779 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5781 return build2 (wanted_code, truth_type, lhs, rhs);
5784 /* There is still another way we can do something: If both pairs of
5785 fields being compared are adjacent, we may be able to make a wider
5786 field containing them both.
5788 Note that we still must mask the lhs/rhs expressions. Furthermore,
5789 the mask must be shifted to account for the shift done by
5790 make_bit_field_ref. */
5791 if ((ll_bitsize + ll_bitpos == rl_bitpos
5792 && lr_bitsize + lr_bitpos == rr_bitpos)
5793 || (ll_bitpos == rl_bitpos + rl_bitsize
5794 && lr_bitpos == rr_bitpos + rr_bitsize))
5798 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5799 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5800 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5801 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5803 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5804 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5805 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5806 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5808 /* Convert to the smaller type before masking out unwanted bits. */
5810 if (lntype != rntype)
5812 if (lnbitsize > rnbitsize)
5814 lhs = fold_convert (rntype, lhs);
5815 ll_mask = fold_convert (rntype, ll_mask);
5818 else if (lnbitsize < rnbitsize)
5820 rhs = fold_convert (lntype, rhs);
5821 lr_mask = fold_convert (lntype, lr_mask);
/* NOTE(review): the line assigning `type` (the common comparison type)
   appears to be elided before these uses of `type`.  */
5826 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5827 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5829 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5830 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5832 return build2 (wanted_code, truth_type, lhs, rhs);
5838 /* Handle the case of comparisons with constants. If there is something in
5839 common between the masks, those bits of the constants must be the same.
5840 If not, the condition is always false. Test for this to avoid generating
5841 incorrect code below. */
5842 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5843 if (! integer_zerop (result)
5844 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5845 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5847 if (wanted_code == NE_EXPR)
5849 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5850 return constant_boolean_node (true, truth_type);
5854 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5855 return constant_boolean_node (false, truth_type);
5859 /* Construct the expression we will return. First get the component
5860 reference we will make. Unless the mask is all ones the width of
5861 that field, perform the mask operation. Then compare with the
5863 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5864 ll_unsignedp || rl_unsignedp);
5866 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5867 if (! all_ones_mask_p (ll_mask, lnbitsize))
5868 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5870 return build2 (wanted_code, truth_type, result,
5871 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* NOTE(review): elided extract — the return type, braces, switch header and
   several case labels (EQ_EXPR, GE_EXPR, GT_EXPR, default) are missing per
   the gaps in the embedded original line numbers.  Code kept byte-identical;
   only comments added.  */
5874 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5878 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5881 enum tree_code op_code;
5882 tree comp_const = op1;
5884 int consts_equal, consts_lt;
5887 STRIP_SIGN_NOPS (arg0);
5889 op_code = TREE_CODE (arg0);
5890 minmax_const = TREE_OPERAND (arg0, 1);
5891 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5892 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5893 inner = TREE_OPERAND (arg0, 0);
5895 /* If something does not permit us to optimize, return the original tree. */
5896 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5897 || TREE_CODE (comp_const) != INTEGER_CST
5898 || TREE_OVERFLOW (comp_const)
5899 || TREE_CODE (minmax_const) != INTEGER_CST
5900 || TREE_OVERFLOW (minmax_const))
5903 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5904 and GT_EXPR, doing the rest with recursive calls using logical
5908 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE are handled by inverting the comparison and recursing.  */
5910 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5913 return invert_truthvalue (tem);
/* GE is decomposed into (EQ || GT) via recursion on each half.  */
5919 fold_build2 (TRUTH_ORIF_EXPR, type,
5920 optimize_minmax_comparison
5921 (EQ_EXPR, type, arg0, comp_const),
5922 optimize_minmax_comparison
5923 (GT_EXPR, type, arg0, comp_const));
/* EQ_EXPR case (label elided): compare against MIN/MAX directly.  */
5926 if (op_code == MAX_EXPR && consts_equal)
5927 /* MAX (X, 0) == 0 -> X <= 0 */
5928 return fold_build2 (LE_EXPR, type, inner, comp_const);
5930 else if (op_code == MAX_EXPR && consts_lt)
5931 /* MAX (X, 0) == 5 -> X == 5 */
5932 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5934 else if (op_code == MAX_EXPR)
5935 /* MAX (X, 0) == -1 -> false */
5936 return omit_one_operand (type, integer_zero_node, inner);
5938 else if (consts_equal)
5939 /* MIN (X, 0) == 0 -> X >= 0 */
5940 return fold_build2 (GE_EXPR, type, inner, comp_const);
5943 /* MIN (X, 0) == 5 -> false */
5944 return omit_one_operand (type, integer_zero_node, inner);
5947 /* MIN (X, 0) == -1 -> X == -1 */
5948 return fold_build2 (EQ_EXPR, type, inner, comp_const);
/* GT_EXPR case (label elided).  */
5951 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5952 /* MAX (X, 0) > 0 -> X > 0
5953 MAX (X, 0) > 5 -> X > 5 */
5954 return fold_build2 (GT_EXPR, type, inner, comp_const);
5956 else if (op_code == MAX_EXPR)
5957 /* MAX (X, 0) > -1 -> true */
5958 return omit_one_operand (type, integer_one_node, inner);
5960 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5961 /* MIN (X, 0) > 0 -> false
5962 MIN (X, 0) > 5 -> false */
5963 return omit_one_operand (type, integer_zero_node, inner);
5966 /* MIN (X, 0) > -1 -> X > -1 */
5967 return fold_build2 (GT_EXPR, type, inner, comp_const);
/* NOTE(review): elided extract — the return type, the static depth counter
   used for the recursion limit, and the increment/decrement around the
   extract_muldiv_1 call are missing per the gaps in the embedded original
   line numbers.  Code kept byte-identical; only comments added.  */
5974 /* T is an integer expression that is being multiplied, divided, or taken a
5975 modulus (CODE says which and what kind of divide or modulus) by a
5976 constant C. See if we can eliminate that operation by folding it with
5977 other operations already in T. WIDE_TYPE, if non-null, is a type that
5978 should be used for the computation if wider than our type.
5980 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5981 (X * 2) + (Y * 4). We must, however, be assured that either the original
5982 expression would not overflow or that overflow is undefined for the type
5983 in the language in question.
5985 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5986 the machine has a multiply-accumulate insn or that this is part of an
5987 addressing calculation.
5989 If we return a non-null expression, it is an equivalent form of the
5990 original computation, but need not be in the original type.
5992 We set *STRICT_OVERFLOW_P to true if the return values depends on
5993 signed overflow being undefined. Otherwise we do not change
5994 *STRICT_OVERFLOW_P. */
5997 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5998 bool *strict_overflow_p)
6000 /* To avoid exponential search depth, refuse to allow recursion past
6001 three levels. Beyond that (1) it's highly unlikely that we'll find
6002 something interesting and (2) we've probably processed it before
6003 when we built the inner expression. */
6012 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6019 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6020 bool *strict_overflow_p)
  /* Worker for extract_muldiv: try to fold the outer mul/div/mod by C
     into the sub-expression T, returning an equivalent tree (possibly
     in a wider type CTYPE) or NULL if no simplification is possible.
     NOTE(review): this listing elides many interior lines (braces, the
     'switch (tcode)' opener, several case labels and early returns);
     the code lines below are kept verbatim from the listing.  */
6022 tree type = TREE_TYPE (t);
6023 enum tree_code tcode = TREE_CODE (t);
  /* Do the computation in the widest sensible type: WIDE_TYPE when it is
     strictly wider than T's type, otherwise T's own type.  */
6024 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6025 > GET_MODE_SIZE (TYPE_MODE (type)))
6026 ? wide_type : type);
6028 int same_p = tcode == code;
6029 tree op0 = NULL_TREE, op1 = NULL_TREE;
6030 bool sub_strict_overflow_p;
6032 /* Don't deal with constants of zero here; they confuse the code below. */
6033 if (integer_zerop (c))
6036 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6037 op0 = TREE_OPERAND (t, 0);
6039 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6040 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6042 /* Note that we need not handle conditional operations here since fold
6043 already handles those cases. So just do arithmetic here. */
  /* Constant T: fold the operation directly when it is exact.  */
6047 /* For a constant, we can always simplify if we are a multiply
6048 or (for divide and modulus) if it is a multiple of our constant. */
6049 if (code == MULT_EXPR
6050 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6051 return const_binop (code, fold_convert (ctype, t),
6052 fold_convert (ctype, c), 0);
  /* Conversions: only look through a cast when doing so cannot change
     the result (no narrowing, no signedness change for div/mod, no
     introduction of undefined overflow).  */
6055 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
6056 /* If op0 is an expression ... */
6057 if ((COMPARISON_CLASS_P (op0)
6058 || UNARY_CLASS_P (op0)
6059 || BINARY_CLASS_P (op0)
6060 || VL_EXP_CLASS_P (op0)
6061 || EXPRESSION_CLASS_P (op0))
6062 /* ... and is unsigned, and its type is smaller than ctype,
6063 then we cannot pass through as widening. */
6064 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
6065 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6066 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6067 && (GET_MODE_SIZE (TYPE_MODE (ctype))
6068 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
6069 /* ... or this is a truncation (t is narrower than op0),
6070 then we cannot pass through this narrowing. */
6071 || (GET_MODE_SIZE (TYPE_MODE (type))
6072 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
6073 /* ... or signedness changes for division or modulus,
6074 then we cannot pass through this conversion. */
6075 || (code != MULT_EXPR
6076 && (TYPE_UNSIGNED (ctype)
6077 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6078 /* ... or has undefined overflow while the converted to
6079 type has not, we cannot do the operation in the inner type
6080 as that would introduce undefined overflow. */
6081 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6082 && !TYPE_OVERFLOW_UNDEFINED (type))))
6085 /* Pass the constant down and see if we can make a simplification. If
6086 we can, replace this expression with the inner simplification for
6087 possible later conversion to our or some other type. */
6088 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6089 && TREE_CODE (t2) == INTEGER_CST
6090 && !TREE_OVERFLOW (t2)
6091 && (0 != (t1 = extract_muldiv (op0, t2, code,
6093 ? ctype : NULL_TREE,
6094 strict_overflow_p))))
  /* ABS_EXPR handling.  NOTE(review): the 'case ABS_EXPR:' label is
     elided in this listing.  */
6099 /* If widening the type changes it from signed to unsigned, then we
6100 must avoid building ABS_EXPR itself as unsigned. */
6101 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6103 tree cstype = (*signed_type_for) (ctype);
6104 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6107 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6108 return fold_convert (ctype, t1);
6112 /* If the constant is negative, we cannot simplify this. */
6113 if (tree_int_cst_sgn (c) == -1)
  /* Unary case (presumably NEGATE_EXPR as well -- label elided):
     distribute the operation into the operand.  */
6117 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6119 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6122 case MIN_EXPR: case MAX_EXPR:
6123 /* If widening the type changes the signedness, then we can't perform
6124 this optimization as that changes the result. */
6125 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6128 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6129 sub_strict_overflow_p = false;
6130 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6131 &sub_strict_overflow_p)) != 0
6132 && (t2 = extract_muldiv (op1, c, code, wide_type,
6133 &sub_strict_overflow_p)) != 0)
  /* Multiplying or dividing by a negative constant swaps MIN/MAX.  */
6135 if (tree_int_cst_sgn (c) < 0)
6136 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6137 if (sub_strict_overflow_p)
6138 *strict_overflow_p = true;
6139 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6140 fold_convert (ctype, t2));
6144 case LSHIFT_EXPR: case RSHIFT_EXPR:
6145 /* If the second operand is constant, this is a multiplication
6146 or floor division, by a power of two, so we can treat it that
6147 way unless the multiplier or divisor overflows. Signed
6148 left-shift overflow is implementation-defined rather than
6149 undefined in C90, so do not convert signed left shift into
6151 if (TREE_CODE (op1) == INTEGER_CST
6152 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6153 /* const_binop may not detect overflow correctly,
6154 so check for it explicitly here. */
6155 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6156 && TREE_INT_CST_HIGH (op1) == 0
6157 && 0 != (t1 = fold_convert (ctype,
6158 const_binop (LSHIFT_EXPR,
6161 && !TREE_OVERFLOW (t1))
  /* Retry as an explicit multiply/floor-divide by the power of two.  */
6162 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6163 ? MULT_EXPR : FLOOR_DIV_EXPR,
6164 ctype, fold_convert (ctype, op0), t1),
6165 c, code, wide_type, strict_overflow_p);
6168 case PLUS_EXPR: case MINUS_EXPR:
6169 /* See if we can eliminate the operation on both sides. If we can, we
6170 can return a new PLUS or MINUS. If we can't, the only remaining
6171 cases where we can do anything are if the second operand is a
6173 sub_strict_overflow_p = false;
6174 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6175 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6176 if (t1 != 0 && t2 != 0
6177 && (code == MULT_EXPR
6178 /* If not multiplication, we can only do this if both operands
6179 are divisible by c. */
6180 || (multiple_of_p (ctype, op0, c)
6181 && multiple_of_p (ctype, op1, c))))
6183 if (sub_strict_overflow_p)
6184 *strict_overflow_p = true;
6185 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6186 fold_convert (ctype, t2));
6189 /* If this was a subtraction, negate OP1 and set it to be an addition.
6190 This simplifies the logic below. */
6191 if (tcode == MINUS_EXPR)
6192 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6194 if (TREE_CODE (op1) != INTEGER_CST)
6197 /* If either OP1 or C are negative, this optimization is not safe for
6198 some of the division and remainder types while for others we need
6199 to change the code. */
6200 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6202 if (code == CEIL_DIV_EXPR)
6203 code = FLOOR_DIV_EXPR;
6204 else if (code == FLOOR_DIV_EXPR)
6205 code = CEIL_DIV_EXPR;
6206 else if (code != MULT_EXPR
6207 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6211 /* If it's a multiply or a division/modulus operation of a multiple
6212 of our constant, do the operation and verify it doesn't overflow. */
6213 if (code == MULT_EXPR
6214 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6216 op1 = const_binop (code, fold_convert (ctype, op1),
6217 fold_convert (ctype, c), 0);
6218 /* We allow the constant to overflow with wrapping semantics. */
6220 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6226 /* If we have an unsigned type that is not a sizetype, we cannot widen
6227 the operation since it will change the result if the original
6228 computation overflowed. */
6229 if (TYPE_UNSIGNED (ctype)
6230 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6234 /* If we were able to eliminate our operation from the first side,
6235 apply our operation to the second side and reform the PLUS. */
6236 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6237 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1)&#59;
6239 /* The last case is if we are a multiply. In that case, we can
6240 apply the distributive law to commute the multiply and addition
6241 if the multiplication of the constants doesn't overflow. */
6242 if (code == MULT_EXPR)
6243 return fold_build2 (tcode, ctype,
6244 fold_build2 (code, ctype,
6245 fold_convert (ctype, op0),
6246 fold_convert (ctype, c)),
  /* MULT_EXPR handling.  NOTE(review): the 'case MULT_EXPR:' label and
     the same_p check appear to be elided from this listing.  */
6252 /* We have a special case here if we are doing something like
6253 (C * 8) % 4 since we know that's zero. */
6254 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6255 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6256 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6257 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6258 return omit_one_operand (type, integer_zero_node, op0);
6260 /* ... fall through ... */
6262 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6263 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6264 /* If we can extract our operation from the LHS, do so and return a
6265 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6266 do something only if the second operand is a constant. */
6268 && (t1 = extract_muldiv (op0, c, code, wide_type,
6269 strict_overflow_p)) != 0)
6270 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6271 fold_convert (ctype, op1));
6272 else if (tcode == MULT_EXPR && code == MULT_EXPR
6273 && (t1 = extract_muldiv (op1, c, code, wide_type,
6274 strict_overflow_p)) != 0)
6275 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6276 fold_convert (ctype, t1));
6277 else if (TREE_CODE (op1) != INTEGER_CST)
6280 /* If these are the same operation types, we can associate them
6281 assuming no overflow. */
6283 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
6284 fold_convert (ctype, c), 0))
6285 && !TREE_OVERFLOW (t1)&#41;
6286 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6288 /* If these operations "cancel" each other, we have the main
6289 optimizations of this pass, which occur when either constant is a
6290 multiple of the other, in which case we replace this with either an
6291 operation or CODE or TCODE.
6293 If we have an unsigned type that is not a sizetype, we cannot do
6294 this since it will change the result if the original computation
6296 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6297 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6298 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6299 || (tcode == MULT_EXPR
6300 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6301 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6302 && code != MULT_EXPR)))
  /* OP1 a multiple of C: keep TCODE, dividing its constant by C.  */
6304 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6306 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6307 *strict_overflow_p = true;
6308 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6309 fold_convert (ctype,
6310 const_binop (TRUNC_DIV_EXPR,
  /* C a multiple of OP1: switch to CODE with constant C / OP1.  */
6313 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6315 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6316 *strict_overflow_p = true;
6317 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6318 fold_convert (ctype,
6319 const_binop (TRUNC_DIV_EXPR,
6332 /* Return a node which has the indicated constant VALUE (either 0 or
6333 1), and is of the indicated TYPE. */
6336 constant_boolean_node (int value, tree type)
  /* Reuse the shared 0/1 (or true/false) nodes for the two common types;
     otherwise build a fresh integer constant of TYPE.  */
6338 if (type == integer_type_node)
6339 return value ? integer_one_node : integer_zero_node;
6340 else if (type == boolean_type_node)
6341 return value ? boolean_true_node : boolean_false_node;
6343 return build_int_cst (type, value);
6347 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6348 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6349 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6350 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6351 COND is the first argument to CODE; otherwise (as in the example
6352 given here), it is the second argument. TYPE is the type of the
6353 original expression. Return NULL_TREE if no simplification is
6357 fold_binary_op_with_conditional_arg (enum tree_code code,
6358 tree type, tree op0, tree op1,
6359 tree cond, tree arg, int cond_first_p)
6361 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6362 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6363 tree test, true_value, false_value;
6364 tree lhs = NULL_TREE;
6365 tree rhs = NULL_TREE;
6367 /* This transformation is only worthwhile if we don't have to wrap
6368 arg in a SAVE_EXPR, and the operation can be simplified on at least
6369 one of the branches once it's pushed inside the COND_EXPR. */
6370 if (!TREE_CONSTANT (arg))
  /* Pull the test and the two arms out of COND; a bare comparison is
     treated as (cmp ? true : false).  */
6373 if (TREE_CODE (cond) == COND_EXPR)
6375 test = TREE_OPERAND (cond, 0);
6376 true_value = TREE_OPERAND (cond, 1);
6377 false_value = TREE_OPERAND (cond, 2);
6378 /* If this operand throws an expression, then it does not make
6379 sense to try to perform a logical or arithmetic operation
6381 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6383 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6387 tree testtype = TREE_TYPE (cond);
6390 true_value = constant_boolean_node (true, testtype);
6391 false_value = constant_boolean_node (false, testtype);
6394 arg = fold_convert (arg_type, arg);
  /* Apply CODE to ARG and each arm, honoring operand order.  */
6397 true_value = fold_convert (cond_type, true_value);
6399 lhs = fold_build2 (code, type, true_value, arg);
6401 lhs = fold_build2 (code, type, arg, true_value);
6405 false_value = fold_convert (cond_type, false_value);
6407 rhs = fold_build2 (code, type, false_value, arg);
6409 rhs = fold_build2 (code, type, arg, false_value);
6412 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6413 return fold_convert (type, test);
6417 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6419 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6420 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6421 ADDEND is the same as X.
6423 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6424 and finite. The problematic cases are when X is zero, and its mode
6425 has signed zeros. In the case of rounding towards -infinity,
6426 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6427 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6430 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6432 if (!real_zerop (addend))
6435 /* Don't allow the fold with -fsignaling-nans. */
6436 if (HONOR_SNANS (TYPE_MODE (type)))
6439 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6440 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6443 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6444 if (TREE_CODE (addend) == REAL_CST
6445 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
  /* NOTE(review): the statement flipping NEGATE for a -0.0 addend is
     elided from this listing -- confirm against the full file.  */
6448 /* The mode has signed zeros, and we have to honor their sign.
6449 In this situation, there is only one case we can return true for.
6450 X - 0 is the same as X unless rounding towards -infinity is
6452 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6455 /* Subroutine of fold() that checks comparisons of built-in math
6456 functions against real constants.
6458 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6459 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6460 is the type of the result and ARG0 and ARG1 are the operands of the
6461 comparison. ARG1 must be a TREE_REAL_CST.
6463 The function returns the constant folded tree if a simplification
6464 can be made, and NULL_TREE otherwise. */
6467 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6468 tree type, tree arg0, tree arg1)
  /* Only sqrt-family builtins are handled here; compare sqrt(x) with the
     real constant ARG1 by reasoning about the sign and square of ARG1.  */
6472 if (BUILTIN_SQRT_P (fcode))
6474 tree arg = CALL_EXPR_ARG (arg0, 0);
6475 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6477 c = TREE_REAL_CST (arg1);
6478 if (REAL_VALUE_NEGATIVE (c))
6480 /* sqrt(x) < y is always false, if y is negative. */
6481 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6482 return omit_one_operand (type, integer_zero_node, arg);
6484 /* sqrt(x) > y is always true, if y is negative and we
6485 don't care about NaNs, i.e. negative values of x. */
6486 if (code == NE_EXPR || !HONOR_NANS (mode))
6487 return omit_one_operand (type, integer_one_node, arg);
6489 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6490 return fold_build2 (GE_EXPR, type, arg,
6491 build_real (TREE_TYPE (arg), dconst0));
6493 else if (code == GT_EXPR || code == GE_EXPR)
  /* Square the bound: sqrt(x) > y <=> x > y*y (modulo NaN/Inf care).  */
6497 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6498 real_convert (&c2, mode, &c2);
6500 if (REAL_VALUE_ISINF (c2))
6502 /* sqrt(x) > y is x == +Inf, when y is very large. */
6503 if (HONOR_INFINITIES (mode))
6504 return fold_build2 (EQ_EXPR, type, arg,
6505 build_real (TREE_TYPE (arg), c2));
6507 /* sqrt(x) > y is always false, when y is very large
6508 and we don't care about infinities. */
6509 return omit_one_operand (type, integer_zero_node, arg);
6512 /* sqrt(x) > c is the same as x > c*c. */
6513 return fold_build2 (code, type, arg,
6514 build_real (TREE_TYPE (arg), c2));
6516 else if (code == LT_EXPR || code == LE_EXPR)
6520 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6521 real_convert (&c2, mode, &c2);
6523 if (REAL_VALUE_ISINF (c2))
6525 /* sqrt(x) < y is always true, when y is a very large
6526 value and we don't care about NaNs or Infinities. */
6527 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6528 return omit_one_operand (type, integer_one_node, arg);
6530 /* sqrt(x) < y is x != +Inf when y is very large and we
6531 don't care about NaNs. */
6532 if (! HONOR_NANS (mode))
6533 return fold_build2 (NE_EXPR, type, arg,
6534 build_real (TREE_TYPE (arg), c2));
6536 /* sqrt(x) < y is x >= 0 when y is very large and we
6537 don't care about Infinities. */
6538 if (! HONOR_INFINITIES (mode))
6539 return fold_build2 (GE_EXPR, type, arg,
6540 build_real (TREE_TYPE (arg), dconst0));
6542 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6543 if (lang_hooks.decls.global_bindings_p () != 0
6544 || CONTAINS_PLACEHOLDER_P (arg))
  /* ARG must be evaluated twice below, so wrap it in a SAVE_EXPR
     (only safe when not at global scope / no placeholders).  */
6547 arg = save_expr (arg);
6548 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6549 fold_build2 (GE_EXPR, type, arg,
6550 build_real (TREE_TYPE (arg),
6552 fold_build2 (NE_EXPR, type, arg,
6553 build_real (TREE_TYPE (arg),
6557 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6558 if (! HONOR_NANS (mode))
6559 return fold_build2 (code, type, arg,
6560 build_real (TREE_TYPE (arg), c2));
6562 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6563 if (lang_hooks.decls.global_bindings_p () == 0
6564 && ! CONTAINS_PLACEHOLDER_P (arg))
6566 arg = save_expr (arg);
6567 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6568 fold_build2 (GE_EXPR, type, arg,
6569 build_real (TREE_TYPE (arg),
6571 fold_build2 (code, type, arg,
6572 build_real (TREE_TYPE (arg),
6581 /* Subroutine of fold() that optimizes comparisons against Infinities,
6582 either +Inf or -Inf.
6584 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6585 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6586 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6588 The function returns the constant folded tree if a simplification
6589 can be made, and NULL_TREE otherwise. */
6592 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6594 enum machine_mode mode;
6595 REAL_VALUE_TYPE max;
6599 mode = TYPE_MODE (TREE_TYPE (arg0));
6601 /* For negative infinity swap the sense of the comparison. */
6602 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6604 code = swap_tree_comparison (code);
  /* NOTE(review): the 'switch (code)' opener and case labels are elided
     from this listing; each section below handles one comparison code.  */
6609 /* x > +Inf is always false, if we ignore sNaNs. */
6610 if (HONOR_SNANS (mode))
6612 return omit_one_operand (type, integer_zero_node, arg0);
6615 /* x <= +Inf is always true, if we don't care about NaNs. */
6616 if (! HONOR_NANS (mode))
6617 return omit_one_operand (type, integer_one_node, arg0);
6619 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6620 if (lang_hooks.decls.global_bindings_p () == 0
6621 && ! CONTAINS_PLACEHOLDER_P (arg0))
6623 arg0 = save_expr (arg0);
6624 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6630 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6631 real_maxval (&max, neg, mode);
6632 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6633 arg0, build_real (TREE_TYPE (arg0), max));
6636 /* x < +Inf is always equal to x <= DBL_MAX. */
6637 real_maxval (&max, neg, mode);
6638 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6639 arg0, build_real (TREE_TYPE (arg0), max));
6642 /* x != +Inf is always equal to !(x > DBL_MAX). */
6643 real_maxval (&max, neg, mode);
6644 if (! HONOR_NANS (mode))
6645 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6646 arg0, build_real (TREE_TYPE (arg0), max));
6648 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6649 arg0, build_real (TREE_TYPE (arg0), max));
6650 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6659 /* Subroutine of fold() that optimizes comparisons of a division by
6660 a nonzero integer constant against an integer constant, i.e.
6663 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6664 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6665 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6667 The function returns the constant folded tree if a simplification
6668 can be made, and NULL_TREE otherwise. */
6671 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
  /* Fold X/C1 op C2 into a range check on X: compute the interval
     [lo, hi] of X values whose quotient equals prod = C1*C2, then
     rewrite the comparison in terms of that interval.  */
6673 tree prod, tmp, hi, lo;
6674 tree arg00 = TREE_OPERAND (arg0, 0);
6675 tree arg01 = TREE_OPERAND (arg0, 1);
6676 unsigned HOST_WIDE_INT lpart;
6677 HOST_WIDE_INT hpart;
6678 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6682 /* We have to do this the hard way to detect unsigned overflow.
6683 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6684 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6685 TREE_INT_CST_HIGH (arg01),
6686 TREE_INT_CST_LOW (arg1),
6687 TREE_INT_CST_HIGH (arg1),
6688 &lpart, &hpart, unsigned_p);
6689 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6691 neg_overflow = false;
  /* Unsigned case: lo = prod, hi = prod + (C1 - 1), watching for
     overflow in the addition.  */
6695 tmp = int_const_binop (MINUS_EXPR, arg01,
6696 build_int_cst (TREE_TYPE (arg01), 1), 0);
6699 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6700 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6701 TREE_INT_CST_HIGH (prod),
6702 TREE_INT_CST_LOW (tmp),
6703 TREE_INT_CST_HIGH (tmp),
6704 &lpart, &hpart, unsigned_p);
6705 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6706 -1, overflow | TREE_OVERFLOW (prod));
  /* Signed case with positive divisor: interval direction depends on
     the sign of C2.  */
6708 else if (tree_int_cst_sgn (arg01) >= 0)
6710 tmp = int_const_binop (MINUS_EXPR, arg01,
6711 build_int_cst (TREE_TYPE (arg01), 1), 0);
6712 switch (tree_int_cst_sgn (arg1))
6715 neg_overflow = true;
6716 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6721 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6726 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6736 /* A negative divisor reverses the relational operators. */
6737 code = swap_tree_comparison (code);
6739 tmp = int_const_binop (PLUS_EXPR, arg01,
6740 build_int_cst (TREE_TYPE (arg01), 1), 0);
6741 switch (tree_int_cst_sgn (arg1))
6744 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6749 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6754 neg_overflow = true;
6755 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
  /* Emit the comparison against the computed bounds; an overflowed
     bound means that side of the range is unbounded (or empty).
     NOTE(review): the enclosing 'switch (code)' and case labels are
     elided from this listing.  */
6767 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6768 return omit_one_operand (type, integer_zero_node, arg00);
6769 if (TREE_OVERFLOW (hi))
6770 return fold_build2 (GE_EXPR, type, arg00, lo);
6771 if (TREE_OVERFLOW (lo))
6772 return fold_build2 (LE_EXPR, type, arg00, hi);
6773 return build_range_check (type, arg00, 1, lo, hi);
6776 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6777 return omit_one_operand (type, integer_one_node, arg00);
6778 if (TREE_OVERFLOW (hi))
6779 return fold_build2 (LT_EXPR, type, arg00, lo);
6780 if (TREE_OVERFLOW (lo))
6781 return fold_build2 (GT_EXPR, type, arg00, hi);
6782 return build_range_check (type, arg00, 0, lo, hi);
6785 if (TREE_OVERFLOW (lo))
6787 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6788 return omit_one_operand (type, tmp, arg00);
6790 return fold_build2 (LT_EXPR, type, arg00, lo);
6793 if (TREE_OVERFLOW (hi))
6795 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6796 return omit_one_operand (type, tmp, arg00);
6798 return fold_build2 (LE_EXPR, type, arg00, hi);
6801 if (TREE_OVERFLOW (hi))
6803 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6804 return omit_one_operand (type, tmp, arg00);
6806 return fold_build2 (GT_EXPR, type, arg00, hi);
6809 if (TREE_OVERFLOW (lo))
6811 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6812 return omit_one_operand (type, tmp, arg00);
6814 return fold_build2 (GE_EXPR, type, arg00, lo);
6824 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6825 equality/inequality test, then return a simplified form of the test
6826 using a sign testing. Otherwise return NULL. TYPE is the desired
6830 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6833 /* If this is testing a single bit, we can optimize the test. */
6834 if ((code == NE_EXPR || code == EQ_EXPR)
6835 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6836 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6838 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6839 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6840 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6842 if (arg00 != NULL_TREE
6843 /* This is only a win if casting to a signed type is cheap,
6844 i.e. when arg00's type is not a partial mode. */
6845 && TYPE_PRECISION (TREE_TYPE (arg00))
6846 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
  /* Build the sign comparison in a signed version of arg00's type.  */
6848 tree stype = signed_type_for (TREE_TYPE (arg00));
6849 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6850 result_type, fold_convert (stype, arg00),
6851 build_int_cst (stype, 0));
6858 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6859 equality/inequality test, then return a simplified form of
6860 the test using shifts and logical operations. Otherwise return
6861 NULL. TYPE is the desired result type. */
6864 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6867 /* If this is testing a single bit, we can optimize the test. */
6868 if ((code == NE_EXPR || code == EQ_EXPR)
6869 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6870 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6872 tree inner = TREE_OPERAND (arg0, 0);
6873 tree type = TREE_TYPE (arg0);
6874 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6875 enum machine_mode operand_mode = TYPE_MODE (type);
6877 tree signed_type, unsigned_type, intermediate_type;
6880 /* First, see if we can fold the single bit test into a sign-bit
6882 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6887 /* Otherwise we have (A & C) != 0 where C is a single bit,
6888 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6889 Similarly for (A & C) == 0. */
6891 /* If INNER is a right shift of a constant and it plus BITNUM does
6892 not overflow, adjust BITNUM and INNER. */
6893 if (TREE_CODE (inner) == RSHIFT_EXPR
6894 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6895 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6896 && bitnum < TYPE_PRECISION (type)
6897 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6898 bitnum - TYPE_PRECISION (type)))
6900 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6901 inner = TREE_OPERAND (inner, 0);
6904 /* If we are going to be able to omit the AND below, we must do our
6905 operations as unsigned. If we must use the AND, we have a choice.
6906 Normally unsigned is faster, but for some machines signed is. */
6907 #ifdef LOAD_EXTEND_OP
6908 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6909 && !flag_syntax_only) ? 0 : 1;
6914 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6915 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6916 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6917 inner = fold_convert (intermediate_type, inner);
  /* Shift the tested bit down to position 0.  */
6920 inner = build2 (RSHIFT_EXPR, intermediate_type,
6921 inner, size_int (bitnum));
6923 one = build_int_cst (intermediate_type, 1);
  /* For == 0 the result is the inverted bit, so XOR with 1.  */
6925 if (code == EQ_EXPR)
6926 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6928 /* Put the AND last so it can combine with more things. */
6929 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6931 /* Make sure to return the proper type. */
6932 inner = fold_convert (result_type, inner);
6939 /* Check whether we are allowed to reorder operands arg0 and arg1,
6940 such that the evaluation of arg1 occurs before arg0. */
6943 reorder_operands_p (const_tree arg0, const_tree arg1)
  /* Without -fevaluation-order any reordering is allowed; otherwise
     permit it only when at least one operand is constant or neither
     has side effects.  */
6945 if (! flag_evaluation_order)
6947 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6949 return ! TREE_SIDE_EFFECTS (arg0)
6950 && ! TREE_SIDE_EFFECTS (arg1);
6953 /* Test whether it is preferable two swap two operands, ARG0 and
6954 ARG1, for example because ARG0 is an integer constant and ARG1
6955 isn't. If REORDER is true, only recommend swapping if we can
6956 evaluate the operands in reverse order. */
6959 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6961 STRIP_SIGN_NOPS (arg0);
6962 STRIP_SIGN_NOPS (arg1);
  /* Constants sort last: if ARG1 is already the constant, don't swap;
     if ARG0 is, do.  Checked in order of constant "strength".  */
6964 if (TREE_CODE (arg1) == INTEGER_CST)
6966 if (TREE_CODE (arg0) == INTEGER_CST)
6969 if (TREE_CODE (arg1) == REAL_CST)
6971 if (TREE_CODE (arg0) == REAL_CST)
6974 if (TREE_CODE (arg1) == FIXED_CST)
6976 if (TREE_CODE (arg0) == FIXED_CST)
6979 if (TREE_CODE (arg1) == COMPLEX_CST)
6981 if (TREE_CODE (arg0) == COMPLEX_CST)
6984 if (TREE_CONSTANT (arg1))
6986 if (TREE_CONSTANT (arg0))
6992 if (reorder && flag_evaluation_order
6993 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6996 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6997 for commutative and comparison operators. Ensuring a canonical
6998 form allows the optimizers to find additional redundancies without
6999 having to explicitly check for both orderings. */
7000 if (TREE_CODE (arg0) == SSA_NAME
7001 && TREE_CODE (arg1) == SSA_NAME
7002 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7005 /* Put SSA_NAMEs last. */
7006 if (TREE_CODE (arg1) == SSA_NAME)
7008 if (TREE_CODE (arg0) == SSA_NAME)
7011 /* Put variables last. */
7020 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7021 ARG0 is extended to a wider type. */
7024 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7026 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7028 tree shorter_type, outer_type;
7032 if (arg0_unw == arg0)
7034 shorter_type = TREE_TYPE (arg0_unw);
7036 #ifdef HAVE_canonicalize_funcptr_for_compare
7037 /* Disable this optimization if we're casting a function pointer
7038 type on targets that require function pointer canonicalization. */
7039 if (HAVE_canonicalize_funcptr_for_compare
7040 && TREE_CODE (shorter_type) == POINTER_TYPE
7041 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7045 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7048 arg1_unw = get_unwidened (arg1, NULL_TREE);
7050 /* If possible, express the comparison in the shorter mode. */
7051 if ((code == EQ_EXPR || code == NE_EXPR
7052 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7053 && (TREE_TYPE (arg1_unw) == shorter_type
7054 || (TYPE_PRECISION (shorter_type)
7055 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7056 || (TREE_CODE (arg1_unw) == INTEGER_CST
7057 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7058 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7059 && int_fits_type_p (arg1_unw, shorter_type))))
7060 return fold_build2 (code, type, arg0_unw,
7061 fold_convert (shorter_type, arg1_unw));
7063 if (TREE_CODE (arg1_unw) != INTEGER_CST
7064 || TREE_CODE (shorter_type) != INTEGER_TYPE
7065 || !int_fits_type_p (arg1_unw, shorter_type))
7068 /* If we are comparing with the integer that does not fit into the range
7069 of the shorter type, the result is known. */
7070 outer_type = TREE_TYPE (arg1_unw);
7071 min = lower_bound_in_type (outer_type, shorter_type);
7072 max = upper_bound_in_type (outer_type, shorter_type);
7074 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7076 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
  /* Constant outside the narrower type's range: the comparison folds to
     a constant.  NOTE(review): the 'switch (code)' dispatch and case
     labels are elided from this listing.  */
7083 return omit_one_operand (type, integer_zero_node, arg0);
7088 return omit_one_operand (type, integer_one_node, arg0);
7094 return omit_one_operand (type, integer_one_node, arg0);
7096 return omit_one_operand (type, integer_zero_node, arg0);
7101 return omit_one_operand (type, integer_zero_node, arg0);
7103 return omit_one_operand (type, integer_one_node, arg0);
7112 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7113 ARG0 just the signedness is changed. */
/* NOTE(review): several lines are elided from this extract (early returns,
   part of the condition at 7148); comments below are hedged accordingly.  */
7116 fold_sign_changed_comparison (enum tree_code code, tree type,
7117 tree arg0, tree arg1)
7120 tree inner_type, outer_type;
/* Only a NOP/CONVERT cast wrapping ARG0 is of interest here.  */
7122 if (TREE_CODE (arg0) != NOP_EXPR
7123 && TREE_CODE (arg0) != CONVERT_EXPR)
7126 outer_type = TREE_TYPE (arg0);
7127 arg0_inner = TREE_OPERAND (arg0, 0);
7128 inner_type = TREE_TYPE (arg0_inner);
7130 #ifdef HAVE_canonicalize_funcptr_for_compare
7131 /* Disable this optimization if we're casting a function pointer
7132 type on targets that require function pointer canonicalization. */
7133 if (HAVE_canonicalize_funcptr_for_compare
7134 && TREE_CODE (inner_type) == POINTER_TYPE
7135 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Same precision is required: only the signedness may differ.  */
7139 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
/* ARG1 must be a constant, or itself a conversion from INNER_TYPE.  */
7142 if (TREE_CODE (arg1) != INTEGER_CST
7143 && !((TREE_CODE (arg1) == NOP_EXPR
7144 || TREE_CODE (arg1) == CONVERT_EXPR)
7145 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7148 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant in the inner type, preserving any overflow
   flag it already carried.  */
7153 if (TREE_CODE (arg1) == INTEGER_CST)
7154 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7155 TREE_INT_CST_HIGH (arg1), 0,
7156 TREE_OVERFLOW (arg1));
7158 arg1 = fold_convert (inner_type, arg1);
/* Rebuild the comparison directly on the uncast operand.  */
7160 return fold_build2 (code, type, arg0_inner, arg1);
7163 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7164 step of the array. Reconstructs s and delta in the case of s * delta
7165 being an integer constant (and thus already folded).
7166 ADDR is the address. MULT is the multiplicative expression.
7167 If the function succeeds, the new address expression is returned. Otherwise
7168 NULL_TREE is returned. */
/* NOTE(review): many interior lines are elided (declarations of itype/ret/
   pos, several failure returns, loop bodies); the shape below is partial.  */
7171 try_move_mult_to_index (tree addr, tree op1)
7173 tree s, delta, step;
7174 tree ref = TREE_OPERAND (addr, 0), pref;
7179 /* Strip the nops that might be added when converting op1 to sizetype. */
7182 /* Canonicalize op1 into a possibly non-constant delta
7183 and an INTEGER_CST s. */
7184 if (TREE_CODE (op1) == MULT_EXPR)
7186 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
/* Whichever factor is the constant becomes S; the other is DELTA
   (assignments elided in this extract).  */
7191 if (TREE_CODE (arg0) == INTEGER_CST)
7196 else if (TREE_CODE (arg1) == INTEGER_CST)
7204 else if (TREE_CODE (op1) == INTEGER_CST)
7211 /* Simulate we are delta * 1. */
7213 s = integer_one_node;
/* Walk down the reference chain looking for an ARRAY_REF whose element
   size matches S (or evenly divides DELTA).  */
7216 for (;; ref = TREE_OPERAND (ref, 0))
7218 if (TREE_CODE (ref) == ARRAY_REF)
7220 /* Remember if this was a multi-dimensional array. */
7221 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7224 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7228 step = array_ref_element_size (ref);
7229 if (TREE_CODE (step) != INTEGER_CST)
7234 if (! tree_int_cst_equal (step, s))
7239 /* Try if delta is a multiple of step. */
7240 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7246 /* Only fold here if we can verify we do not overflow one
7247 dimension of a multi-dimensional array. */
/* Require a constant index and a constant integral domain bound so the
   overflow check below is decidable at compile time.  */
7252 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7253 || !INTEGRAL_TYPE_P (itype)
7254 || !TYPE_MAX_VALUE (itype)
7255 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
/* The new index (old index + delta) must stay within the domain max.  */
7258 tmp = fold_binary (PLUS_EXPR, itype,
7259 fold_convert (itype,
7260 TREE_OPERAND (ref, 1)),
7261 fold_convert (itype, delta));
7263 || TREE_CODE (tmp) != INTEGER_CST
7264 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7273 if (!handled_component_p (ref))
7277 /* We found the suitable array reference. So copy everything up to it,
7278 and replace the index. */
7280 pref = TREE_OPERAND (addr, 0);
7281 ret = copy_node (pref);
/* Copy each handled component down to the array ref so the original
   tree is left untouched.  */
7286 pref = TREE_OPERAND (pref, 0);
7287 TREE_OPERAND (pos, 0) = copy_node (pref);
7288 pos = TREE_OPERAND (pos, 0);
/* Splice in the bumped index: idx + delta, computed in the domain type.  */
7291 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7292 fold_convert (itype,
7293 TREE_OPERAND (pos, 1)),
7294 fold_convert (itype, delta));
7296 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7300 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7301 means A >= Y && A != MAX, but in this case we know that
7302 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7305 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7307 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, which must be a strict inequality
   (A < X or X > A); elided else-branch presumably bails out.  */
7309 if (TREE_CODE (bound) == LT_EXPR)
7310 a = TREE_OPERAND (bound, 0);
7311 else if (TREE_CODE (bound) == GT_EXPR)
7312 a = TREE_OPERAND (bound, 1);
/* Only integral and pointer types have the required well-defined
   successor semantics.  */
7316 typea = TREE_TYPE (a);
7317 if (!INTEGRAL_TYPE_P (typea)
7318 && !POINTER_TYPE_P (typea))
/* Extract A1 (candidate for A + 1) and Y from INEQ.  */
7321 if (TREE_CODE (ineq) == LT_EXPR)
7323 a1 = TREE_OPERAND (ineq, 1);
7324 y = TREE_OPERAND (ineq, 0);
7326 else if (TREE_CODE (ineq) == GT_EXPR)
7328 a1 = TREE_OPERAND (ineq, 0);
7329 y = TREE_OPERAND (ineq, 1);
7334 if (TREE_TYPE (a1) != typea)
7337 if (POINTER_TYPE_P (typea))
7339 /* Convert the pointer types into integer before taking the difference. */
7340 tree ta = fold_convert (ssizetype, a);
7341 tree ta1 = fold_convert (ssizetype, a1);
7342 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7345 diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* The transform only applies when A1 is literally A + 1.  */
7347 if (!diff || !integer_onep (diff))
7350 return fold_build2 (GE_EXPR, type, a, y);
7353 /* Fold a sum or difference of at least one multiplication.
7354 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): some lines are elided (else-branches assigning arg00/arg01
   and arg10/arg11, the 'same' checks around 7447-7452); partial view.  */
7357 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7359 tree arg00, arg01, arg10, arg11;
7360 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7362 /* (A * C) +- (B * C) -> (A+-B) * C.
7363 (A * C) +- A -> A * (C+-1).
7364 We are most concerned about the case where C is a constant,
7365 but other combinations show up during loop reduction. Since
7366 it is not difficult, try all four possibilities. */
/* Decompose ARG0 into arg00 * arg01 (a bare operand counts as "* 1").  */
7368 if (TREE_CODE (arg0) == MULT_EXPR)
7370 arg00 = TREE_OPERAND (arg0, 0);
7371 arg01 = TREE_OPERAND (arg0, 1);
7373 else if (TREE_CODE (arg0) == INTEGER_CST)
7375 arg00 = build_one_cst (type);
7380 /* We cannot generate constant 1 for fract. */
7381 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7384 arg01 = build_one_cst (type);
/* Likewise decompose ARG1 into arg10 * arg11.  */
7386 if (TREE_CODE (arg1) == MULT_EXPR)
7388 arg10 = TREE_OPERAND (arg1, 0);
7389 arg11 = TREE_OPERAND (arg1, 1);
7391 else if (TREE_CODE (arg1) == INTEGER_CST)
7393 arg10 = build_one_cst (type);
7398 /* We cannot generate constant 1 for fract. */
7399 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7402 arg11 = build_one_cst (type);
/* Look for a multiplicand common to both sides (all four pairings).  */
7406 if (operand_equal_p (arg01, arg11, 0))
7407 same = arg01, alt0 = arg00, alt1 = arg10;
7408 else if (operand_equal_p (arg00, arg10, 0))
7409 same = arg00, alt0 = arg01, alt1 = arg11;
7410 else if (operand_equal_p (arg00, arg11, 0))
7411 same = arg00, alt0 = arg01, alt1 = arg10;
7412 else if (operand_equal_p (arg01, arg10, 0))
7413 same = arg01, alt0 = arg00, alt1 = arg11;
7415 /* No identical multiplicands; see if we can find a common
7416 power-of-two factor in non-power-of-two multiplies. This
7417 can help in multi-dimensional array access. */
7418 else if (host_integerp (arg01, 0)
7419 && host_integerp (arg11, 0))
7421 HOST_WIDE_INT int01, int11, tmp;
7424 int01 = TREE_INT_CST_LOW (arg01);
7425 int11 = TREE_INT_CST_LOW (arg11);
7427 /* Move min of absolute values to int11. */
7428 if ((int01 >= 0 ? int01 : -int01)
7429 < (int11 >= 0 ? int11 : -int11))
7431 tmp = int01, int01 = int11, int11 = tmp;
7432 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* If the smaller constant is a power of two dividing the larger, factor
   it out: A*c1 + B*c2 -> (A*(c1/c2) + B) * c2.  */
7439 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7441 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7442 build_int_cst (TREE_TYPE (arg00),
7447 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Emit (alt0 +- alt1) * same, all converted to the result type.  */
7452 return fold_build2 (MULT_EXPR, type,
7453 fold_build2 (code, type,
7454 fold_convert (type, alt0),
7455 fold_convert (type, alt1)),
7456 fold_convert (type, same));
7461 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7462 specified by EXPR into the buffer PTR of length LEN bytes.
7463 Return the number of bytes placed in the buffer, or zero
7467 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7469 tree type = TREE_TYPE (expr);
7470 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7471 int byte, offset, word, words;
7472 unsigned char value;
/* Fail (elided: return 0) if the value does not fit in the buffer.  */
7474 if (total_bytes > len)
7476 words = total_bytes / UNITS_PER_WORD;
/* Emit bytes in target memory order, accounting for word- and
   byte-endianness of the target.  */
7478 for (byte = 0; byte < total_bytes; byte++)
7480 int bitpos = byte * BITS_PER_UNIT;
/* The low HOST_WIDE_INT supplies the first HOST_BITS_PER_WIDE_INT bits,
   the high HOST_WIDE_INT the rest.  */
7481 if (bitpos < HOST_BITS_PER_WIDE_INT)
7482 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7484 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7485 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Multi-word values: locate the byte within the correct target word.  */
7487 if (total_bytes > UNITS_PER_WORD)
7489 word = byte / UNITS_PER_WORD;
7490 if (WORDS_BIG_ENDIAN)
7491 word = (words - 1) - word;
7492 offset = word * UNITS_PER_WORD;
7493 if (BYTES_BIG_ENDIAN)
7494 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7496 offset += byte % UNITS_PER_WORD;
/* Single-word values only need byte-order adjustment.  */
7499 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7500 ptr[offset] = value;
7506 /* Subroutine of native_encode_expr. Encode the REAL_CST
7507 specified by EXPR into the buffer PTR of length LEN bytes.
7508 Return the number of bytes placed in the buffer, or zero
7512 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7514 tree type = TREE_TYPE (expr);
7515 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7516 int byte, offset, word, words, bitpos;
7517 unsigned char value;
7519 /* There are always 32 bits in each long, no matter the size of
7520 the hosts long. We handle floating point representations with
7524 if (total_bytes > len)
7526 words = 32 / UNITS_PER_WORD;
/* Convert to the target representation as an array of 32-bit groups
   (tmp's declaration is elided from this extract).  */
7528 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7530 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7531 bitpos += BITS_PER_UNIT)
/* BYTE is the position within the current 32-bit group.  */
7533 byte = (bitpos / BITS_PER_UNIT) & 3;
7534 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Hosts with sub-32-bit words need word-order adjustment within
   each 32-bit group.  */
7536 if (UNITS_PER_WORD < 4)
7538 word = byte / UNITS_PER_WORD;
7539 if (WORDS_BIG_ENDIAN)
7540 word = (words - 1) - word;
7541 offset = word * UNITS_PER_WORD;
7542 if (BYTES_BIG_ENDIAN)
7543 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7545 offset += byte % UNITS_PER_WORD;
7548 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
/* Store relative to the start of this 32-bit group in the buffer.  */
7549 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7554 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7555 specified by EXPR into the buffer PTR of length LEN bytes.
7556 Return the number of bytes placed in the buffer, or zero
7560 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part first, then the imaginary part right after it
   (elided checks presumably return 0 when either part fails — confirm).  */
7565 part = TREE_REALPART (expr);
7566 rsize = native_encode_expr (part, ptr, len);
7569 part = TREE_IMAGPART (expr);
7570 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7573 return rsize + isize;
7577 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7578 specified by EXPR into the buffer PTR of length LEN bytes.
7579 Return the number of bytes placed in the buffer, or zero
7583 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7585 int i, size, offset, count;
7586 tree itype, elem, elements;
7589 elements = TREE_VECTOR_CST_ELTS (expr);
7590 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7591 itype = TREE_TYPE (TREE_TYPE (expr));
7592 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode each element at its natural offset; the element list may be
   shorter than COUNT, in which case trailing slots are zero-filled.  */
7593 for (i = 0; i < count; i++)
7597 elem = TREE_VALUE (elements);
7598 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly SIZE bytes or we give up.  */
7605 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7610 if (offset + size > len)
7612 memset (ptr+offset, 0, size);
7620 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7621 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7622 buffer PTR of length LEN bytes. Return the number of bytes
7623 placed in the buffer, or zero upon failure. */
7626 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
/* Dispatch on the constant's tree code (case labels and the failing
   default are elided from this extract).  */
7628 switch (TREE_CODE (expr))
7631 return native_encode_int (expr, ptr, len);
7634 return native_encode_real (expr, ptr, len);
7637 return native_encode_complex (expr, ptr, len);
7640 return native_encode_vector (expr, ptr, len);
7648 /* Subroutine of native_interpret_expr. Interpret the contents of
7649 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7650 If the buffer cannot be interpreted, return NULL_TREE. */
7653 native_interpret_int (tree type, const unsigned char *ptr, int len)
7655 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7656 int byte, offset, word, words;
7657 unsigned char value;
7658 unsigned int HOST_WIDE_INT lo = 0;
7659 HOST_WIDE_INT hi = 0;
7661 if (total_bytes > len)
/* Give up on values wider than the (lo, hi) double-word pair.  */
7663 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7665 words = total_bytes / UNITS_PER_WORD;
/* Read bytes back in target memory order; mirrors native_encode_int.  */
7667 for (byte = 0; byte < total_bytes; byte++)
7669 int bitpos = byte * BITS_PER_UNIT;
7670 if (total_bytes > UNITS_PER_WORD)
7672 word = byte / UNITS_PER_WORD;
7673 if (WORDS_BIG_ENDIAN)
7674 word = (words - 1) - word;
7675 offset = word * UNITS_PER_WORD;
7676 if (BYTES_BIG_ENDIAN)
7677 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7679 offset += byte % UNITS_PER_WORD;
7682 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7683 value = ptr[offset];
/* Accumulate into the low word first, then the high word.  */
7685 if (bitpos < HOST_BITS_PER_WIDE_INT)
7686 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7688 hi |= (unsigned HOST_WIDE_INT) value
7689 << (bitpos - HOST_BITS_PER_WIDE_INT);
7692 return build_int_cst_wide_type (type, lo, hi);
7696 /* Subroutine of native_interpret_expr. Interpret the contents of
7697 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7698 If the buffer cannot be interpreted, return NULL_TREE. */
7701 native_interpret_real (tree type, const unsigned char *ptr, int len)
7703 enum machine_mode mode = TYPE_MODE (type);
7704 int total_bytes = GET_MODE_SIZE (mode);
7705 int byte, offset, word, words, bitpos;
7706 unsigned char value;
7707 /* There are always 32 bits in each long, no matter the size of
7708 the hosts long. We handle floating point representations with
7713 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes bounds the largest supported FP representation here.  */
7714 if (total_bytes > len || total_bytes > 24)
7716 words = 32 / UNITS_PER_WORD;
7718 memset (tmp, 0, sizeof (tmp));
/* Gather bytes back into 32-bit groups; mirrors native_encode_real.  */
7719 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7720 bitpos += BITS_PER_UNIT)
7722 byte = (bitpos / BITS_PER_UNIT) & 3;
7723 if (UNITS_PER_WORD < 4)
7725 word = byte / UNITS_PER_WORD;
7726 if (WORDS_BIG_ENDIAN)
7727 word = (words - 1) - word;
7728 offset = word * UNITS_PER_WORD;
7729 if (BYTES_BIG_ENDIAN)
7730 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7732 offset += byte % UNITS_PER_WORD;
7735 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7736 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7738 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Reconstruct the REAL_VALUE_TYPE from the target image.  */
7741 real_from_target (&r, tmp, mode);
7742 return build_real (type, r);
7746 /* Subroutine of native_interpret_expr. Interpret the contents of
7747 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7748 If the buffer cannot be interpreted, return NULL_TREE. */
7751 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7753 tree etype, rpart, ipart;
7756 etype = TREE_TYPE (type);
7757 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part occupies the first SIZE bytes, imaginary part the next SIZE
   (elided checks presumably fail on a NULL part — confirm).  */
7760 rpart = native_interpret_expr (etype, ptr, size);
7763 ipart = native_interpret_expr (etype, ptr+size, size);
7766 return build_complex (type, rpart, ipart);
7770 /* Subroutine of native_interpret_expr. Interpret the contents of
7771 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7772 If the buffer cannot be interpreted, return NULL_TREE. */
7775 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7777 tree etype, elem, elements;
7780 etype = TREE_TYPE (type);
7781 size = GET_MODE_SIZE (TYPE_MODE (etype));
7782 count = TYPE_VECTOR_SUBPARTS (type);
7783 if (size * count > len)
7786 elements = NULL_TREE;
/* Cons the element list back to front so it ends up in index order.  */
7787 for (i = count - 1; i >= 0; i--)
7789 elem = native_interpret_expr (etype, ptr+(i*size), size);
7792 elements = tree_cons (NULL_TREE, elem, elements);
7794 return build_vector (type, elements);
7798 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7799 the buffer PTR of length LEN as a constant of type TYPE. For
7800 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7801 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7802 return NULL_TREE. */
7805 native_interpret_expr (tree type, const unsigned char *ptr, int len)
/* Dispatch on the requested type's tree code (case labels and the
   failing default are elided from this extract).  */
7807 switch (TREE_CODE (type))
7812 return native_interpret_int (type, ptr, len);
7815 return native_interpret_real (type, ptr, len);
7818 return native_interpret_complex (type, ptr, len);
7821 return native_interpret_vector (type, ptr, len);
7829 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7830 TYPE at compile-time. If we're unable to perform the conversion
7831 return NULL_TREE. */
7834 fold_view_convert_expr (tree type, tree expr)
7836 /* We support up to 512-bit values (for V8DFmode). */
7837 unsigned char buffer[64];
7840 /* Check that the host and target are sane. */
7841 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to target byte order, then reinterpret
   the same bytes as a constant of TYPE.  */
7844 len = native_encode_expr (expr, buffer, sizeof (buffer));
7848 return native_interpret_expr (type, buffer, len);
7851 /* Build an expression for the address of T. Folds away INDIRECT_REF
7852 to avoid confusing the gimplify process. When IN_FOLD is true
7853 avoid modifications of T. */
/* NOTE(review): the branch structure around 7875-7884 is elided; the
   guard selecting between the two ADDR_EXPR builds is not visible.  */
7856 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7858 /* The size of the object is not relevant when talking about its address. */
7859 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7860 t = TREE_OPERAND (t, 0);
7862 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7863 if (TREE_CODE (t) == INDIRECT_REF
7864 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p, converted to PTRTYPE if needed.  */
7866 t = TREE_OPERAND (t, 0);
7868 if (TREE_TYPE (t) != ptrtype)
7869 t = build1 (NOP_EXPR, ptrtype, t);
/* Walk to the innermost base object and mark it addressable
   (presumably only when !in_fold — guard elided, confirm).  */
7875 while (handled_component_p (base))
7876 base = TREE_OPERAND (base, 0);
7879 TREE_ADDRESSABLE (base) = 1;
7881 t = build1 (ADDR_EXPR, ptrtype, t);
7884 t = build1 (ADDR_EXPR, ptrtype, t);
7889 /* Build an expression for the address of T with type PTRTYPE. This
7890 function modifies the input parameter 'T' by sometimes setting the
7891 TREE_ADDRESSABLE flag. */
7894 build_fold_addr_expr_with_type (tree t, tree ptrtype)
/* in_fold == false: the helper may modify T (set TREE_ADDRESSABLE).  */
7896 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7899 /* Build an expression for the address of T. This function modifies
7900 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7901 flag. When called from fold functions, use fold_addr_expr instead. */
7904 build_fold_addr_expr (tree t)
/* Pointer type is derived from T's own type; in_fold argument (elided
   final argument line) permits modification of T.  */
7906 return build_fold_addr_expr_with_type_1 (t,
7907 build_pointer_type (TREE_TYPE (t)),
7911 /* Same as build_fold_addr_expr, builds an expression for the address
7912 of T, but avoids touching the input node 't'. Fold functions
7913 should use this version. */
7916 fold_addr_expr (tree t)
7918 tree ptrtype = build_pointer_type (TREE_TYPE (t));
/* in_fold == true: the helper must not modify T.  */
7920 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7923 /* Fold a unary expression of code CODE and type TYPE with operand
7924 OP0. Return the folded expression if folding is successful.
7925 Otherwise, return NULL_TREE. */
7928 fold_unary (enum tree_code code, tree type, tree op0)
7932 enum tree_code_class kind = TREE_CODE_CLASS (code);
7934 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7935 && TREE_CODE_LENGTH (code) == 1);
7940 if (code == NOP_EXPR || code == CONVERT_EXPR
7941 || code == FLOAT_EXPR || code == ABS_EXPR)
7943 /* Don't use STRIP_NOPS, because signedness of argument type
7945 STRIP_SIGN_NOPS (arg0);
7949 /* Strip any conversions that don't change the mode. This
7950 is safe for every expression, except for a comparison
7951 expression because its signedness is derived from its
7954 Note that this is done as an internal manipulation within
7955 the constant folder, in order to find the simplest
7956 representation of the arguments so that their form can be
7957 studied. In any cases, the appropriate type conversions
7958 should be put back in the tree that will get out of the
7964 if (TREE_CODE_CLASS (code) == tcc_unary)
7966 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7967 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7968 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7969 else if (TREE_CODE (arg0) == COND_EXPR)
7971 tree arg01 = TREE_OPERAND (arg0, 1);
7972 tree arg02 = TREE_OPERAND (arg0, 2);
7973 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7974 arg01 = fold_build1 (code, type, arg01);
7975 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7976 arg02 = fold_build1 (code, type, arg02);
7977 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7980 /* If this was a conversion, and all we did was to move into
7981 inside the COND_EXPR, bring it back out. But leave it if
7982 it is a conversion from integer to integer and the
7983 result precision is no wider than a word since such a
7984 conversion is cheap and may be optimized away by combine,
7985 while it couldn't if it were outside the COND_EXPR. Then return
7986 so we don't get into an infinite recursion loop taking the
7987 conversion out and then back in. */
7989 if ((code == NOP_EXPR || code == CONVERT_EXPR
7990 || code == NON_LVALUE_EXPR)
7991 && TREE_CODE (tem) == COND_EXPR
7992 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7993 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7994 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7995 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7996 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7997 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7998 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8000 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8001 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8002 || flag_syntax_only))
8003 tem = build1 (code, type,
8005 TREE_TYPE (TREE_OPERAND
8006 (TREE_OPERAND (tem, 1), 0)),
8007 TREE_OPERAND (tem, 0),
8008 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8009 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8012 else if (COMPARISON_CLASS_P (arg0))
8014 if (TREE_CODE (type) == BOOLEAN_TYPE)
8016 arg0 = copy_node (arg0);
8017 TREE_TYPE (arg0) = type;
8020 else if (TREE_CODE (type) != INTEGER_TYPE)
8021 return fold_build3 (COND_EXPR, type, arg0,
8022 fold_build1 (code, type,
8024 fold_build1 (code, type,
8025 integer_zero_node));
8034 case FIX_TRUNC_EXPR:
8035 if (TREE_TYPE (op0) == type)
8038 /* If we have (type) (a CMP b) and type is an integral type, return
8039 new expression involving the new type. */
8040 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8041 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8042 TREE_OPERAND (op0, 1));
8044 /* Handle cases of two conversions in a row. */
8045 if (TREE_CODE (op0) == NOP_EXPR
8046 || TREE_CODE (op0) == CONVERT_EXPR)
8048 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8049 tree inter_type = TREE_TYPE (op0);
8050 int inside_int = INTEGRAL_TYPE_P (inside_type);
8051 int inside_ptr = POINTER_TYPE_P (inside_type);
8052 int inside_float = FLOAT_TYPE_P (inside_type);
8053 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8054 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8055 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8056 int inter_int = INTEGRAL_TYPE_P (inter_type);
8057 int inter_ptr = POINTER_TYPE_P (inter_type);
8058 int inter_float = FLOAT_TYPE_P (inter_type);
8059 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8060 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8061 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8062 int final_int = INTEGRAL_TYPE_P (type);
8063 int final_ptr = POINTER_TYPE_P (type);
8064 int final_float = FLOAT_TYPE_P (type);
8065 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8066 unsigned int final_prec = TYPE_PRECISION (type);
8067 int final_unsignedp = TYPE_UNSIGNED (type);
8069 /* In addition to the cases of two conversions in a row
8070 handled below, if we are converting something to its own
8071 type via an object of identical or wider precision, neither
8072 conversion is needed. */
8073 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8074 && (((inter_int || inter_ptr) && final_int)
8075 || (inter_float && final_float))
8076 && inter_prec >= final_prec)
8077 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8079 /* Likewise, if the intermediate and final types are either both
8080 float or both integer, we don't need the middle conversion if
8081 it is wider than the final type and doesn't change the signedness
8082 (for integers). Avoid this if the final type is a pointer
8083 since then we sometimes need the inner conversion. Likewise if
8084 the outer has a precision not equal to the size of its mode. */
8085 if (((inter_int && inside_int)
8086 || (inter_float && inside_float)
8087 || (inter_vec && inside_vec))
8088 && inter_prec >= inside_prec
8089 && (inter_float || inter_vec
8090 || inter_unsignedp == inside_unsignedp)
8091 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8092 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8094 && (! final_vec || inter_prec == inside_prec))
8095 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8097 /* If we have a sign-extension of a zero-extended value, we can
8098 replace that by a single zero-extension. */
8099 if (inside_int && inter_int && final_int
8100 && inside_prec < inter_prec && inter_prec < final_prec
8101 && inside_unsignedp && !inter_unsignedp)
8102 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8104 /* Two conversions in a row are not needed unless:
8105 - some conversion is floating-point (overstrict for now), or
8106 - some conversion is a vector (overstrict for now), or
8107 - the intermediate type is narrower than both initial and
8109 - the intermediate type and innermost type differ in signedness,
8110 and the outermost type is wider than the intermediate, or
8111 - the initial type is a pointer type and the precisions of the
8112 intermediate and final types differ, or
8113 - the final type is a pointer type and the precisions of the
8114 initial and intermediate types differ.
8115 - the initial type is a pointer to an array and the final type
8117 if (! inside_float && ! inter_float && ! final_float
8118 && ! inside_vec && ! inter_vec && ! final_vec
8119 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8120 && ! (inside_int && inter_int
8121 && inter_unsignedp != inside_unsignedp
8122 && inter_prec < final_prec)
8123 && ((inter_unsignedp && inter_prec > inside_prec)
8124 == (final_unsignedp && final_prec > inter_prec))
8125 && ! (inside_ptr && inter_prec != final_prec)
8126 && ! (final_ptr && inside_prec != inter_prec)
8127 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8128 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8129 && ! (inside_ptr && final_ptr
8130 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
8131 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8132 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8135 /* Handle (T *)&A.B.C for A being of type T and B and C
8136 living at offset zero. This occurs frequently in
8137 C++ upcasting and then accessing the base. */
8138 if (TREE_CODE (op0) == ADDR_EXPR
8139 && POINTER_TYPE_P (type)
8140 && handled_component_p (TREE_OPERAND (op0, 0)))
8142 HOST_WIDE_INT bitsize, bitpos;
8144 enum machine_mode mode;
8145 int unsignedp, volatilep;
8146 tree base = TREE_OPERAND (op0, 0);
8147 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8148 &mode, &unsignedp, &volatilep, false);
8149 /* If the reference was to a (constant) zero offset, we can use
8150 the address of the base if it has the same base type
8151 as the result type. */
8152 if (! offset && bitpos == 0
8153 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8154 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8155 return fold_convert (type, fold_addr_expr (base));
8158 if ((TREE_CODE (op0) == MODIFY_EXPR
8159 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
8160 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
8161 /* Detect assigning a bitfield. */
8162 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
8164 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
8166 /* Don't leave an assignment inside a conversion
8167 unless assigning a bitfield. */
8168 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
8169 /* First do the assignment, then return converted constant. */
8170 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8171 TREE_NO_WARNING (tem) = 1;
8172 TREE_USED (tem) = 1;
8176 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8177 constants (if x has signed type, the sign bit cannot be set
8178 in c). This folds extension into the BIT_AND_EXPR. */
8179 if (INTEGRAL_TYPE_P (type)
8180 && TREE_CODE (type) != BOOLEAN_TYPE
8181 && TREE_CODE (op0) == BIT_AND_EXPR
8182 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8185 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8188 if (TYPE_UNSIGNED (TREE_TYPE (and))
8189 || (TYPE_PRECISION (type)
8190 <= TYPE_PRECISION (TREE_TYPE (and))))
8192 else if (TYPE_PRECISION (TREE_TYPE (and1))
8193 <= HOST_BITS_PER_WIDE_INT
8194 && host_integerp (and1, 1))
8196 unsigned HOST_WIDE_INT cst;
8198 cst = tree_low_cst (and1, 1);
8199 cst &= (HOST_WIDE_INT) -1
8200 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8201 change = (cst == 0);
8202 #ifdef LOAD_EXTEND_OP
8204 && !flag_syntax_only
8205 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8208 tree uns = unsigned_type_for (TREE_TYPE (and0));
8209 and0 = fold_convert (uns, and0);
8210 and1 = fold_convert (uns, and1);
8216 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8217 TREE_INT_CST_HIGH (and1), 0,
8218 TREE_OVERFLOW (and1));
8219 return fold_build2 (BIT_AND_EXPR, type,
8220 fold_convert (type, and0), tem);
8224 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8225 when one of the new casts will fold away. Conservatively we assume
8226 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8227 if (POINTER_TYPE_P (type)
8228 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8229 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8230 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8231 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8233 tree arg00 = TREE_OPERAND (arg0, 0);
8234 tree arg01 = TREE_OPERAND (arg0, 1);
8236 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8237 fold_convert (sizetype, arg01));
8240 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8241 of the same precision, and X is an integer type not narrower than
8242 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8243 if (INTEGRAL_TYPE_P (type)
8244 && TREE_CODE (op0) == BIT_NOT_EXPR
8245 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8246 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8247 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8248 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8250 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8251 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8252 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8253 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8256 tem = fold_convert_const (code, type, op0);
8257 return tem ? tem : NULL_TREE;
8259 case FIXED_CONVERT_EXPR:
8260 tem = fold_convert_const (code, type, arg0);
8261 return tem ? tem : NULL_TREE;
8263 case VIEW_CONVERT_EXPR:
8264 if (TREE_TYPE (op0) == type)
8266 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR
8267 || (TREE_CODE (op0) == NOP_EXPR
8268 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8269 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8270 && TYPE_PRECISION (TREE_TYPE (op0))
8271 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8272 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8273 return fold_view_convert_expr (type, op0);
8276 tem = fold_negate_expr (arg0);
8278 return fold_convert (type, tem);
8282 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8283 return fold_abs_const (arg0, type);
8284 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8285 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8286 /* Convert fabs((double)float) into (double)fabsf(float). */
8287 else if (TREE_CODE (arg0) == NOP_EXPR
8288 && TREE_CODE (type) == REAL_TYPE)
8290 tree targ0 = strip_float_extensions (arg0);
8292 return fold_convert (type, fold_build1 (ABS_EXPR,
8296 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8297 else if (TREE_CODE (arg0) == ABS_EXPR)
8299 else if (tree_expr_nonnegative_p (arg0))
8302 /* Strip sign ops from argument. */
8303 if (TREE_CODE (type) == REAL_TYPE)
8305 tem = fold_strip_sign_ops (arg0);
8307 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8312 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8313 return fold_convert (type, arg0);
8314 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8316 tree itype = TREE_TYPE (type);
8317 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8318 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8319 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8321 if (TREE_CODE (arg0) == COMPLEX_CST)
8323 tree itype = TREE_TYPE (type);
8324 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8325 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8326 return build_complex (type, rpart, negate_expr (ipart));
8328 if (TREE_CODE (arg0) == CONJ_EXPR)
8329 return fold_convert (type, TREE_OPERAND (arg0, 0));
8333 if (TREE_CODE (arg0) == INTEGER_CST)
8334 return fold_not_const (arg0, type);
8335 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8336 return TREE_OPERAND (op0, 0);
8337 /* Convert ~ (-A) to A - 1. */
8338 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8339 return fold_build2 (MINUS_EXPR, type,
8340 fold_convert (type, TREE_OPERAND (arg0, 0)),
8341 build_int_cst (type, 1));
8342 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8343 else if (INTEGRAL_TYPE_P (type)
8344 && ((TREE_CODE (arg0) == MINUS_EXPR
8345 && integer_onep (TREE_OPERAND (arg0, 1)))
8346 || (TREE_CODE (arg0) == PLUS_EXPR
8347 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8348 return fold_build1 (NEGATE_EXPR, type,
8349 fold_convert (type, TREE_OPERAND (arg0, 0)));
8350 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8351 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8352 && (tem = fold_unary (BIT_NOT_EXPR, type,
8354 TREE_OPERAND (arg0, 0)))))
8355 return fold_build2 (BIT_XOR_EXPR, type, tem,
8356 fold_convert (type, TREE_OPERAND (arg0, 1)));
8357 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8358 && (tem = fold_unary (BIT_NOT_EXPR, type,
8360 TREE_OPERAND (arg0, 1)))))
8361 return fold_build2 (BIT_XOR_EXPR, type,
8362 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8363 /* Perform BIT_NOT_EXPR on each element individually. */
8364 else if (TREE_CODE (arg0) == VECTOR_CST)
8366 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8367 int count = TYPE_VECTOR_SUBPARTS (type), i;
8369 for (i = 0; i < count; i++)
8373 elem = TREE_VALUE (elements);
8374 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8375 if (elem == NULL_TREE)
8377 elements = TREE_CHAIN (elements);
8380 elem = build_int_cst (TREE_TYPE (type), -1);
8381 list = tree_cons (NULL_TREE, elem, list);
8384 return build_vector (type, nreverse (list));
8389 case TRUTH_NOT_EXPR:
8390 /* The argument to invert_truthvalue must have Boolean type. */
8391 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8392 arg0 = fold_convert (boolean_type_node, arg0);
8394 /* Note that the operand of this must be an int
8395 and its values must be 0 or 1.
8396 ("true" is a fixed value perhaps depending on the language,
8397 but we don't handle values other than 1 correctly yet.) */
8398 tem = fold_truth_not_expr (arg0);
8401 return fold_convert (type, tem);
8404 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8405 return fold_convert (type, arg0);
8406 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8407 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8408 TREE_OPERAND (arg0, 1));
8409 if (TREE_CODE (arg0) == COMPLEX_CST)
8410 return fold_convert (type, TREE_REALPART (arg0));
8411 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8413 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8414 tem = fold_build2 (TREE_CODE (arg0), itype,
8415 fold_build1 (REALPART_EXPR, itype,
8416 TREE_OPERAND (arg0, 0)),
8417 fold_build1 (REALPART_EXPR, itype,
8418 TREE_OPERAND (arg0, 1)));
8419 return fold_convert (type, tem);
8421 if (TREE_CODE (arg0) == CONJ_EXPR)
8423 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8424 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8425 return fold_convert (type, tem);
8427 if (TREE_CODE (arg0) == CALL_EXPR)
8429 tree fn = get_callee_fndecl (arg0);
8430 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8431 switch (DECL_FUNCTION_CODE (fn))
8433 CASE_FLT_FN (BUILT_IN_CEXPI):
8434 fn = mathfn_built_in (type, BUILT_IN_COS);
8436 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8446 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8447 return fold_convert (type, integer_zero_node);
8448 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8449 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8450 TREE_OPERAND (arg0, 0));
8451 if (TREE_CODE (arg0) == COMPLEX_CST)
8452 return fold_convert (type, TREE_IMAGPART (arg0));
8453 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8455 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8456 tem = fold_build2 (TREE_CODE (arg0), itype,
8457 fold_build1 (IMAGPART_EXPR, itype,
8458 TREE_OPERAND (arg0, 0)),
8459 fold_build1 (IMAGPART_EXPR, itype,
8460 TREE_OPERAND (arg0, 1)));
8461 return fold_convert (type, tem);
8463 if (TREE_CODE (arg0) == CONJ_EXPR)
8465 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8466 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8467 return fold_convert (type, negate_expr (tem));
8469 if (TREE_CODE (arg0) == CALL_EXPR)
8471 tree fn = get_callee_fndecl (arg0);
8472 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8473 switch (DECL_FUNCTION_CODE (fn))
8475 CASE_FLT_FN (BUILT_IN_CEXPI):
8476 fn = mathfn_built_in (type, BUILT_IN_SIN);
8478 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8489 } /* switch (code) */
8492 /* Fold a binary expression of code CODE and type TYPE with operands
8493 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8494 Return the folded expression if folding is successful. Otherwise,
8495 return NULL_TREE. */
/* NOTE(review): this extract appears to have dropped lines (original
   numbering jumps, e.g. 8496-8497, 8506-8508, 8531+): the `static tree'
   return-type line, the opening brace, the `else return NULL_TREE;'
   fallback for non-MIN/MAX codes, and the trailing `return NULL_TREE;'
   are not visible.  Restore from upstream before compiling.  */
8498 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8500 enum tree_code compl_code;
/* COMPL_CODE is the dual of CODE: MAX for MIN and vice versa, so the
   four absorption rules below can be written once for both codes.  */
8502 if (code == MIN_EXPR)
8503 compl_code = MAX_EXPR;
8504 else if (code == MAX_EXPR)
8505 compl_code = MIN_EXPR;
8509 /* MIN (MAX (a, b), b) == b. */
8510 if (TREE_CODE (op0) == compl_code
8511 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8512 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8514 /* MIN (MAX (b, a), b) == b. */
/* reorder_operands_p guards against reordering side effects when the
   matched operand is not in its original evaluation position.  */
8515 if (TREE_CODE (op0) == compl_code
8516 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8517 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8518 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8520 /* MIN (a, MAX (a, b)) == a. */
8521 if (TREE_CODE (op1) == compl_code
8522 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8523 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8524 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8526 /* MIN (a, MAX (b, a)) == a. */
8527 if (TREE_CODE (op1) == compl_code
8528 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8529 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8530 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8535 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8536 by changing CODE to reduce the magnitude of constants involved in
8537 ARG0 of the comparison.
8538 Returns a canonicalized comparison tree if a simplification was
8539 possible, otherwise returns NULL_TREE.
8540 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8541 valid if signed overflow is undefined. */
/* NOTE(review): the extraction dropped interior lines (numbering jumps
   at 8542-8543, 8550-8552, 8558-8559, 8562-8563, 8566, 8570-8571,
   8575, 8578, 8581, ...): the `static tree' line, braces, the sgn0
   declaration, the `return NULL_TREE;' early exits, the per-branch
   `code = ...' reassignments, and a `swap' flag used at line 8622 are
   all missing.  Restore from upstream before compiling.  */
8544 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8545 tree arg0, tree arg1,
8546 bool *strict_overflow_p)
8548 enum tree_code code0 = TREE_CODE (arg0);
8549 tree t, cst0 = NULL_TREE;
8553 /* Match A +- CST code arg1 and CST code arg1. */
8554 if (!(((code0 == MINUS_EXPR
8555 || code0 == PLUS_EXPR)
8556 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8557 || code0 == INTEGER_CST))
8560 /* Identify the constant in arg0 and its sign. */
8561 if (code0 == INTEGER_CST)
8564 cst0 = TREE_OPERAND (arg0, 1);
8565 sgn0 = tree_int_cst_sgn (cst0);
8567 /* Overflowed constants and zero will cause problems. */
8568 if (integer_zerop (cst0)
8569 || TREE_OVERFLOW (cst0))
8572 /* See if we can reduce the magnitude of the constant in
8573 arg0 by changing the comparison code. */
8574 if (code0 == INTEGER_CST)
8576 /* CST <= arg1 -> CST-1 < arg1. */
8577 if (code == LE_EXPR && sgn0 == 1)
8579 /* -CST < arg1 -> -CST-1 <= arg1. */
8580 else if (code == LT_EXPR && sgn0 == -1)
8582 /* CST > arg1 -> CST-1 >= arg1. */
8583 else if (code == GT_EXPR && sgn0 == 1)
8585 /* -CST >= arg1 -> -CST-1 > arg1. */
8586 else if (code == GE_EXPR && sgn0 == -1)
8590 /* arg1 code' CST' might be more canonical. */
8595 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8597 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8599 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8600 else if (code == GT_EXPR
8601 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8603 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8604 else if (code == LE_EXPR
8605 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8607 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8608 else if (code == GE_EXPR
8609 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* All of the above rewrites are only valid when signed overflow is
   undefined; the caller is responsible for emitting the warning.  */
8613 *strict_overflow_p = true;
8616 /* Now build the constant reduced in magnitude. */
/* For a negative constant we add 1, for a positive one we subtract 1;
   either way |CST| shrinks, which is the whole point.  */
8617 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8618 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8619 if (code0 != INTEGER_CST)
8620 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8622 /* If swapping might yield to a more canonical form, do so. */
8624 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8626 return fold_build2 (code, type, t, arg1);
8629 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8630 overflow further. Try to decrease the magnitude of constants involved
8631 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8632 and put sole constants at the second argument position.
8633 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): extraction dropped lines (8634-8635, 8638-8639, 8643,
   8648-8649, 8654-8655, 8658-8660, 8662, 8669+): the `static tree'
   line, braces, the `tree t;' declaration, the early `return
   NULL_TREE;', the success return after the first attempt, and the
   final `return t;'.  Restore from upstream before compiling.  */
8636 maybe_canonicalize_comparison (enum tree_code code, tree type,
8637 tree arg0, tree arg1)
8640 bool strict_overflow_p;
8641 const char * const warnmsg = G_("assuming signed overflow does not occur "
8642 "when reducing constant in comparison");
8644 /* In principle pointers also have undefined overflow behavior,
8645 but that causes problems elsewhere. */
8646 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8647 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8650 /* Try canonicalization by simplifying arg0. */
8651 strict_overflow_p = false;
8652 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8653 &strict_overflow_p);
/* Only warn when the helper actually relied on undefined signed
   overflow, as reported through STRICT_OVERFLOW_P.  */
8656 if (strict_overflow_p)
8657 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8661 /* Try canonicalization by simplifying arg1 using the swapped
8663 code = swap_tree_comparison (code);
8664 strict_overflow_p = false;
8665 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8666 &strict_overflow_p);
8667 if (t && strict_overflow_p)
8668 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8672 /* Subroutine of fold_binary. This routine performs all of the
8673 transformations that are common to the equality/inequality
8674 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8675 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8676 fold_binary should call fold_binary. Fold a comparison with
8677 tree code CODE and type TYPE with operands OP0 and OP1. Return
8678 the folded comparison or NULL_TREE. */
/* NOTE(review): throughout this function the extraction dropped many
   lines (original numbering jumps repeatedly): the `static tree'
   return-type line, opening/closing braces, several local declarations
   (e.g. const2/lhs/lhs_add, indirect_base1, cst, save_p,
   high_result/equal_result/low_result), case labels of the switches,
   and various early returns.  Do not compile this extract as-is;
   restore the dropped lines from upstream.  The comments added below
   only describe what the visible code shows.  */
8681 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8683 tree arg0, arg1, tem;
/* STRIP_SIGN_NOPS peels conversions that do not change value or sign,
   so the pattern matches below see through trivial casts.  */
8688 STRIP_SIGN_NOPS (arg0);
8689 STRIP_SIGN_NOPS (arg1);
/* Constant-fold first: if both operands are constants this resolves
   the comparison outright.  */
8691 tem = fold_relational_const (code, type, arg0, arg1);
8692 if (tem != NULL_TREE)
8695 /* If one arg is a real or integer constant, put it last. */
8696 if (tree_swap_operands_p (arg0, arg1, true))
8697 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8699 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8700 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8701 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8702 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8703 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8704 && (TREE_CODE (arg1) == INTEGER_CST
8705 && !TREE_OVERFLOW (arg1)))
8707 tree const1 = TREE_OPERAND (arg0, 1);
8709 tree variable = TREE_OPERAND (arg0, 0);
/* lhs_add records whether moving CONST1 across the comparison flips
   the operation (X - C1 cmp C2 becomes X cmp C2 + C1).  */
8712 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8714 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8715 TREE_TYPE (arg1), const2, const1);
8717 /* If the constant operation overflowed this can be
8718 simplified as a comparison against INT_MAX/INT_MIN. */
8719 if (TREE_CODE (lhs) == INTEGER_CST
8720 && TREE_OVERFLOW (lhs))
8722 int const1_sgn = tree_int_cst_sgn (const1);
8723 enum tree_code code2 = code;
8725 /* Get the sign of the constant on the lhs if the
8726 operation were VARIABLE + CONST1. */
8727 if (TREE_CODE (arg0) == MINUS_EXPR)
8728 const1_sgn = -const1_sgn;
8730 /* The sign of the constant determines if we overflowed
8731 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8732 Canonicalize to the INT_MIN overflow by swapping the comparison
8734 if (const1_sgn == -1)
8735 code2 = swap_tree_comparison (code);
8737 /* We now can look at the canonicalized case
8738 VARIABLE + 1 CODE2 INT_MIN
8739 and decide on the result. */
8740 if (code2 == LT_EXPR
8742 || code2 == EQ_EXPR)
8743 return omit_one_operand (type, boolean_false_node, variable);
8744 else if (code2 == NE_EXPR
8746 || code2 == GT_EXPR)
8747 return omit_one_operand (type, boolean_true_node, variable);
/* Only rewrite when the combined constant is the same kind as ARG1
   and did not overflow; otherwise keep the original form.  */
8750 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8751 && (TREE_CODE (lhs) != INTEGER_CST
8752 || !TREE_OVERFLOW (lhs)))
8754 fold_overflow_warning (("assuming signed overflow does not occur "
8755 "when changing X +- C1 cmp C2 to "
8757 WARN_STRICT_OVERFLOW_COMPARISON);
8758 return fold_build2 (code, type, variable, lhs);
8762 /* For comparisons of pointers we can decompose it to a compile time
8763 comparison of the base objects and the offsets into the object.
8764 This requires at least one operand being an ADDR_EXPR or a
8765 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8766 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8767 && (TREE_CODE (arg0) == ADDR_EXPR
8768 || TREE_CODE (arg1) == ADDR_EXPR
8769 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8770 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8772 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8773 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8774 enum machine_mode mode;
8775 int volatilep, unsignedp;
8776 bool indirect_base0 = false;
8778 /* Get base and offset for the access. Strip ADDR_EXPR for
8779 get_inner_reference, but put it back by stripping INDIRECT_REF
8780 off the base object if possible. */
8782 if (TREE_CODE (arg0) == ADDR_EXPR)
8784 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8785 &bitsize, &bitpos0, &offset0, &mode,
8786 &unsignedp, &volatilep, false);
8787 if (TREE_CODE (base0) == INDIRECT_REF)
8788 base0 = TREE_OPERAND (base0, 0);
8790 indirect_base0 = true;
8792 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8794 base0 = TREE_OPERAND (arg0, 0);
8795 offset0 = TREE_OPERAND (arg0, 1);
8799 if (TREE_CODE (arg1) == ADDR_EXPR)
8801 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8802 &bitsize, &bitpos1, &offset1, &mode,
8803 &unsignedp, &volatilep, false);
8804 /* We have to make sure to have an indirect/non-indirect base1
8805 just the same as we did for base0. */
8806 if (TREE_CODE (base1) == INDIRECT_REF
8808 base1 = TREE_OPERAND (base1, 0);
8809 else if (!indirect_base0)
8812 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8814 base1 = TREE_OPERAND (arg1, 0);
8815 offset1 = TREE_OPERAND (arg1, 1);
8817 else if (indirect_base0)
8820 /* If we have equivalent bases we might be able to simplify. */
8822 && operand_equal_p (base0, base1, 0))
8824 /* We can fold this expression to a constant if the non-constant
8825 offset parts are equal. */
8826 if (offset0 == offset1
8827 || (offset0 && offset1
8828 && operand_equal_p (offset0, offset1, 0)))
/* Same base, same variable offset: the result depends only on the
   constant bit positions.  (Case labels for each comparison code
   are among the dropped lines.)  */
8833 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8835 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8837 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8839 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8841 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8843 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8847 /* We can simplify the comparison to a comparison of the variable
8848 offset parts if the constant offset parts are equal.
8849 Be careful to use signed size type here because otherwise we
8850 mess with array offsets in the wrong way. This is possible
8851 because pointer arithmetic is restricted to retain within an
8852 object and overflow on pointer differences is undefined as of
8853 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8854 else if (bitpos0 == bitpos1)
8856 tree signed_size_type_node;
8857 signed_size_type_node = signed_type_for (size_type_node);
8859 /* By converting to signed size type we cover middle-end pointer
8860 arithmetic which operates on unsigned pointer types of size
8861 type size and ARRAY_REF offsets which are properly sign or
8862 zero extended from their type in case it is narrower than
8864 if (offset0 == NULL_TREE)
8865 offset0 = build_int_cst (signed_size_type_node, 0);
8867 offset0 = fold_convert (signed_size_type_node, offset0);
8868 if (offset1 == NULL_TREE)
8869 offset1 = build_int_cst (signed_size_type_node, 0);
8871 offset1 = fold_convert (signed_size_type_node, offset1);
8873 return fold_build2 (code, type, offset0, offset1);
8878 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8879 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8880 the resulting offset is smaller in absolute value than the
8882 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8883 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8884 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8885 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8886 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8887 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8888 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8890 tree const1 = TREE_OPERAND (arg0, 1);
8891 tree const2 = TREE_OPERAND (arg1, 1);
8892 tree variable1 = TREE_OPERAND (arg0, 0);
8893 tree variable2 = TREE_OPERAND (arg1, 0);
8895 const char * const warnmsg = G_("assuming signed overflow does not "
8896 "occur when combining constants around "
8899 /* Put the constant on the side where it doesn't overflow and is
8900 of lower absolute value than before. */
8901 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8902 ? MINUS_EXPR : PLUS_EXPR,
/* The tree_int_cst_compare test below checks that |cst| actually
   shrank relative to const2 (same sign, smaller magnitude).  */
8904 if (!TREE_OVERFLOW (cst)
8905 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8907 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8908 return fold_build2 (code, type,
8910 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* Otherwise try moving the combined constant to the other side.  */
8914 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8915 ? MINUS_EXPR : PLUS_EXPR,
8917 if (!TREE_OVERFLOW (cst)
8918 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8920 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8921 return fold_build2 (code, type,
8922 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8928 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8929 signed arithmetic case. That form is created by the compiler
8930 often enough for folding it to be of value. One example is in
8931 computing loop trip counts after Operator Strength Reduction. */
8932 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8933 && TREE_CODE (arg0) == MULT_EXPR
8934 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8935 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8936 && integer_zerop (arg1))
8938 tree const1 = TREE_OPERAND (arg0, 1);
8939 tree const2 = arg1; /* zero */
8940 tree variable1 = TREE_OPERAND (arg0, 0);
8941 enum tree_code cmp_code = code;
/* A zero multiplier would make the transform wrong; it should have
   been folded away before reaching here.  */
8943 gcc_assert (!integer_zerop (const1));
8945 fold_overflow_warning (("assuming signed overflow does not occur when "
8946 "eliminating multiplication in comparison "
8948 WARN_STRICT_OVERFLOW_COMPARISON);
8950 /* If const1 is negative we swap the sense of the comparison. */
8951 if (tree_int_cst_sgn (const1) < 0)
8952 cmp_code = swap_tree_comparison (cmp_code);
8954 return fold_build2 (cmp_code, type, variable1, const2);
8957 tem = maybe_canonicalize_comparison (code, type, op0, op1);
/* Floating-point specific simplifications.  */
8961 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8963 tree targ0 = strip_float_extensions (arg0);
8964 tree targ1 = strip_float_extensions (arg1);
8965 tree newtype = TREE_TYPE (targ0);
8967 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8968 newtype = TREE_TYPE (targ1);
8970 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8971 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8972 return fold_build2 (code, type, fold_convert (newtype, targ0),
8973 fold_convert (newtype, targ1));
8975 /* (-a) CMP (-b) -> b CMP a */
8976 if (TREE_CODE (arg0) == NEGATE_EXPR
8977 && TREE_CODE (arg1) == NEGATE_EXPR)
8978 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8979 TREE_OPERAND (arg0, 0));
8981 if (TREE_CODE (arg1) == REAL_CST)
8983 REAL_VALUE_TYPE cst;
8984 cst = TREE_REAL_CST (arg1);
8986 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8987 if (TREE_CODE (arg0) == NEGATE_EXPR)
8988 return fold_build2 (swap_tree_comparison (code), type,
8989 TREE_OPERAND (arg0, 0),
8990 build_real (TREE_TYPE (arg1),
8991 REAL_VALUE_NEGATE (cst)));
8993 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8994 /* a CMP (-0) -> a CMP 0 */
8995 if (REAL_VALUE_MINUS_ZERO (cst))
8996 return fold_build2 (code, type, arg0,
8997 build_real (TREE_TYPE (arg1), dconst0));
8999 /* x != NaN is always true, other ops are always false. */
9000 if (REAL_VALUE_ISNAN (cst)
9001 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9003 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9004 return omit_one_operand (type, tem, arg0);
9007 /* Fold comparisons against infinity. */
9008 if (REAL_VALUE_ISINF (cst))
9010 tem = fold_inf_compare (code, type, arg0, arg1);
9011 if (tem != NULL_TREE)
9016 /* If this is a comparison of a real constant with a PLUS_EXPR
9017 or a MINUS_EXPR of a real constant, we can convert it into a
9018 comparison with a revised real constant as long as no overflow
9019 occurs when unsafe_math_optimizations are enabled. */
9020 if (flag_unsafe_math_optimizations
9021 && TREE_CODE (arg1) == REAL_CST
9022 && (TREE_CODE (arg0) == PLUS_EXPR
9023 || TREE_CODE (arg0) == MINUS_EXPR)
9024 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9025 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9026 ? MINUS_EXPR : PLUS_EXPR,
9027 arg1, TREE_OPERAND (arg0, 1), 0))
9028 && !TREE_OVERFLOW (tem))
9029 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9031 /* Likewise, we can simplify a comparison of a real constant with
9032 a MINUS_EXPR whose first operand is also a real constant, i.e.
9033 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9034 floating-point types only if -fassociative-math is set. */
9035 if (flag_associative_math
9036 && TREE_CODE (arg1) == REAL_CST
9037 && TREE_CODE (arg0) == MINUS_EXPR
9038 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9039 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9041 && !TREE_OVERFLOW (tem))
9042 return fold_build2 (swap_tree_comparison (code), type,
9043 TREE_OPERAND (arg0, 1), tem);
9045 /* Fold comparisons against built-in math functions. */
9046 if (TREE_CODE (arg1) == REAL_CST
9047 && flag_unsafe_math_optimizations
9048 && ! flag_errno_math)
9050 enum built_in_function fcode = builtin_mathfn_code (arg0);
9052 if (fcode != END_BUILTINS)
9054 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9055 if (tem != NULL_TREE)
9061 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9062 && (TREE_CODE (arg0) == NOP_EXPR
9063 || TREE_CODE (arg0) == CONVERT_EXPR))
9065 /* If we are widening one operand of an integer comparison,
9066 see if the other operand is similarly being widened. Perhaps we
9067 can do the comparison in the narrower type. */
9068 tem = fold_widened_comparison (code, type, arg0, arg1);
9072 /* Or if we are changing signedness. */
9073 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9078 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9079 constant, we can simplify it. */
9080 if (TREE_CODE (arg1) == INTEGER_CST
9081 && (TREE_CODE (arg0) == MIN_EXPR
9082 || TREE_CODE (arg0) == MAX_EXPR)
9083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9085 tem = optimize_minmax_comparison (code, type, op0, op1);
9090 /* Simplify comparison of something with itself. (For IEEE
9091 floating-point, we can only do some of these simplifications.) */
9092 if (operand_equal_p (arg0, arg1, 0))
/* (Case labels per comparison code are among the dropped lines.)  */
9097 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9098 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9099 return constant_boolean_node (1, type);
9104 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9105 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9106 return constant_boolean_node (1, type);
9107 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9110 /* For NE, we can only do this simplification if integer
9111 or we don't honor IEEE floating point NaNs. */
9112 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9113 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9115 /* ... fall through ... */
9118 return constant_boolean_node (0, type);
9124 /* If we are comparing an expression that just has comparisons
9125 of two integer values, arithmetic expressions of those comparisons,
9126 and constants, we can simplify it. There are only three cases
9127 to check: the two values can either be equal, the first can be
9128 greater, or the second can be greater. Fold the expression for
9129 those three values. Since each value must be 0 or 1, we have
9130 eight possibilities, each of which corresponds to the constant 0
9131 or 1 or one of the six possible comparisons.
9133 This handles common cases like (a > b) == 0 but also handles
9134 expressions like ((x > y) - (y > x)) > 0, which supposedly
9135 occur in macroized code. */
9137 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9139 tree cval1 = 0, cval2 = 0;
9142 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9143 /* Don't handle degenerate cases here; they should already
9144 have been handled anyway. */
9145 && cval1 != 0 && cval2 != 0
9146 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9147 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9148 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9149 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9150 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9151 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9152 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9154 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9155 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9157 /* We can't just pass T to eval_subst in case cval1 or cval2
9158 was the same as ARG1. */
/* The three eval_subst-based results model cval1>cval2, cval1==cval2
   and cval1<cval2 respectively (the lvalue names being assigned here
   are among the dropped lines).  */
9161 = fold_build2 (code, type,
9162 eval_subst (arg0, cval1, maxval,
9166 = fold_build2 (code, type,
9167 eval_subst (arg0, cval1, maxval,
9171 = fold_build2 (code, type,
9172 eval_subst (arg0, cval1, minval,
9176 /* All three of these results should be 0 or 1. Confirm they are.
9177 Then use those values to select the proper code to use. */
9179 if (TREE_CODE (high_result) == INTEGER_CST
9180 && TREE_CODE (equal_result) == INTEGER_CST
9181 && TREE_CODE (low_result) == INTEGER_CST)
9183 /* Make a 3-bit mask with the high-order bit being the
9184 value for `>', the next for '=', and the low for '<'. */
9185 switch ((integer_onep (high_result) * 4)
9186 + (integer_onep (equal_result) * 2)
9187 + integer_onep (low_result))
9191 return omit_one_operand (type, integer_zero_node, arg0);
9212 return omit_one_operand (type, integer_one_node, arg0);
/* save_p is set by twoval_comparison_p when the operands must not be
   evaluated twice, hence the save_expr wrapper.  */
9216 return save_expr (build2 (code, type, cval1, cval2));
9217 return fold_build2 (code, type, cval1, cval2);
9222 /* Fold a comparison of the address of COMPONENT_REFs with the same
9223 type and component to a comparison of the address of the base
9224 object. In short, &x->a OP &y->a to x OP y and
9225 &x->a OP &y.a to x OP &y */
9226 if (TREE_CODE (arg0) == ADDR_EXPR
9227 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9228 && TREE_CODE (arg1) == ADDR_EXPR
9229 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9231 tree cref0 = TREE_OPERAND (arg0, 0);
9232 tree cref1 = TREE_OPERAND (arg1, 0);
/* Same FIELD_DECL on both sides: comparing the field addresses is the
   same as comparing the enclosing objects' addresses.  */
9233 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9235 tree op0 = TREE_OPERAND (cref0, 0);
9236 tree op1 = TREE_OPERAND (cref1, 0);
9237 return fold_build2 (code, type,
9238 fold_addr_expr (op0),
9239 fold_addr_expr (op1));
9243 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9244 into a single range test. */
9245 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9246 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9247 && TREE_CODE (arg1) == INTEGER_CST
9248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9249 && !integer_zerop (TREE_OPERAND (arg0, 1))
9250 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9251 && !TREE_OVERFLOW (arg1))
9253 tem = fold_div_compare (code, type, arg0, arg1);
9254 if (tem != NULL_TREE)
9258 /* Fold ~X op ~Y as Y op X. */
9259 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9260 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9262 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9263 return fold_build2 (code, type,
9264 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9265 TREE_OPERAND (arg0, 0));
9268 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9269 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9270 && TREE_CODE (arg1) == INTEGER_CST)
9272 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9273 return fold_build2 (swap_tree_comparison (code), type,
9274 TREE_OPERAND (arg0, 0),
9275 fold_build1 (BIT_NOT_EXPR, cmp_type,
9276 fold_convert (cmp_type, arg1)));
9283 /* Subroutine of fold_binary. Optimize complex multiplications of the
9284 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9285 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): the extraction dropped lines (9286-9287, 9289, 9292,
   9294, 9297, 9299, 9302-9304, 9308-9309, 9317): the `static tree'
   line, braces around the three alternatives, and the closing brace.
   Restore from upstream before compiling.  */
9288 fold_mult_zconjz (tree type, tree expr)
/* ITYPE is the element type of complex TYPE (real or imaginary part).  */
9290 tree itype = TREE_TYPE (type);
9291 tree rpart, ipart, tem;
/* Pull the real and imaginary parts out of EXPR directly when it is a
   COMPLEX_EXPR or COMPLEX_CST ...  */
9293 if (TREE_CODE (expr) == COMPLEX_EXPR)
9295 rpart = TREE_OPERAND (expr, 0);
9296 ipart = TREE_OPERAND (expr, 1);
9298 else if (TREE_CODE (expr) == COMPLEX_CST)
9300 rpart = TREE_REALPART (expr);
9301 ipart = TREE_IMAGPART (expr);
/* ... otherwise evaluate EXPR once (save_expr) and extract both parts
   from the saved value.  */
9305 expr = save_expr (expr);
9306 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9307 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is used twice in the squares below; save_expr avoids
   double evaluation.  */
9310 rpart = save_expr (rpart);
9311 ipart = save_expr (ipart);
9312 tem = fold_build2 (PLUS_EXPR, itype,
9313 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9314 fold_build2 (MULT_EXPR, itype, ipart, ipart));
/* z * conj(z) = (r*r + i*i) + 0i.  */
9315 return fold_build2 (COMPLEX_EXPR, type, tem,
9316 fold_convert (itype, integer_zero_node));
9320 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9321 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9322 guarantees that P and N have the same least significant log2(M) bits.
9323 N is not otherwise constrained. In particular, N is not normalized to
9324 0 <= N < M as is common. In general, the precise value of P is unknown.
9325 M is chosen as large as possible such that constant N can be determined.
9327 Returns M and sets *RESIDUE to N. */
9329 static unsigned HOST_WIDE_INT
9330 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9332 enum tree_code code;
9336 code = TREE_CODE (expr);
/* Case 1: the address of an object.  The modulus is the declared
   alignment (in bytes) of the underlying decl, and the residue is the
   constant byte offset of the referenced component within it.  */
9337 if (code == ADDR_EXPR)
9339 expr = TREE_OPERAND (expr, 0);
9340 if (handled_component_p (expr))
9342 HOST_WIDE_INT bitsize, bitpos;
9344 enum machine_mode mode;
9345 int unsignedp, volatilep;
9347 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9348 &mode, &unsignedp, &volatilep, false);
9349 *residue = bitpos / BITS_PER_UNIT;
/* Fold a compile-time-constant variable offset into the residue;
   anything more complicated defeats the analysis.  */
9352 if (TREE_CODE (offset) == INTEGER_CST)
9353 *residue += TREE_INT_CST_LOW (offset);
9355 /* We don't handle more complicated offset expressions. */
9361 return DECL_ALIGN_UNIT (expr);
/* Case 2: PTR p+ X.  Recurse on PTR, then tighten or shift the result
   according to what is known about the addend X.  */
9363 else if (code == POINTER_PLUS_EXPR)
9366 unsigned HOST_WIDE_INT modulus;
9367 enum tree_code inner_code;
9369 op0 = TREE_OPERAND (expr, 0);
9371 modulus = get_pointer_modulus_and_residue (op0, residue);
9373 op1 = TREE_OPERAND (expr, 1);
9375 inner_code = TREE_CODE (op1);
/* A constant addend only shifts the residue; the modulus is kept.  */
9376 if (inner_code == INTEGER_CST)
9378 *residue += TREE_INT_CST_LOW (op1);
/* An addend of the form A * C constrains the modulus to the largest
   power of two dividing C.  */
9381 else if (inner_code == MULT_EXPR)
9383 op1 = TREE_OPERAND (op1, 1);
9384 if (TREE_CODE (op1) == INTEGER_CST)
9386 unsigned HOST_WIDE_INT align;
9388 /* Compute the greatest power-of-2 divisor of op1. */
9389 align = TREE_INT_CST_LOW (op1);
9392 /* If align is non-zero and less than *modulus, replace
9393 *modulus with align. If align is 0, then either op1 is 0
9394 or the greatest power-of-2 divisor of op1 doesn't fit in an
9395 unsigned HOST_WIDE_INT. In either case, no additional
9396 constraint is imposed. */
9398 modulus = MIN (modulus, align);
9405 /* If we get here, we were unable to determine anything useful about the
9411 /* Fold a binary expression of code CODE and type TYPE with operands
9412 OP0 and OP1. Return the folded expression if folding is
9413 successful. Otherwise, return NULL_TREE. */
9416 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9418 enum tree_code_class kind = TREE_CODE_CLASS (code);
9419 tree arg0, arg1, tem;
9420 tree t1 = NULL_TREE;
9421 bool strict_overflow_p;
9423 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9424 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9425 && TREE_CODE_LENGTH (code) == 2
9427 && op1 != NULL_TREE);
9432 /* Strip any conversions that don't change the mode. This is
9433 safe for every expression, except for a comparison expression
9434 because its signedness is derived from its operands. So, in
9435 the latter case, only strip conversions that don't change the
9438 Note that this is done as an internal manipulation within the
9439 constant folder, in order to find the simplest representation
9440 of the arguments so that their form can be studied. In any
9441 cases, the appropriate type conversions should be put back in
9442 the tree that will get out of the constant folder. */
9444 if (kind == tcc_comparison)
9446 STRIP_SIGN_NOPS (arg0);
9447 STRIP_SIGN_NOPS (arg1);
9455 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9456 constant but we can't do arithmetic on them. */
9457 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9458 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9459 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9460 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9461 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9462 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9464 if (kind == tcc_binary)
9466 /* Make sure type and arg0 have the same saturating flag. */
9467 gcc_assert (TYPE_SATURATING (type)
9468 == TYPE_SATURATING (TREE_TYPE (arg0)));
9469 tem = const_binop (code, arg0, arg1, 0);
9471 else if (kind == tcc_comparison)
9472 tem = fold_relational_const (code, type, arg0, arg1);
9476 if (tem != NULL_TREE)
9478 if (TREE_TYPE (tem) != type)
9479 tem = fold_convert (type, tem);
9484 /* If this is a commutative operation, and ARG0 is a constant, move it
9485 to ARG1 to reduce the number of tests below. */
9486 if (commutative_tree_code (code)
9487 && tree_swap_operands_p (arg0, arg1, true))
9488 return fold_build2 (code, type, op1, op0);
9490 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9492 First check for cases where an arithmetic operation is applied to a
9493 compound, conditional, or comparison operation. Push the arithmetic
9494 operation inside the compound or conditional to see if any folding
9495 can then be done. Convert comparison to conditional for this purpose.
9496 The also optimizes non-constant cases that used to be done in
9499 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9500 one of the operands is a comparison and the other is a comparison, a
9501 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9502 code below would make the expression more complex. Change it to a
9503 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9504 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9506 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9507 || code == EQ_EXPR || code == NE_EXPR)
9508 && ((truth_value_p (TREE_CODE (arg0))
9509 && (truth_value_p (TREE_CODE (arg1))
9510 || (TREE_CODE (arg1) == BIT_AND_EXPR
9511 && integer_onep (TREE_OPERAND (arg1, 1)))))
9512 || (truth_value_p (TREE_CODE (arg1))
9513 && (truth_value_p (TREE_CODE (arg0))
9514 || (TREE_CODE (arg0) == BIT_AND_EXPR
9515 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9517 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9518 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9521 fold_convert (boolean_type_node, arg0),
9522 fold_convert (boolean_type_node, arg1));
9524 if (code == EQ_EXPR)
9525 tem = invert_truthvalue (tem);
9527 return fold_convert (type, tem);
9530 if (TREE_CODE_CLASS (code) == tcc_binary
9531 || TREE_CODE_CLASS (code) == tcc_comparison)
9533 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9534 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9535 fold_build2 (code, type,
9536 fold_convert (TREE_TYPE (op0),
9537 TREE_OPERAND (arg0, 1)),
9539 if (TREE_CODE (arg1) == COMPOUND_EXPR
9540 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9541 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9542 fold_build2 (code, type, op0,
9543 fold_convert (TREE_TYPE (op1),
9544 TREE_OPERAND (arg1, 1))));
9546 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9548 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9550 /*cond_first_p=*/1);
9551 if (tem != NULL_TREE)
9555 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9557 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9559 /*cond_first_p=*/0);
9560 if (tem != NULL_TREE)
9567 case POINTER_PLUS_EXPR:
9568 /* 0 +p index -> (type)index */
9569 if (integer_zerop (arg0))
9570 return non_lvalue (fold_convert (type, arg1));
9572 /* PTR +p 0 -> PTR */
9573 if (integer_zerop (arg1))
9574 return non_lvalue (fold_convert (type, arg0));
9576 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9577 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9578 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9579 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9580 fold_convert (sizetype, arg1),
9581 fold_convert (sizetype, arg0)));
9583 /* index +p PTR -> PTR +p index */
9584 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9585 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9586 return fold_build2 (POINTER_PLUS_EXPR, type,
9587 fold_convert (type, arg1),
9588 fold_convert (sizetype, arg0));
9590 /* (PTR +p B) +p A -> PTR +p (B + A) */
9591 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9594 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9595 tree arg00 = TREE_OPERAND (arg0, 0);
9596 inner = fold_build2 (PLUS_EXPR, sizetype,
9597 arg01, fold_convert (sizetype, arg1));
9598 return fold_convert (type,
9599 fold_build2 (POINTER_PLUS_EXPR,
9600 TREE_TYPE (arg00), arg00, inner));
9603 /* PTR_CST +p CST -> CST1 */
9604 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9605 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9607 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9608 of the array. Loop optimizer sometimes produce this type of
9610 if (TREE_CODE (arg0) == ADDR_EXPR)
9612 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9614 return fold_convert (type, tem);
9620 /* PTR + INT -> (INT)(PTR p+ INT) */
9621 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9622 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9623 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9626 fold_convert (sizetype, arg1)));
9627 /* INT + PTR -> (INT)(PTR p+ INT) */
9628 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9629 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9630 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9633 fold_convert (sizetype, arg0)));
9634 /* A + (-B) -> A - B */
9635 if (TREE_CODE (arg1) == NEGATE_EXPR)
9636 return fold_build2 (MINUS_EXPR, type,
9637 fold_convert (type, arg0),
9638 fold_convert (type, TREE_OPERAND (arg1, 0)));
9639 /* (-A) + B -> B - A */
9640 if (TREE_CODE (arg0) == NEGATE_EXPR
9641 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9642 return fold_build2 (MINUS_EXPR, type,
9643 fold_convert (type, arg1),
9644 fold_convert (type, TREE_OPERAND (arg0, 0)));
9646 if (INTEGRAL_TYPE_P (type))
9648 /* Convert ~A + 1 to -A. */
9649 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9650 && integer_onep (arg1))
9651 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9654 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9655 && !TYPE_OVERFLOW_TRAPS (type))
9657 tree tem = TREE_OPERAND (arg0, 0);
9660 if (operand_equal_p (tem, arg1, 0))
9662 t1 = build_int_cst_type (type, -1);
9663 return omit_one_operand (type, t1, arg1);
9668 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9669 && !TYPE_OVERFLOW_TRAPS (type))
9671 tree tem = TREE_OPERAND (arg1, 0);
9674 if (operand_equal_p (arg0, tem, 0))
9676 t1 = build_int_cst_type (type, -1);
9677 return omit_one_operand (type, t1, arg0);
9681 /* X + (X / CST) * -CST is X % CST. */
9682 if (TREE_CODE (arg1) == MULT_EXPR
9683 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9684 && operand_equal_p (arg0,
9685 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9687 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9688 tree cst1 = TREE_OPERAND (arg1, 1);
9689 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9690 if (sum && integer_zerop (sum))
9691 return fold_convert (type,
9692 fold_build2 (TRUNC_MOD_EXPR,
9693 TREE_TYPE (arg0), arg0, cst0));
9697 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9698 same or one. Make sure type is not saturating.
9699 fold_plusminus_mult_expr will re-associate. */
9700 if ((TREE_CODE (arg0) == MULT_EXPR
9701 || TREE_CODE (arg1) == MULT_EXPR)
9702 && !TYPE_SATURATING (type)
9703 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9705 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9710 if (! FLOAT_TYPE_P (type))
9712 if (integer_zerop (arg1))
9713 return non_lvalue (fold_convert (type, arg0));
9715 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9716 with a constant, and the two constants have no bits in common,
9717 we should treat this as a BIT_IOR_EXPR since this may produce more
9719 if (TREE_CODE (arg0) == BIT_AND_EXPR
9720 && TREE_CODE (arg1) == BIT_AND_EXPR
9721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9722 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9723 && integer_zerop (const_binop (BIT_AND_EXPR,
9724 TREE_OPERAND (arg0, 1),
9725 TREE_OPERAND (arg1, 1), 0)))
9727 code = BIT_IOR_EXPR;
9731 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9732 (plus (plus (mult) (mult)) (foo)) so that we can
9733 take advantage of the factoring cases below. */
9734 if (((TREE_CODE (arg0) == PLUS_EXPR
9735 || TREE_CODE (arg0) == MINUS_EXPR)
9736 && TREE_CODE (arg1) == MULT_EXPR)
9737 || ((TREE_CODE (arg1) == PLUS_EXPR
9738 || TREE_CODE (arg1) == MINUS_EXPR)
9739 && TREE_CODE (arg0) == MULT_EXPR))
9741 tree parg0, parg1, parg, marg;
9742 enum tree_code pcode;
9744 if (TREE_CODE (arg1) == MULT_EXPR)
9745 parg = arg0, marg = arg1;
9747 parg = arg1, marg = arg0;
9748 pcode = TREE_CODE (parg);
9749 parg0 = TREE_OPERAND (parg, 0);
9750 parg1 = TREE_OPERAND (parg, 1);
9754 if (TREE_CODE (parg0) == MULT_EXPR
9755 && TREE_CODE (parg1) != MULT_EXPR)
9756 return fold_build2 (pcode, type,
9757 fold_build2 (PLUS_EXPR, type,
9758 fold_convert (type, parg0),
9759 fold_convert (type, marg)),
9760 fold_convert (type, parg1));
9761 if (TREE_CODE (parg0) != MULT_EXPR
9762 && TREE_CODE (parg1) == MULT_EXPR)
9763 return fold_build2 (PLUS_EXPR, type,
9764 fold_convert (type, parg0),
9765 fold_build2 (pcode, type,
9766 fold_convert (type, marg),
9773 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9774 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9775 return non_lvalue (fold_convert (type, arg0));
9777 /* Likewise if the operands are reversed. */
9778 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9779 return non_lvalue (fold_convert (type, arg1));
9781 /* Convert X + -C into X - C. */
9782 if (TREE_CODE (arg1) == REAL_CST
9783 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9785 tem = fold_negate_const (arg1, type);
9786 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9787 return fold_build2 (MINUS_EXPR, type,
9788 fold_convert (type, arg0),
9789 fold_convert (type, tem));
9792 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9793 to __complex__ ( x, y ). This is not the same for SNaNs or
9794 if signed zeros are involved. */
9795 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9796 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9797 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9799 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9800 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9801 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9802 bool arg0rz = false, arg0iz = false;
9803 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9804 || (arg0i && (arg0iz = real_zerop (arg0i))))
9806 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9807 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9808 if (arg0rz && arg1i && real_zerop (arg1i))
9810 tree rp = arg1r ? arg1r
9811 : build1 (REALPART_EXPR, rtype, arg1);
9812 tree ip = arg0i ? arg0i
9813 : build1 (IMAGPART_EXPR, rtype, arg0);
9814 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9816 else if (arg0iz && arg1r && real_zerop (arg1r))
9818 tree rp = arg0r ? arg0r
9819 : build1 (REALPART_EXPR, rtype, arg0);
9820 tree ip = arg1i ? arg1i
9821 : build1 (IMAGPART_EXPR, rtype, arg1);
9822 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9827 if (flag_unsafe_math_optimizations
9828 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9829 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9830 && (tem = distribute_real_division (code, type, arg0, arg1)))
9833 /* Convert x+x into x*2.0. */
9834 if (operand_equal_p (arg0, arg1, 0)
9835 && SCALAR_FLOAT_TYPE_P (type))
9836 return fold_build2 (MULT_EXPR, type, arg0,
9837 build_real (type, dconst2));
9839 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9840 We associate floats only if the user has specified
9841 -fassociative-math. */
9842 if (flag_associative_math
9843 && TREE_CODE (arg1) == PLUS_EXPR
9844 && TREE_CODE (arg0) != MULT_EXPR)
9846 tree tree10 = TREE_OPERAND (arg1, 0);
9847 tree tree11 = TREE_OPERAND (arg1, 1);
9848 if (TREE_CODE (tree11) == MULT_EXPR
9849 && TREE_CODE (tree10) == MULT_EXPR)
9852 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9853 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9856 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9857 We associate floats only if the user has specified
9858 -fassociative-math. */
9859 if (flag_associative_math
9860 && TREE_CODE (arg0) == PLUS_EXPR
9861 && TREE_CODE (arg1) != MULT_EXPR)
9863 tree tree00 = TREE_OPERAND (arg0, 0);
9864 tree tree01 = TREE_OPERAND (arg0, 1);
9865 if (TREE_CODE (tree01) == MULT_EXPR
9866 && TREE_CODE (tree00) == MULT_EXPR)
9869 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9870 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9876 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9877 is a rotate of A by C1 bits. */
9878 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9879 is a rotate of A by B bits. */
9881 enum tree_code code0, code1;
9882 code0 = TREE_CODE (arg0);
9883 code1 = TREE_CODE (arg1);
9884 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9885 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9886 && operand_equal_p (TREE_OPERAND (arg0, 0),
9887 TREE_OPERAND (arg1, 0), 0)
9888 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9890 tree tree01, tree11;
9891 enum tree_code code01, code11;
9893 tree01 = TREE_OPERAND (arg0, 1);
9894 tree11 = TREE_OPERAND (arg1, 1);
9895 STRIP_NOPS (tree01);
9896 STRIP_NOPS (tree11);
9897 code01 = TREE_CODE (tree01);
9898 code11 = TREE_CODE (tree11);
9899 if (code01 == INTEGER_CST
9900 && code11 == INTEGER_CST
9901 && TREE_INT_CST_HIGH (tree01) == 0
9902 && TREE_INT_CST_HIGH (tree11) == 0
9903 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9904 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9905 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9906 code0 == LSHIFT_EXPR ? tree01 : tree11);
9907 else if (code11 == MINUS_EXPR)
9909 tree tree110, tree111;
9910 tree110 = TREE_OPERAND (tree11, 0);
9911 tree111 = TREE_OPERAND (tree11, 1);
9912 STRIP_NOPS (tree110);
9913 STRIP_NOPS (tree111);
9914 if (TREE_CODE (tree110) == INTEGER_CST
9915 && 0 == compare_tree_int (tree110,
9917 (TREE_TYPE (TREE_OPERAND
9919 && operand_equal_p (tree01, tree111, 0))
9920 return build2 ((code0 == LSHIFT_EXPR
9923 type, TREE_OPERAND (arg0, 0), tree01);
9925 else if (code01 == MINUS_EXPR)
9927 tree tree010, tree011;
9928 tree010 = TREE_OPERAND (tree01, 0);
9929 tree011 = TREE_OPERAND (tree01, 1);
9930 STRIP_NOPS (tree010);
9931 STRIP_NOPS (tree011);
9932 if (TREE_CODE (tree010) == INTEGER_CST
9933 && 0 == compare_tree_int (tree010,
9935 (TREE_TYPE (TREE_OPERAND
9937 && operand_equal_p (tree11, tree011, 0))
9938 return build2 ((code0 != LSHIFT_EXPR
9941 type, TREE_OPERAND (arg0, 0), tree11);
9947 /* In most languages, can't associate operations on floats through
9948 parentheses. Rather than remember where the parentheses were, we
9949 don't associate floats at all, unless the user has specified
9951 And, we need to make sure type is not saturating. */
9953 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9954 && !TYPE_SATURATING (type))
9956 tree var0, con0, lit0, minus_lit0;
9957 tree var1, con1, lit1, minus_lit1;
9960 /* Split both trees into variables, constants, and literals. Then
9961 associate each group together, the constants with literals,
9962 then the result with variables. This increases the chances of
9963 literals being recombined later and of generating relocatable
9964 expressions for the sum of a constant and literal. */
9965 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9966 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9967 code == MINUS_EXPR);
9969 /* With undefined overflow we can only associate constants
9970 with one variable. */
9971 if ((POINTER_TYPE_P (type)
9972 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9978 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9979 tmp0 = TREE_OPERAND (tmp0, 0);
9980 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9981 tmp1 = TREE_OPERAND (tmp1, 0);
9982 /* The only case we can still associate with two variables
9983 is if they are the same, modulo negation. */
9984 if (!operand_equal_p (tmp0, tmp1, 0))
9988 /* Only do something if we found more than two objects. Otherwise,
9989 nothing has changed and we risk infinite recursion. */
9991 && (2 < ((var0 != 0) + (var1 != 0)
9992 + (con0 != 0) + (con1 != 0)
9993 + (lit0 != 0) + (lit1 != 0)
9994 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9996 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9997 if (code == MINUS_EXPR)
10000 var0 = associate_trees (var0, var1, code, type);
10001 con0 = associate_trees (con0, con1, code, type);
10002 lit0 = associate_trees (lit0, lit1, code, type);
10003 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10005 /* Preserve the MINUS_EXPR if the negative part of the literal is
10006 greater than the positive part. Otherwise, the multiplicative
10007 folding code (i.e extract_muldiv) may be fooled in case
10008 unsigned constants are subtracted, like in the following
10009 example: ((X*2 + 4) - 8U)/2. */
10010 if (minus_lit0 && lit0)
10012 if (TREE_CODE (lit0) == INTEGER_CST
10013 && TREE_CODE (minus_lit0) == INTEGER_CST
10014 && tree_int_cst_lt (lit0, minus_lit0))
10016 minus_lit0 = associate_trees (minus_lit0, lit0,
10022 lit0 = associate_trees (lit0, minus_lit0,
10030 return fold_convert (type,
10031 associate_trees (var0, minus_lit0,
10032 MINUS_EXPR, type));
10035 con0 = associate_trees (con0, minus_lit0,
10037 return fold_convert (type,
10038 associate_trees (var0, con0,
10043 con0 = associate_trees (con0, lit0, code, type);
10044 return fold_convert (type, associate_trees (var0, con0,
10052 /* Pointer simplifications for subtraction, simple reassociations. */
10053 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10055 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10056 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10057 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10059 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10060 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10061 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10062 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10063 return fold_build2 (PLUS_EXPR, type,
10064 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10065 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10067 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10068 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10070 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10071 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10072 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10074 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10077 /* A - (-B) -> A + B */
10078 if (TREE_CODE (arg1) == NEGATE_EXPR)
10079 return fold_build2 (PLUS_EXPR, type, op0,
10080 fold_convert (type, TREE_OPERAND (arg1, 0)));
10081 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10082 if (TREE_CODE (arg0) == NEGATE_EXPR
10083 && (FLOAT_TYPE_P (type)
10084 || INTEGRAL_TYPE_P (type))
10085 && negate_expr_p (arg1)
10086 && reorder_operands_p (arg0, arg1))
10087 return fold_build2 (MINUS_EXPR, type,
10088 fold_convert (type, negate_expr (arg1)),
10089 fold_convert (type, TREE_OPERAND (arg0, 0)));
10090 /* Convert -A - 1 to ~A. */
10091 if (INTEGRAL_TYPE_P (type)
10092 && TREE_CODE (arg0) == NEGATE_EXPR
10093 && integer_onep (arg1)
10094 && !TYPE_OVERFLOW_TRAPS (type))
10095 return fold_build1 (BIT_NOT_EXPR, type,
10096 fold_convert (type, TREE_OPERAND (arg0, 0)));
10098 /* Convert -1 - A to ~A. */
10099 if (INTEGRAL_TYPE_P (type)
10100 && integer_all_onesp (arg0))
10101 return fold_build1 (BIT_NOT_EXPR, type, op1);
10104 /* X - (X / CST) * CST is X % CST. */
10105 if (INTEGRAL_TYPE_P (type)
10106 && TREE_CODE (arg1) == MULT_EXPR
10107 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10108 && operand_equal_p (arg0,
10109 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10110 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10111 TREE_OPERAND (arg1, 1), 0))
10112 return fold_convert (type,
10113 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10114 arg0, TREE_OPERAND (arg1, 1)));
10116 if (! FLOAT_TYPE_P (type))
10118 if (integer_zerop (arg0))
10119 return negate_expr (fold_convert (type, arg1));
10120 if (integer_zerop (arg1))
10121 return non_lvalue (fold_convert (type, arg0));
10123 /* Fold A - (A & B) into ~B & A. */
10124 if (!TREE_SIDE_EFFECTS (arg0)
10125 && TREE_CODE (arg1) == BIT_AND_EXPR)
10127 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10129 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10130 return fold_build2 (BIT_AND_EXPR, type,
10131 fold_build1 (BIT_NOT_EXPR, type, arg10),
10132 fold_convert (type, arg0));
10134 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10136 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10137 return fold_build2 (BIT_AND_EXPR, type,
10138 fold_build1 (BIT_NOT_EXPR, type, arg11),
10139 fold_convert (type, arg0));
10143 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10144 any power of 2 minus 1. */
10145 if (TREE_CODE (arg0) == BIT_AND_EXPR
10146 && TREE_CODE (arg1) == BIT_AND_EXPR
10147 && operand_equal_p (TREE_OPERAND (arg0, 0),
10148 TREE_OPERAND (arg1, 0), 0))
10150 tree mask0 = TREE_OPERAND (arg0, 1);
10151 tree mask1 = TREE_OPERAND (arg1, 1);
10152 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10154 if (operand_equal_p (tem, mask1, 0))
10156 tem = fold_build2 (BIT_XOR_EXPR, type,
10157 TREE_OPERAND (arg0, 0), mask1);
10158 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10163 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10164 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10165 return non_lvalue (fold_convert (type, arg0));
10167 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10168 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10169 (-ARG1 + ARG0) reduces to -ARG1. */
10170 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10171 return negate_expr (fold_convert (type, arg1));
10173 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10174 __complex__ ( x, -y ). This is not the same for SNaNs or if
10175 signed zeros are involved. */
10176 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10177 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10178 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10180 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10181 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10182 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10183 bool arg0rz = false, arg0iz = false;
10184 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10185 || (arg0i && (arg0iz = real_zerop (arg0i))))
10187 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10188 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10189 if (arg0rz && arg1i && real_zerop (arg1i))
10191 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10193 : build1 (REALPART_EXPR, rtype, arg1));
10194 tree ip = arg0i ? arg0i
10195 : build1 (IMAGPART_EXPR, rtype, arg0);
10196 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10198 else if (arg0iz && arg1r && real_zerop (arg1r))
10200 tree rp = arg0r ? arg0r
10201 : build1 (REALPART_EXPR, rtype, arg0);
10202 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10204 : build1 (IMAGPART_EXPR, rtype, arg1));
10205 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10210 /* Fold &x - &x. This can happen from &x.foo - &x.
10211 This is unsafe for certain floats even in non-IEEE formats.
10212 In IEEE, it is unsafe because it does wrong for NaNs.
10213 Also note that operand_equal_p is always false if an operand
10216 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10217 && operand_equal_p (arg0, arg1, 0))
10218 return fold_convert (type, integer_zero_node);
10220 /* A - B -> A + (-B) if B is easily negatable. */
10221 if (negate_expr_p (arg1)
10222 && ((FLOAT_TYPE_P (type)
10223 /* Avoid this transformation if B is a positive REAL_CST. */
10224 && (TREE_CODE (arg1) != REAL_CST
10225 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10226 || INTEGRAL_TYPE_P (type)))
10227 return fold_build2 (PLUS_EXPR, type,
10228 fold_convert (type, arg0),
10229 fold_convert (type, negate_expr (arg1)));
10231 /* Try folding difference of addresses. */
10233 HOST_WIDE_INT diff;
10235 if ((TREE_CODE (arg0) == ADDR_EXPR
10236 || TREE_CODE (arg1) == ADDR_EXPR)
10237 && ptr_difference_const (arg0, arg1, &diff))
10238 return build_int_cst_type (type, diff);
10241 /* Fold &a[i] - &a[j] to i-j. */
10242 if (TREE_CODE (arg0) == ADDR_EXPR
10243 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10244 && TREE_CODE (arg1) == ADDR_EXPR
10245 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10247 tree aref0 = TREE_OPERAND (arg0, 0);
10248 tree aref1 = TREE_OPERAND (arg1, 0);
10249 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10250 TREE_OPERAND (aref1, 0), 0))
10252 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10253 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10254 tree esz = array_ref_element_size (aref0);
10255 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10256 return fold_build2 (MULT_EXPR, type, diff,
10257 fold_convert (type, esz));
10262 if (flag_unsafe_math_optimizations
10263 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10264 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10265 && (tem = distribute_real_division (code, type, arg0, arg1)))
10268 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10269 same or one. Make sure type is not saturating.
10270 fold_plusminus_mult_expr will re-associate. */
10271 if ((TREE_CODE (arg0) == MULT_EXPR
10272 || TREE_CODE (arg1) == MULT_EXPR)
10273 && !TYPE_SATURATING (type)
10274 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10276 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10284 /* (-A) * (-B) -> A * B */
10285 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10286 return fold_build2 (MULT_EXPR, type,
10287 fold_convert (type, TREE_OPERAND (arg0, 0)),
10288 fold_convert (type, negate_expr (arg1)));
10289 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10290 return fold_build2 (MULT_EXPR, type,
10291 fold_convert (type, negate_expr (arg0)),
10292 fold_convert (type, TREE_OPERAND (arg1, 0)));
10294 if (! FLOAT_TYPE_P (type))
10296 if (integer_zerop (arg1))
10297 return omit_one_operand (type, arg1, arg0);
10298 if (integer_onep (arg1))
10299 return non_lvalue (fold_convert (type, arg0));
10300 /* Transform x * -1 into -x. Make sure to do the negation
10301 on the original operand with conversions not stripped
10302 because we can only strip non-sign-changing conversions. */
10303 if (integer_all_onesp (arg1))
10304 return fold_convert (type, negate_expr (op0));
10305 /* Transform x * -C into -x * C if x is easily negatable. */
10306 if (TREE_CODE (arg1) == INTEGER_CST
10307 && tree_int_cst_sgn (arg1) == -1
10308 && negate_expr_p (arg0)
10309 && (tem = negate_expr (arg1)) != arg1
10310 && !TREE_OVERFLOW (tem))
10311 return fold_build2 (MULT_EXPR, type,
10312 fold_convert (type, negate_expr (arg0)), tem);
10314 /* (a * (1 << b)) is (a << b) */
10315 if (TREE_CODE (arg1) == LSHIFT_EXPR
10316 && integer_onep (TREE_OPERAND (arg1, 0)))
10317 return fold_build2 (LSHIFT_EXPR, type, op0,
10318 TREE_OPERAND (arg1, 1));
10319 if (TREE_CODE (arg0) == LSHIFT_EXPR
10320 && integer_onep (TREE_OPERAND (arg0, 0)))
10321 return fold_build2 (LSHIFT_EXPR, type, op1,
10322 TREE_OPERAND (arg0, 1));
10324 strict_overflow_p = false;
10325 if (TREE_CODE (arg1) == INTEGER_CST
10326 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10327 &strict_overflow_p)))
10329 if (strict_overflow_p)
10330 fold_overflow_warning (("assuming signed overflow does not "
10331 "occur when simplifying "
10333 WARN_STRICT_OVERFLOW_MISC);
10334 return fold_convert (type, tem);
10337 /* Optimize z * conj(z) for integer complex numbers. */
10338 if (TREE_CODE (arg0) == CONJ_EXPR
10339 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10340 return fold_mult_zconjz (type, arg1);
10341 if (TREE_CODE (arg1) == CONJ_EXPR
10342 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10343 return fold_mult_zconjz (type, arg0);
10347 /* Maybe fold x * 0 to 0. The expressions aren't the same
10348 when x is NaN, since x * 0 is also NaN. Nor are they the
10349 same in modes with signed zeros, since multiplying a
10350 negative value by 0 gives -0, not +0. */
10351 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10352 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10353 && real_zerop (arg1))
10354 return omit_one_operand (type, arg1, arg0);
10355 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10356 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10357 && real_onep (arg1))
10358 return non_lvalue (fold_convert (type, arg0));
10360 /* Transform x * -1.0 into -x. */
10361 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10362 && real_minus_onep (arg1))
10363 return fold_convert (type, negate_expr (arg0));
10365 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10366 the result for floating point types due to rounding so it is applied
10367 only if -fassociative-math was specified. */
10368 if (flag_associative_math
10369 && TREE_CODE (arg0) == RDIV_EXPR
10370 && TREE_CODE (arg1) == REAL_CST
10371 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10373 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10376 return fold_build2 (RDIV_EXPR, type, tem,
10377 TREE_OPERAND (arg0, 1));
10380 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10381 if (operand_equal_p (arg0, arg1, 0))
10383 tree tem = fold_strip_sign_ops (arg0);
10384 if (tem != NULL_TREE)
10386 tem = fold_convert (type, tem);
10387 return fold_build2 (MULT_EXPR, type, tem, tem);
10391 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10392 This is not the same for NaNs or if signed zeros are
10394 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10395 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10396 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10397 && TREE_CODE (arg1) == COMPLEX_CST
10398 && real_zerop (TREE_REALPART (arg1)))
10400 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10401 if (real_onep (TREE_IMAGPART (arg1)))
10402 return fold_build2 (COMPLEX_EXPR, type,
10403 negate_expr (fold_build1 (IMAGPART_EXPR,
10405 fold_build1 (REALPART_EXPR, rtype, arg0));
10406 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10407 return fold_build2 (COMPLEX_EXPR, type,
10408 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10409 negate_expr (fold_build1 (REALPART_EXPR,
10413 /* Optimize z * conj(z) for floating point complex numbers.
10414 Guarded by flag_unsafe_math_optimizations as non-finite
10415 imaginary components don't produce scalar results. */
10416 if (flag_unsafe_math_optimizations
10417 && TREE_CODE (arg0) == CONJ_EXPR
10418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10419 return fold_mult_zconjz (type, arg1);
10420 if (flag_unsafe_math_optimizations
10421 && TREE_CODE (arg1) == CONJ_EXPR
10422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10423 return fold_mult_zconjz (type, arg0);
10425 if (flag_unsafe_math_optimizations)
10427 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10428 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10430 /* Optimizations of root(...)*root(...). */
10431 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10434 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10435 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10437 /* Optimize sqrt(x)*sqrt(x) as x. */
10438 if (BUILTIN_SQRT_P (fcode0)
10439 && operand_equal_p (arg00, arg10, 0)
10440 && ! HONOR_SNANS (TYPE_MODE (type)))
10443 /* Optimize root(x)*root(y) as root(x*y). */
10444 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10445 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10446 return build_call_expr (rootfn, 1, arg);
10449 /* Optimize expN(x)*expN(y) as expN(x+y). */
10450 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10452 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10453 tree arg = fold_build2 (PLUS_EXPR, type,
10454 CALL_EXPR_ARG (arg0, 0),
10455 CALL_EXPR_ARG (arg1, 0));
10456 return build_call_expr (expfn, 1, arg);
10459 /* Optimizations of pow(...)*pow(...). */
10460 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10461 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10462 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10464 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10465 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10466 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10467 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10469 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10470 if (operand_equal_p (arg01, arg11, 0))
10472 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10473 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10474 return build_call_expr (powfn, 2, arg, arg01);
10477 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10478 if (operand_equal_p (arg00, arg10, 0))
10480 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10481 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10482 return build_call_expr (powfn, 2, arg00, arg);
10486 /* Optimize tan(x)*cos(x) as sin(x). */
10487 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10488 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10489 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10490 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10491 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10492 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10493 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10494 CALL_EXPR_ARG (arg1, 0), 0))
10496 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10498 if (sinfn != NULL_TREE)
10499 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10502 /* Optimize x*pow(x,c) as pow(x,c+1). */
10503 if (fcode1 == BUILT_IN_POW
10504 || fcode1 == BUILT_IN_POWF
10505 || fcode1 == BUILT_IN_POWL)
10507 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10508 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10509 if (TREE_CODE (arg11) == REAL_CST
10510 && !TREE_OVERFLOW (arg11)
10511 && operand_equal_p (arg0, arg10, 0))
10513 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10517 c = TREE_REAL_CST (arg11);
10518 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10519 arg = build_real (type, c);
10520 return build_call_expr (powfn, 2, arg0, arg);
10524 /* Optimize pow(x,c)*x as pow(x,c+1). */
10525 if (fcode0 == BUILT_IN_POW
10526 || fcode0 == BUILT_IN_POWF
10527 || fcode0 == BUILT_IN_POWL)
10529 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10530 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10531 if (TREE_CODE (arg01) == REAL_CST
10532 && !TREE_OVERFLOW (arg01)
10533 && operand_equal_p (arg1, arg00, 0))
10535 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10539 c = TREE_REAL_CST (arg01);
10540 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10541 arg = build_real (type, c);
10542 return build_call_expr (powfn, 2, arg1, arg);
10546 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10547 if (! optimize_size
10548 && operand_equal_p (arg0, arg1, 0))
10550 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10554 tree arg = build_real (type, dconst2);
10555 return build_call_expr (powfn, 2, arg0, arg);
10564 if (integer_all_onesp (arg1))
10565 return omit_one_operand (type, arg1, arg0);
10566 if (integer_zerop (arg1))
10567 return non_lvalue (fold_convert (type, arg0));
10568 if (operand_equal_p (arg0, arg1, 0))
10569 return non_lvalue (fold_convert (type, arg0));
10571 /* ~X | X is -1. */
10572 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10573 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10575 t1 = fold_convert (type, integer_zero_node);
10576 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10577 return omit_one_operand (type, t1, arg1);
10580 /* X | ~X is -1. */
10581 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10582 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10584 t1 = fold_convert (type, integer_zero_node);
10585 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10586 return omit_one_operand (type, t1, arg0);
10589 /* Canonicalize (X & C1) | C2. */
10590 if (TREE_CODE (arg0) == BIT_AND_EXPR
10591 && TREE_CODE (arg1) == INTEGER_CST
10592 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10594 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10595 int width = TYPE_PRECISION (type), w;
10596 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10597 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10598 hi2 = TREE_INT_CST_HIGH (arg1);
10599 lo2 = TREE_INT_CST_LOW (arg1);
10601 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10602 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10603 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10605 if (width > HOST_BITS_PER_WIDE_INT)
10607 mhi = (unsigned HOST_WIDE_INT) -1
10608 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10614 mlo = (unsigned HOST_WIDE_INT) -1
10615 >> (HOST_BITS_PER_WIDE_INT - width);
10618 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10619 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10620 return fold_build2 (BIT_IOR_EXPR, type,
10621 TREE_OPERAND (arg0, 0), arg1);
10623 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10624 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10625 mode which allows further optimizations. */
10632 for (w = BITS_PER_UNIT;
10633 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10636 unsigned HOST_WIDE_INT mask
10637 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10638 if (((lo1 | lo2) & mask) == mask
10639 && (lo1 & ~mask) == 0 && hi1 == 0)
10646 if (hi3 != hi1 || lo3 != lo1)
10647 return fold_build2 (BIT_IOR_EXPR, type,
10648 fold_build2 (BIT_AND_EXPR, type,
10649 TREE_OPERAND (arg0, 0),
10650 build_int_cst_wide (type,
10655 /* (X & Y) | Y is (X, Y). */
10656 if (TREE_CODE (arg0) == BIT_AND_EXPR
10657 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10658 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10659 /* (X & Y) | X is (Y, X). */
10660 if (TREE_CODE (arg0) == BIT_AND_EXPR
10661 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10662 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10663 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10664 /* X | (X & Y) is (Y, X). */
10665 if (TREE_CODE (arg1) == BIT_AND_EXPR
10666 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10667 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10668 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10669 /* X | (Y & X) is (Y, X). */
10670 if (TREE_CODE (arg1) == BIT_AND_EXPR
10671 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10672 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10673 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10675 t1 = distribute_bit_expr (code, type, arg0, arg1);
10676 if (t1 != NULL_TREE)
10679 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10681 This results in more efficient code for machines without a NAND
10682 instruction. Combine will canonicalize to the first form
10683 which will allow use of NAND instructions provided by the
10684 backend if they exist. */
10685 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10686 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10688 return fold_build1 (BIT_NOT_EXPR, type,
10689 build2 (BIT_AND_EXPR, type,
10690 TREE_OPERAND (arg0, 0),
10691 TREE_OPERAND (arg1, 0)));
10694 /* See if this can be simplified into a rotate first. If that
10695 is unsuccessful continue in the association code. */
10699 if (integer_zerop (arg1))
10700 return non_lvalue (fold_convert (type, arg0));
10701 if (integer_all_onesp (arg1))
10702 return fold_build1 (BIT_NOT_EXPR, type, op0);
10703 if (operand_equal_p (arg0, arg1, 0))
10704 return omit_one_operand (type, integer_zero_node, arg0);
10706 /* ~X ^ X is -1. */
10707 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10708 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10710 t1 = fold_convert (type, integer_zero_node);
10711 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10712 return omit_one_operand (type, t1, arg1);
10715 /* X ^ ~X is -1. */
10716 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10719 t1 = fold_convert (type, integer_zero_node);
10720 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10721 return omit_one_operand (type, t1, arg0);
10724 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10725 with a constant, and the two constants have no bits in common,
10726 we should treat this as a BIT_IOR_EXPR since this may produce more
10727 simplifications. */
10728 if (TREE_CODE (arg0) == BIT_AND_EXPR
10729 && TREE_CODE (arg1) == BIT_AND_EXPR
10730 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10731 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10732 && integer_zerop (const_binop (BIT_AND_EXPR,
10733 TREE_OPERAND (arg0, 1),
10734 TREE_OPERAND (arg1, 1), 0)))
10736 code = BIT_IOR_EXPR;
10740 /* (X | Y) ^ X -> Y & ~ X*/
10741 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10742 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10744 tree t2 = TREE_OPERAND (arg0, 1);
10745 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10747 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10748 fold_convert (type, t1));
10752 /* (Y | X) ^ X -> Y & ~ X*/
10753 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10754 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10756 tree t2 = TREE_OPERAND (arg0, 0);
10757 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10759 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10760 fold_convert (type, t1));
10764 /* X ^ (X | Y) -> Y & ~ X*/
10765 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10766 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10768 tree t2 = TREE_OPERAND (arg1, 1);
10769 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10771 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10772 fold_convert (type, t1));
10776 /* X ^ (Y | X) -> Y & ~ X*/
10777 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10778 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10780 tree t2 = TREE_OPERAND (arg1, 0);
10781 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10783 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10784 fold_convert (type, t1));
10788 /* Convert ~X ^ ~Y to X ^ Y. */
10789 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10790 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10791 return fold_build2 (code, type,
10792 fold_convert (type, TREE_OPERAND (arg0, 0)),
10793 fold_convert (type, TREE_OPERAND (arg1, 0)));
10795 /* Convert ~X ^ C to X ^ ~C. */
10796 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10797 && TREE_CODE (arg1) == INTEGER_CST)
10798 return fold_build2 (code, type,
10799 fold_convert (type, TREE_OPERAND (arg0, 0)),
10800 fold_build1 (BIT_NOT_EXPR, type, arg1));
10802 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10803 if (TREE_CODE (arg0) == BIT_AND_EXPR
10804 && integer_onep (TREE_OPERAND (arg0, 1))
10805 && integer_onep (arg1))
10806 return fold_build2 (EQ_EXPR, type, arg0,
10807 build_int_cst (TREE_TYPE (arg0), 0));
10809 /* Fold (X & Y) ^ Y as ~X & Y. */
10810 if (TREE_CODE (arg0) == BIT_AND_EXPR
10811 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10813 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10814 return fold_build2 (BIT_AND_EXPR, type,
10815 fold_build1 (BIT_NOT_EXPR, type, tem),
10816 fold_convert (type, arg1));
10818 /* Fold (X & Y) ^ X as ~Y & X. */
10819 if (TREE_CODE (arg0) == BIT_AND_EXPR
10820 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10821 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10823 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10824 return fold_build2 (BIT_AND_EXPR, type,
10825 fold_build1 (BIT_NOT_EXPR, type, tem),
10826 fold_convert (type, arg1));
10828 /* Fold X ^ (X & Y) as X & ~Y. */
10829 if (TREE_CODE (arg1) == BIT_AND_EXPR
10830 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10832 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10833 return fold_build2 (BIT_AND_EXPR, type,
10834 fold_convert (type, arg0),
10835 fold_build1 (BIT_NOT_EXPR, type, tem));
10837 /* Fold X ^ (Y & X) as ~Y & X. */
10838 if (TREE_CODE (arg1) == BIT_AND_EXPR
10839 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10840 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10842 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10843 return fold_build2 (BIT_AND_EXPR, type,
10844 fold_build1 (BIT_NOT_EXPR, type, tem),
10845 fold_convert (type, arg0));
10848 /* See if this can be simplified into a rotate first. If that
10849 is unsuccessful continue in the association code. */
10853 if (integer_all_onesp (arg1))
10854 return non_lvalue (fold_convert (type, arg0));
10855 if (integer_zerop (arg1))
10856 return omit_one_operand (type, arg1, arg0);
10857 if (operand_equal_p (arg0, arg1, 0))
10858 return non_lvalue (fold_convert (type, arg0));
10860 /* ~X & X is always zero. */
10861 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10863 return omit_one_operand (type, integer_zero_node, arg1);
10865 /* X & ~X is always zero. */
10866 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10867 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10868 return omit_one_operand (type, integer_zero_node, arg0);
10870 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10871 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10872 && TREE_CODE (arg1) == INTEGER_CST
10873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10875 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10876 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10877 TREE_OPERAND (arg0, 0), tmp1);
10878 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10879 TREE_OPERAND (arg0, 1), tmp1);
10880 return fold_convert (type,
10881 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10885 /* (X | Y) & Y is (X, Y). */
10886 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10887 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10888 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10889 /* (X | Y) & X is (Y, X). */
10890 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10891 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10892 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10893 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10894 /* X & (X | Y) is (Y, X). */
10895 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10896 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10897 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10898 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10899 /* X & (Y | X) is (Y, X). */
10900 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10901 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10902 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10903 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10905 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10906 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10907 && integer_onep (TREE_OPERAND (arg0, 1))
10908 && integer_onep (arg1))
10910 tem = TREE_OPERAND (arg0, 0);
10911 return fold_build2 (EQ_EXPR, type,
10912 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10913 build_int_cst (TREE_TYPE (tem), 1)),
10914 build_int_cst (TREE_TYPE (tem), 0));
10916 /* Fold ~X & 1 as (X & 1) == 0. */
10917 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10918 && integer_onep (arg1))
10920 tem = TREE_OPERAND (arg0, 0);
10921 return fold_build2 (EQ_EXPR, type,
10922 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10923 build_int_cst (TREE_TYPE (tem), 1)),
10924 build_int_cst (TREE_TYPE (tem), 0));
10927 /* Fold (X ^ Y) & Y as ~X & Y. */
10928 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10929 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10931 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10932 return fold_build2 (BIT_AND_EXPR, type,
10933 fold_build1 (BIT_NOT_EXPR, type, tem),
10934 fold_convert (type, arg1));
10936 /* Fold (X ^ Y) & X as ~Y & X. */
10937 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10938 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10939 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10941 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10942 return fold_build2 (BIT_AND_EXPR, type,
10943 fold_build1 (BIT_NOT_EXPR, type, tem),
10944 fold_convert (type, arg1));
10946 /* Fold X & (X ^ Y) as X & ~Y. */
10947 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10950 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10951 return fold_build2 (BIT_AND_EXPR, type,
10952 fold_convert (type, arg0),
10953 fold_build1 (BIT_NOT_EXPR, type, tem));
10955 /* Fold X & (Y ^ X) as ~Y & X. */
10956 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10957 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10958 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10960 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10961 return fold_build2 (BIT_AND_EXPR, type,
10962 fold_build1 (BIT_NOT_EXPR, type, tem),
10963 fold_convert (type, arg0));
10966 t1 = distribute_bit_expr (code, type, arg0, arg1);
10967 if (t1 != NULL_TREE)
10969 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10970 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10971 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10974 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10976 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10977 && (~TREE_INT_CST_LOW (arg1)
10978 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10979 return fold_convert (type, TREE_OPERAND (arg0, 0));
10982 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10984 This results in more efficient code for machines without a NOR
10985 instruction. Combine will canonicalize to the first form
10986 which will allow use of NOR instructions provided by the
10987 backend if they exist. */
10988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10989 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10991 return fold_build1 (BIT_NOT_EXPR, type,
10992 build2 (BIT_IOR_EXPR, type,
10993 fold_convert (type,
10994 TREE_OPERAND (arg0, 0)),
10995 fold_convert (type,
10996 TREE_OPERAND (arg1, 0))));
10999 /* If arg0 is derived from the address of an object or function, we may
11000 be able to fold this expression using the object or function's
11002 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11004 unsigned HOST_WIDE_INT modulus, residue;
11005 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11007 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11009 /* This works because modulus is a power of 2. If this weren't the
11010 case, we'd have to replace it by its greatest power-of-2
11011 divisor: modulus & -modulus. */
11013 return build_int_cst (type, residue & low);
11016 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11017 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11018 if the new mask might be further optimized. */
11019 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11020 || TREE_CODE (arg0) == RSHIFT_EXPR)
11021 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11022 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11023 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11024 < TYPE_PRECISION (TREE_TYPE (arg0))
11025 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11026 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11028 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11029 unsigned HOST_WIDE_INT mask
11030 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11031 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11032 tree shift_type = TREE_TYPE (arg0);
11034 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11035 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11036 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11037 && TYPE_PRECISION (TREE_TYPE (arg0))
11038 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11040 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11041 tree arg00 = TREE_OPERAND (arg0, 0);
11042 /* See if more bits can be proven as zero because of
11044 if (TREE_CODE (arg00) == NOP_EXPR
11045 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11047 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11048 if (TYPE_PRECISION (inner_type)
11049 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11050 && TYPE_PRECISION (inner_type) < prec)
11052 prec = TYPE_PRECISION (inner_type);
11053 /* See if we can shorten the right shift. */
11055 shift_type = inner_type;
11058 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11059 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11060 zerobits <<= prec - shiftc;
11061 /* For arithmetic shift if sign bit could be set, zerobits
11062 can contain actually sign bits, so no transformation is
11063 possible, unless MASK masks them all away. In that
11064 case the shift needs to be converted into logical shift. */
11065 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11066 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11068 if ((mask & zerobits) == 0)
11069 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11075 /* ((X << 16) & 0xff00) is (X, 0). */
11076 if ((mask & zerobits) == mask)
11077 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11079 newmask = mask | zerobits;
11080 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11084 /* Only do the transformation if NEWMASK is some integer
11086 for (prec = BITS_PER_UNIT;
11087 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11088 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11090 if (prec < HOST_BITS_PER_WIDE_INT
11091 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11093 if (shift_type != TREE_TYPE (arg0))
11095 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11096 fold_convert (shift_type,
11097 TREE_OPERAND (arg0, 0)),
11098 TREE_OPERAND (arg0, 1));
11099 tem = fold_convert (type, tem);
11103 return fold_build2 (BIT_AND_EXPR, type, tem,
11104 build_int_cst_type (TREE_TYPE (op1),
11113 /* Don't touch a floating-point divide by zero unless the mode
11114 of the constant can represent infinity. */
11115 if (TREE_CODE (arg1) == REAL_CST
11116 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11117 && real_zerop (arg1))
11120 /* Optimize A / A to 1.0 if we don't care about
11121 NaNs or Infinities. Skip the transformation
11122 for non-real operands. */
11123 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11124 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11125 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11126 && operand_equal_p (arg0, arg1, 0))
11128 tree r = build_real (TREE_TYPE (arg0), dconst1);
11130 return omit_two_operands (type, r, arg0, arg1);
11133 /* The complex version of the above A / A optimization. */
11134 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11135 && operand_equal_p (arg0, arg1, 0))
11137 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11138 if (! HONOR_NANS (TYPE_MODE (elem_type))
11139 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11141 tree r = build_real (elem_type, dconst1);
11142 /* omit_two_operands will call fold_convert for us. */
11143 return omit_two_operands (type, r, arg0, arg1);
11147 /* (-A) / (-B) -> A / B */
11148 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11149 return fold_build2 (RDIV_EXPR, type,
11150 TREE_OPERAND (arg0, 0),
11151 negate_expr (arg1));
11152 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11153 return fold_build2 (RDIV_EXPR, type,
11154 negate_expr (arg0),
11155 TREE_OPERAND (arg1, 0));
11157 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11158 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11159 && real_onep (arg1))
11160 return non_lvalue (fold_convert (type, arg0));
11162 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11163 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11164 && real_minus_onep (arg1))
11165 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11167 /* If ARG1 is a constant, we can convert this to a multiply by the
11168 reciprocal. This does not have the same rounding properties,
11169 so only do this if -freciprocal-math. We can actually
11170 always safely do it if ARG1 is a power of two, but it's hard to
11171 tell if it is or not in a portable manner. */
11172 if (TREE_CODE (arg1) == REAL_CST)
11174 if (flag_reciprocal_math
11175 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11177 return fold_build2 (MULT_EXPR, type, arg0, tem);
11178 /* Find the reciprocal if optimizing and the result is exact. */
11182 r = TREE_REAL_CST (arg1);
11183 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11185 tem = build_real (type, r);
11186 return fold_build2 (MULT_EXPR, type,
11187 fold_convert (type, arg0), tem);
11191 /* Convert A/B/C to A/(B*C). */
11192 if (flag_reciprocal_math
11193 && TREE_CODE (arg0) == RDIV_EXPR)
11194 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11195 fold_build2 (MULT_EXPR, type,
11196 TREE_OPERAND (arg0, 1), arg1));
11198 /* Convert A/(B/C) to (A/B)*C. */
11199 if (flag_reciprocal_math
11200 && TREE_CODE (arg1) == RDIV_EXPR)
11201 return fold_build2 (MULT_EXPR, type,
11202 fold_build2 (RDIV_EXPR, type, arg0,
11203 TREE_OPERAND (arg1, 0)),
11204 TREE_OPERAND (arg1, 1));
11206 /* Convert C1/(X*C2) into (C1/C2)/X. */
11207 if (flag_reciprocal_math
11208 && TREE_CODE (arg1) == MULT_EXPR
11209 && TREE_CODE (arg0) == REAL_CST
11210 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11212 tree tem = const_binop (RDIV_EXPR, arg0,
11213 TREE_OPERAND (arg1, 1), 0);
11215 return fold_build2 (RDIV_EXPR, type, tem,
11216 TREE_OPERAND (arg1, 0));
11219 if (flag_unsafe_math_optimizations)
11221 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11222 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11224 /* Optimize sin(x)/cos(x) as tan(x). */
11225 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11226 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11227 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11228 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11229 CALL_EXPR_ARG (arg1, 0), 0))
11231 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11233 if (tanfn != NULL_TREE)
11234 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11237 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11238 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11239 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11240 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11241 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11242 CALL_EXPR_ARG (arg1, 0), 0))
11244 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11246 if (tanfn != NULL_TREE)
11248 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11249 return fold_build2 (RDIV_EXPR, type,
11250 build_real (type, dconst1), tmp);
11254 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11255 NaNs or Infinities. */
11256 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11257 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11258 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11260 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11261 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11263 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11264 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11265 && operand_equal_p (arg00, arg01, 0))
11267 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11269 if (cosfn != NULL_TREE)
11270 return build_call_expr (cosfn, 1, arg00);
11274 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11275 NaNs or Infinities. */
11276 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11277 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11278 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11280 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11281 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11283 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11284 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11285 && operand_equal_p (arg00, arg01, 0))
11287 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11289 if (cosfn != NULL_TREE)
11291 tree tmp = build_call_expr (cosfn, 1, arg00);
11292 return fold_build2 (RDIV_EXPR, type,
11293 build_real (type, dconst1),
11299 /* Optimize pow(x,c)/x as pow(x,c-1). */
11300 if (fcode0 == BUILT_IN_POW
11301 || fcode0 == BUILT_IN_POWF
11302 || fcode0 == BUILT_IN_POWL)
11304 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11305 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11306 if (TREE_CODE (arg01) == REAL_CST
11307 && !TREE_OVERFLOW (arg01)
11308 && operand_equal_p (arg1, arg00, 0))
11310 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11314 c = TREE_REAL_CST (arg01);
11315 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11316 arg = build_real (type, c);
11317 return build_call_expr (powfn, 2, arg1, arg);
11321 /* Optimize a/root(b/c) into a*root(c/b). */
11322 if (BUILTIN_ROOT_P (fcode1))
11324 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11326 if (TREE_CODE (rootarg) == RDIV_EXPR)
11328 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11329 tree b = TREE_OPERAND (rootarg, 0);
11330 tree c = TREE_OPERAND (rootarg, 1);
11332 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11334 tmp = build_call_expr (rootfn, 1, tmp);
11335 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11339 /* Optimize x/expN(y) into x*expN(-y). */
11340 if (BUILTIN_EXPONENT_P (fcode1))
11342 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11343 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11344 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11345 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11348 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11349 if (fcode1 == BUILT_IN_POW
11350 || fcode1 == BUILT_IN_POWF
11351 || fcode1 == BUILT_IN_POWL)
11353 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11354 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11355 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11356 tree neg11 = fold_convert (type, negate_expr (arg11));
11357 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11358 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11363 case TRUNC_DIV_EXPR:
11364 case FLOOR_DIV_EXPR:
11365 /* Simplify A / (B << N) where A and B are positive and B is
11366 a power of 2, to A >> (N + log2(B)). */
11367 strict_overflow_p = false;
11368 if (TREE_CODE (arg1) == LSHIFT_EXPR
11369 && (TYPE_UNSIGNED (type)
11370 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11372 tree sval = TREE_OPERAND (arg1, 0);
11373 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11375 tree sh_cnt = TREE_OPERAND (arg1, 1);
11376 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11378 if (strict_overflow_p)
11379 fold_overflow_warning (("assuming signed overflow does not "
11380 "occur when simplifying A / (B << N)"),
11381 WARN_STRICT_OVERFLOW_MISC);
11383 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11384 sh_cnt, build_int_cst (NULL_TREE, pow2));
11385 return fold_build2 (RSHIFT_EXPR, type,
11386 fold_convert (type, arg0), sh_cnt);
11390 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11391 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11392 if (INTEGRAL_TYPE_P (type)
11393 && TYPE_UNSIGNED (type)
11394 && code == FLOOR_DIV_EXPR)
11395 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11399 case ROUND_DIV_EXPR:
11400 case CEIL_DIV_EXPR:
11401 case EXACT_DIV_EXPR:
11402 if (integer_onep (arg1))
11403 return non_lvalue (fold_convert (type, arg0));
11404 if (integer_zerop (arg1))
11406 /* X / -1 is -X. */
11407 if (!TYPE_UNSIGNED (type)
11408 && TREE_CODE (arg1) == INTEGER_CST
11409 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11410 && TREE_INT_CST_HIGH (arg1) == -1)
11411 return fold_convert (type, negate_expr (arg0));
11413 /* Convert -A / -B to A / B when the type is signed and overflow is
11415 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11416 && TREE_CODE (arg0) == NEGATE_EXPR
11417 && negate_expr_p (arg1))
11419 if (INTEGRAL_TYPE_P (type))
11420 fold_overflow_warning (("assuming signed overflow does not occur "
11421 "when distributing negation across "
11423 WARN_STRICT_OVERFLOW_MISC);
11424 return fold_build2 (code, type,
11425 fold_convert (type, TREE_OPERAND (arg0, 0)),
11426 negate_expr (arg1));
11428 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11429 && TREE_CODE (arg1) == NEGATE_EXPR
11430 && negate_expr_p (arg0))
11432 if (INTEGRAL_TYPE_P (type))
11433 fold_overflow_warning (("assuming signed overflow does not occur "
11434 "when distributing negation across "
11436 WARN_STRICT_OVERFLOW_MISC);
11437 return fold_build2 (code, type, negate_expr (arg0),
11438 TREE_OPERAND (arg1, 0));
11441 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11442 operation, EXACT_DIV_EXPR.
11444 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11445 At one time others generated faster code, it's not clear if they do
11446 after the last round to changes to the DIV code in expmed.c. */
11447 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11448 && multiple_of_p (type, arg0, arg1))
11449 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11451 strict_overflow_p = false;
11452 if (TREE_CODE (arg1) == INTEGER_CST
11453 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11454 &strict_overflow_p)))
11456 if (strict_overflow_p)
11457 fold_overflow_warning (("assuming signed overflow does not occur "
11458 "when simplifying division"),
11459 WARN_STRICT_OVERFLOW_MISC);
11460 return fold_convert (type, tem);
11465 case CEIL_MOD_EXPR:
11466 case FLOOR_MOD_EXPR:
11467 case ROUND_MOD_EXPR:
11468 case TRUNC_MOD_EXPR:
11469 /* X % 1 is always zero, but be sure to preserve any side
11471 if (integer_onep (arg1))
11472 return omit_one_operand (type, integer_zero_node, arg0);
11474 /* X % 0, return X % 0 unchanged so that we can get the
11475 proper warnings and errors. */
11476 if (integer_zerop (arg1))
11479 /* 0 % X is always zero, but be sure to preserve any side
11480 effects in X. Place this after checking for X == 0. */
11481 if (integer_zerop (arg0))
11482 return omit_one_operand (type, integer_zero_node, arg1);
11484 /* X % -1 is zero. */
11485 if (!TYPE_UNSIGNED (type)
11486 && TREE_CODE (arg1) == INTEGER_CST
11487 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11488 && TREE_INT_CST_HIGH (arg1) == -1)
11489 return omit_one_operand (type, integer_zero_node, arg0);
11491 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11492 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11493 strict_overflow_p = false;
11494 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11495 && (TYPE_UNSIGNED (type)
11496 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11499 /* Also optimize A % (C << N) where C is a power of 2,
11500 to A & ((C << N) - 1). */
11501 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11502 c = TREE_OPERAND (arg1, 0);
11504 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11506 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11507 build_int_cst (TREE_TYPE (arg1), 1));
11508 if (strict_overflow_p)
11509 fold_overflow_warning (("assuming signed overflow does not "
11510 "occur when simplifying "
11511 "X % (power of two)"),
11512 WARN_STRICT_OVERFLOW_MISC);
11513 return fold_build2 (BIT_AND_EXPR, type,
11514 fold_convert (type, arg0),
11515 fold_convert (type, mask));
11519 /* X % -C is the same as X % C. */
11520 if (code == TRUNC_MOD_EXPR
11521 && !TYPE_UNSIGNED (type)
11522 && TREE_CODE (arg1) == INTEGER_CST
11523 && !TREE_OVERFLOW (arg1)
11524 && TREE_INT_CST_HIGH (arg1) < 0
11525 && !TYPE_OVERFLOW_TRAPS (type)
11526 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11527 && !sign_bit_p (arg1, arg1))
11528 return fold_build2 (code, type, fold_convert (type, arg0),
11529 fold_convert (type, negate_expr (arg1)));
11531 /* X % -Y is the same as X % Y. */
11532 if (code == TRUNC_MOD_EXPR
11533 && !TYPE_UNSIGNED (type)
11534 && TREE_CODE (arg1) == NEGATE_EXPR
11535 && !TYPE_OVERFLOW_TRAPS (type))
11536 return fold_build2 (code, type, fold_convert (type, arg0),
11537 fold_convert (type, TREE_OPERAND (arg1, 0)));
11539 if (TREE_CODE (arg1) == INTEGER_CST
11540 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11541 &strict_overflow_p)))
11543 if (strict_overflow_p)
11544 fold_overflow_warning (("assuming signed overflow does not occur "
11545 "when simplifying modulos"),
11546 WARN_STRICT_OVERFLOW_MISC);
11547 return fold_convert (type, tem);
11554 if (integer_all_onesp (arg0))
11555 return omit_one_operand (type, arg0, arg1);
11559 /* Optimize -1 >> x for arithmetic right shifts. */
11560 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11561 return omit_one_operand (type, arg0, arg1);
11562 /* ... fall through ... */
11566 if (integer_zerop (arg1))
11567 return non_lvalue (fold_convert (type, arg0));
11568 if (integer_zerop (arg0))
11569 return omit_one_operand (type, arg0, arg1);
11571 /* Since negative shift count is not well-defined,
11572 don't try to compute it in the compiler. */
11573 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11576 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11577 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11578 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11579 && host_integerp (TREE_OPERAND (arg0, 1), false)
11580 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11582 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11583 + TREE_INT_CST_LOW (arg1));
11585 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11586 being well defined. */
11587 if (low >= TYPE_PRECISION (type))
11589 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11590 low = low % TYPE_PRECISION (type);
11591 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11592 return build_int_cst (type, 0);
11594 low = TYPE_PRECISION (type) - 1;
11597 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11598 build_int_cst (type, low));
11601 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11602 into x & ((unsigned)-1 >> c) for unsigned types. */
11603 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11604 || (TYPE_UNSIGNED (type)
11605 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11606 && host_integerp (arg1, false)
11607 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11608 && host_integerp (TREE_OPERAND (arg0, 1), false)
11609 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11611 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11612 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11618 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11620 lshift = build_int_cst (type, -1);
11621 lshift = int_const_binop (code, lshift, arg1, 0);
11623 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11627 /* Rewrite an LROTATE_EXPR by a constant into an
11628 RROTATE_EXPR by a new constant. */
11629 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11631 tree tem = build_int_cst (TREE_TYPE (arg1),
11632 GET_MODE_BITSIZE (TYPE_MODE (type)));
11633 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11634 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11637 /* If we have a rotate of a bit operation with the rotate count and
11638 the second operand of the bit operation both constant,
11639 permute the two operations. */
11640 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11641 && (TREE_CODE (arg0) == BIT_AND_EXPR
11642 || TREE_CODE (arg0) == BIT_IOR_EXPR
11643 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11645 return fold_build2 (TREE_CODE (arg0), type,
11646 fold_build2 (code, type,
11647 TREE_OPERAND (arg0, 0), arg1),
11648 fold_build2 (code, type,
11649 TREE_OPERAND (arg0, 1), arg1));
11651 /* Two consecutive rotates adding up to the width of the mode can
11653 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11654 && TREE_CODE (arg0) == RROTATE_EXPR
11655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11656 && TREE_INT_CST_HIGH (arg1) == 0
11657 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11658 && ((TREE_INT_CST_LOW (arg1)
11659 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11660 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11661 return TREE_OPERAND (arg0, 0);
11663 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11664 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11665 if the latter can be further optimized. */
11666 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11667 && TREE_CODE (arg0) == BIT_AND_EXPR
11668 && TREE_CODE (arg1) == INTEGER_CST
11669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11671 tree mask = fold_build2 (code, type,
11672 fold_convert (type, TREE_OPERAND (arg0, 1)),
11674 tree shift = fold_build2 (code, type,
11675 fold_convert (type, TREE_OPERAND (arg0, 0)),
11677 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11685 if (operand_equal_p (arg0, arg1, 0))
11686 return omit_one_operand (type, arg0, arg1);
11687 if (INTEGRAL_TYPE_P (type)
11688 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11689 return omit_one_operand (type, arg1, arg0);
11690 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11696 if (operand_equal_p (arg0, arg1, 0))
11697 return omit_one_operand (type, arg0, arg1);
11698 if (INTEGRAL_TYPE_P (type)
11699 && TYPE_MAX_VALUE (type)
11700 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11701 return omit_one_operand (type, arg1, arg0);
11702 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11707 case TRUTH_ANDIF_EXPR:
11708 /* Note that the operands of this must be ints
11709 and their values must be 0 or 1.
11710 ("true" is a fixed value perhaps depending on the language.) */
11711 /* If first arg is constant zero, return it. */
11712 if (integer_zerop (arg0))
11713 return fold_convert (type, arg0);
11714 case TRUTH_AND_EXPR:
11715 /* If either arg is constant true, drop it. */
11716 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11717 return non_lvalue (fold_convert (type, arg1));
11718 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11719 /* Preserve sequence points. */
11720 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11721 return non_lvalue (fold_convert (type, arg0));
11722 /* If second arg is constant zero, result is zero, but first arg
11723 must be evaluated. */
11724 if (integer_zerop (arg1))
11725 return omit_one_operand (type, arg1, arg0);
11726 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11727 case will be handled here. */
11728 if (integer_zerop (arg0))
11729 return omit_one_operand (type, arg0, arg1);
11731 /* !X && X is always false. */
11732 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11733 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11734 return omit_one_operand (type, integer_zero_node, arg1);
11735 /* X && !X is always false. */
11736 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11737 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11738 return omit_one_operand (type, integer_zero_node, arg0);
11740 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11741 means A >= Y && A != MAX, but in this case we know that
11744 if (!TREE_SIDE_EFFECTS (arg0)
11745 && !TREE_SIDE_EFFECTS (arg1))
11747 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11748 if (tem && !operand_equal_p (tem, arg0, 0))
11749 return fold_build2 (code, type, tem, arg1);
11751 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11752 if (tem && !operand_equal_p (tem, arg1, 0))
11753 return fold_build2 (code, type, arg0, tem);
11757 /* We only do these simplifications if we are optimizing. */
11761 /* Check for things like (A || B) && (A || C). We can convert this
11762 to A || (B && C). Note that either operator can be any of the four
11763 truth and/or operations and the transformation will still be
11764 valid. Also note that we only care about order for the
11765 ANDIF and ORIF operators. If B contains side effects, this
11766 might change the truth-value of A. */
11767 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11768 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11769 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11770 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11771 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11772 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11774 tree a00 = TREE_OPERAND (arg0, 0);
11775 tree a01 = TREE_OPERAND (arg0, 1);
11776 tree a10 = TREE_OPERAND (arg1, 0);
11777 tree a11 = TREE_OPERAND (arg1, 1);
11778 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11779 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11780 && (code == TRUTH_AND_EXPR
11781 || code == TRUTH_OR_EXPR));
11783 if (operand_equal_p (a00, a10, 0))
11784 return fold_build2 (TREE_CODE (arg0), type, a00,
11785 fold_build2 (code, type, a01, a11));
11786 else if (commutative && operand_equal_p (a00, a11, 0))
11787 return fold_build2 (TREE_CODE (arg0), type, a00,
11788 fold_build2 (code, type, a01, a10));
11789 else if (commutative && operand_equal_p (a01, a10, 0))
11790 return fold_build2 (TREE_CODE (arg0), type, a01,
11791 fold_build2 (code, type, a00, a11));
11793 /* This case if tricky because we must either have commutative
11794 operators or else A10 must not have side-effects. */
11796 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11797 && operand_equal_p (a01, a11, 0))
11798 return fold_build2 (TREE_CODE (arg0), type,
11799 fold_build2 (code, type, a00, a10),
11803 /* See if we can build a range comparison. */
11804 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11807 /* Check for the possibility of merging component references. If our
11808 lhs is another similar operation, try to merge its rhs with our
11809 rhs. Then try to merge our lhs and rhs. */
11810 if (TREE_CODE (arg0) == code
11811 && 0 != (tem = fold_truthop (code, type,
11812 TREE_OPERAND (arg0, 1), arg1)))
11813 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11815 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11820 case TRUTH_ORIF_EXPR:
11821 /* Note that the operands of this must be ints
11822 and their values must be 0 or true.
11823 ("true" is a fixed value perhaps depending on the language.) */
11824 /* If first arg is constant true, return it. */
11825 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11826 return fold_convert (type, arg0);
11827 case TRUTH_OR_EXPR:
11828 /* If either arg is constant zero, drop it. */
11829 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11830 return non_lvalue (fold_convert (type, arg1));
11831 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11832 /* Preserve sequence points. */
11833 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11834 return non_lvalue (fold_convert (type, arg0));
11835 /* If second arg is constant true, result is true, but we must
11836 evaluate first arg. */
11837 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11838 return omit_one_operand (type, arg1, arg0);
11839 /* Likewise for first arg, but note this only occurs here for
11841 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11842 return omit_one_operand (type, arg0, arg1);
11844 /* !X || X is always true. */
11845 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11846 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11847 return omit_one_operand (type, integer_one_node, arg1);
11848 /* X || !X is always true. */
11849 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11850 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11851 return omit_one_operand (type, integer_one_node, arg0);
11855 case TRUTH_XOR_EXPR:
11856 /* If the second arg is constant zero, drop it. */
11857 if (integer_zerop (arg1))
11858 return non_lvalue (fold_convert (type, arg0));
11859 /* If the second arg is constant true, this is a logical inversion. */
11860 if (integer_onep (arg1))
11862 /* Only call invert_truthvalue if operand is a truth value. */
11863 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11864 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11866 tem = invert_truthvalue (arg0);
11867 return non_lvalue (fold_convert (type, tem));
11869 /* Identical arguments cancel to zero. */
11870 if (operand_equal_p (arg0, arg1, 0))
11871 return omit_one_operand (type, integer_zero_node, arg0);
11873 /* !X ^ X is always true. */
11874 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11875 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11876 return omit_one_operand (type, integer_one_node, arg1);
11878 /* X ^ !X is always true. */
11879 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11880 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11881 return omit_one_operand (type, integer_one_node, arg0);
11887 tem = fold_comparison (code, type, op0, op1);
11888 if (tem != NULL_TREE)
11891 /* bool_var != 0 becomes bool_var. */
11892 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11893 && code == NE_EXPR)
11894 return non_lvalue (fold_convert (type, arg0));
11896 /* bool_var == 1 becomes bool_var. */
11897 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11898 && code == EQ_EXPR)
11899 return non_lvalue (fold_convert (type, arg0));
11901 /* bool_var != 1 becomes !bool_var. */
11902 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11903 && code == NE_EXPR)
11904 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11906 /* bool_var == 0 becomes !bool_var. */
11907 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11908 && code == EQ_EXPR)
11909 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11911 /* If this is an equality comparison of the address of two non-weak,
11912 unaliased symbols neither of which are extern (since we do not
11913 have access to attributes for externs), then we know the result. */
11914 if (TREE_CODE (arg0) == ADDR_EXPR
11915 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11916 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11917 && ! lookup_attribute ("alias",
11918 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11919 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11920 && TREE_CODE (arg1) == ADDR_EXPR
11921 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11922 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11923 && ! lookup_attribute ("alias",
11924 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11925 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11927 /* We know that we're looking at the address of two
11928 non-weak, unaliased, static _DECL nodes.
11930 It is both wasteful and incorrect to call operand_equal_p
11931 to compare the two ADDR_EXPR nodes. It is wasteful in that
11932 all we need to do is test pointer equality for the arguments
11933 to the two ADDR_EXPR nodes. It is incorrect to use
11934 operand_equal_p as that function is NOT equivalent to a
11935 C equality test. It can in fact return false for two
11936 objects which would test as equal using the C equality
11938 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11939 return constant_boolean_node (equal
11940 ? code == EQ_EXPR : code != EQ_EXPR,
11944 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11945 a MINUS_EXPR of a constant, we can convert it into a comparison with
11946 a revised constant as long as no overflow occurs. */
11947 if (TREE_CODE (arg1) == INTEGER_CST
11948 && (TREE_CODE (arg0) == PLUS_EXPR
11949 || TREE_CODE (arg0) == MINUS_EXPR)
11950 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11951 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11952 ? MINUS_EXPR : PLUS_EXPR,
11953 fold_convert (TREE_TYPE (arg0), arg1),
11954 TREE_OPERAND (arg0, 1), 0))
11955 && !TREE_OVERFLOW (tem))
11956 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11958 /* Similarly for a NEGATE_EXPR. */
11959 if (TREE_CODE (arg0) == NEGATE_EXPR
11960 && TREE_CODE (arg1) == INTEGER_CST
11961 && 0 != (tem = negate_expr (arg1))
11962 && TREE_CODE (tem) == INTEGER_CST
11963 && !TREE_OVERFLOW (tem))
11964 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11966 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11967 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11968 && TREE_CODE (arg1) == INTEGER_CST
11969 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11970 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11971 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11972 fold_convert (TREE_TYPE (arg0), arg1),
11973 TREE_OPERAND (arg0, 1)));
11975 /* Transform comparisons of the form X +- C CMP X. */
11976 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11977 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11979 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11980 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11982 tree cst = TREE_OPERAND (arg0, 1);
11984 if (code == EQ_EXPR
11985 && !integer_zerop (cst))
11986 return omit_two_operands (type, boolean_false_node,
11987 TREE_OPERAND (arg0, 0), arg1);
11989 return omit_two_operands (type, boolean_true_node,
11990 TREE_OPERAND (arg0, 0), arg1);
11993 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11994 for !=. Don't do this for ordered comparisons due to overflow. */
11995 if (TREE_CODE (arg0) == MINUS_EXPR
11996 && integer_zerop (arg1))
11997 return fold_build2 (code, type,
11998 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12000 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12001 if (TREE_CODE (arg0) == ABS_EXPR
12002 && (integer_zerop (arg1) || real_zerop (arg1)))
12003 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12005 /* If this is an EQ or NE comparison with zero and ARG0 is
12006 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12007 two operations, but the latter can be done in one less insn
12008 on machines that have only two-operand insns or on which a
12009 constant cannot be the first operand. */
12010 if (TREE_CODE (arg0) == BIT_AND_EXPR
12011 && integer_zerop (arg1))
12013 tree arg00 = TREE_OPERAND (arg0, 0);
12014 tree arg01 = TREE_OPERAND (arg0, 1);
12015 if (TREE_CODE (arg00) == LSHIFT_EXPR
12016 && integer_onep (TREE_OPERAND (arg00, 0)))
12018 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12019 arg01, TREE_OPERAND (arg00, 1));
12020 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12021 build_int_cst (TREE_TYPE (arg0), 1));
12022 return fold_build2 (code, type,
12023 fold_convert (TREE_TYPE (arg1), tem), arg1);
12025 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12026 && integer_onep (TREE_OPERAND (arg01, 0)))
12028 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12029 arg00, TREE_OPERAND (arg01, 1));
12030 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12031 build_int_cst (TREE_TYPE (arg0), 1));
12032 return fold_build2 (code, type,
12033 fold_convert (TREE_TYPE (arg1), tem), arg1);
12037 /* If this is an NE or EQ comparison of zero against the result of a
12038 signed MOD operation whose second operand is a power of 2, make
12039 the MOD operation unsigned since it is simpler and equivalent. */
12040 if (integer_zerop (arg1)
12041 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12042 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12043 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12044 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12045 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12046 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12048 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12049 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12050 fold_convert (newtype,
12051 TREE_OPERAND (arg0, 0)),
12052 fold_convert (newtype,
12053 TREE_OPERAND (arg0, 1)));
12055 return fold_build2 (code, type, newmod,
12056 fold_convert (newtype, arg1));
12059 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12060 C1 is a valid shift constant, and C2 is a power of two, i.e.
12062 if (TREE_CODE (arg0) == BIT_AND_EXPR
12063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12064 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12066 && integer_pow2p (TREE_OPERAND (arg0, 1))
12067 && integer_zerop (arg1))
12069 tree itype = TREE_TYPE (arg0);
12070 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12071 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12073 /* Check for a valid shift count. */
12074 if (TREE_INT_CST_HIGH (arg001) == 0
12075 && TREE_INT_CST_LOW (arg001) < prec)
12077 tree arg01 = TREE_OPERAND (arg0, 1);
12078 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12079 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12080 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12081 can be rewritten as (X & (C2 << C1)) != 0. */
12082 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12084 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12085 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12086 return fold_build2 (code, type, tem, arg1);
12088 /* Otherwise, for signed (arithmetic) shifts,
12089 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12090 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12091 else if (!TYPE_UNSIGNED (itype))
12092 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12093 arg000, build_int_cst (itype, 0));
12094 /* Otherwise, of unsigned (logical) shifts,
12095 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12096 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12098 return omit_one_operand (type,
12099 code == EQ_EXPR ? integer_one_node
12100 : integer_zero_node,
12105 /* If this is an NE comparison of zero with an AND of one, remove the
12106 comparison since the AND will give the correct value. */
12107 if (code == NE_EXPR
12108 && integer_zerop (arg1)
12109 && TREE_CODE (arg0) == BIT_AND_EXPR
12110 && integer_onep (TREE_OPERAND (arg0, 1)))
12111 return fold_convert (type, arg0);
12113 /* If we have (A & C) == C where C is a power of 2, convert this into
12114 (A & C) != 0. Similarly for NE_EXPR. */
12115 if (TREE_CODE (arg0) == BIT_AND_EXPR
12116 && integer_pow2p (TREE_OPERAND (arg0, 1))
12117 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12118 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12119 arg0, fold_convert (TREE_TYPE (arg0),
12120 integer_zero_node));
12122 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12123 bit, then fold the expression into A < 0 or A >= 0. */
12124 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12128 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12129 Similarly for NE_EXPR. */
12130 if (TREE_CODE (arg0) == BIT_AND_EXPR
12131 && TREE_CODE (arg1) == INTEGER_CST
12132 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12134 tree notc = fold_build1 (BIT_NOT_EXPR,
12135 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12136 TREE_OPERAND (arg0, 1));
12137 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12139 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12140 if (integer_nonzerop (dandnotc))
12141 return omit_one_operand (type, rslt, arg0);
12144 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12145 Similarly for NE_EXPR. */
12146 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12147 && TREE_CODE (arg1) == INTEGER_CST
12148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12150 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12151 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12152 TREE_OPERAND (arg0, 1), notd);
12153 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12154 if (integer_nonzerop (candnotd))
12155 return omit_one_operand (type, rslt, arg0);
12158 /* If this is a comparison of a field, we may be able to simplify it. */
12159 if ((TREE_CODE (arg0) == COMPONENT_REF
12160 || TREE_CODE (arg0) == BIT_FIELD_REF)
12161 /* Handle the constant case even without -O
12162 to make sure the warnings are given. */
12163 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12165 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12170 /* Optimize comparisons of strlen vs zero to a compare of the
12171 first character of the string vs zero. To wit,
12172 strlen(ptr) == 0 => *ptr == 0
12173 strlen(ptr) != 0 => *ptr != 0
12174 Other cases should reduce to one of these two (or a constant)
12175 due to the return value of strlen being unsigned. */
12176 if (TREE_CODE (arg0) == CALL_EXPR
12177 && integer_zerop (arg1))
12179 tree fndecl = get_callee_fndecl (arg0);
12182 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12183 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12184 && call_expr_nargs (arg0) == 1
12185 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12187 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12188 return fold_build2 (code, type, iref,
12189 build_int_cst (TREE_TYPE (iref), 0));
12193 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12194 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12195 if (TREE_CODE (arg0) == RSHIFT_EXPR
12196 && integer_zerop (arg1)
12197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12199 tree arg00 = TREE_OPERAND (arg0, 0);
12200 tree arg01 = TREE_OPERAND (arg0, 1);
12201 tree itype = TREE_TYPE (arg00);
12202 if (TREE_INT_CST_HIGH (arg01) == 0
12203 && TREE_INT_CST_LOW (arg01)
12204 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12206 if (TYPE_UNSIGNED (itype))
12208 itype = signed_type_for (itype);
12209 arg00 = fold_convert (itype, arg00);
12211 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12212 type, arg00, build_int_cst (itype, 0));
12216 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12217 if (integer_zerop (arg1)
12218 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12219 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12220 TREE_OPERAND (arg0, 1));
12222 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12223 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12224 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12225 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12226 build_int_cst (TREE_TYPE (arg1), 0));
12227 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12228 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12229 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12230 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12231 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12232 build_int_cst (TREE_TYPE (arg1), 0));
12234 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12235 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12236 && TREE_CODE (arg1) == INTEGER_CST
12237 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12238 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12239 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12240 TREE_OPERAND (arg0, 1), arg1));
12242 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12243 (X & C) == 0 when C is a single bit. */
12244 if (TREE_CODE (arg0) == BIT_AND_EXPR
12245 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12246 && integer_zerop (arg1)
12247 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12249 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12250 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12251 TREE_OPERAND (arg0, 1));
12252 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12256 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12257 constant C is a power of two, i.e. a single bit. */
12258 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12260 && integer_zerop (arg1)
12261 && integer_pow2p (TREE_OPERAND (arg0, 1))
12262 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12263 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12265 tree arg00 = TREE_OPERAND (arg0, 0);
12266 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12267 arg00, build_int_cst (TREE_TYPE (arg00), 0));
/* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
   when C is a power of two, i.e. a single bit. */
12272 if (TREE_CODE (arg0) == BIT_AND_EXPR
12273 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12274 && integer_zerop (arg1)
12275 && integer_pow2p (TREE_OPERAND (arg0, 1))
12276 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12277 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12279 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12280 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12281 arg000, TREE_OPERAND (arg0, 1));
12282 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12283 tem, build_int_cst (TREE_TYPE (tem), 0));
12286 if (integer_zerop (arg1)
12287 && tree_expr_nonzero_p (arg0))
12289 tree res = constant_boolean_node (code==NE_EXPR, type);
12290 return omit_one_operand (type, res, arg0);
12293 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12294 if (TREE_CODE (arg0) == NEGATE_EXPR
12295 && TREE_CODE (arg1) == NEGATE_EXPR)
12296 return fold_build2 (code, type,
12297 TREE_OPERAND (arg0, 0),
12298 TREE_OPERAND (arg1, 0));
/* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12301 if (TREE_CODE (arg0) == BIT_AND_EXPR
12302 && TREE_CODE (arg1) == BIT_AND_EXPR)
12304 tree arg00 = TREE_OPERAND (arg0, 0);
12305 tree arg01 = TREE_OPERAND (arg0, 1);
12306 tree arg10 = TREE_OPERAND (arg1, 0);
12307 tree arg11 = TREE_OPERAND (arg1, 1);
12308 tree itype = TREE_TYPE (arg0);
12310 if (operand_equal_p (arg01, arg11, 0))
12311 return fold_build2 (code, type,
12312 fold_build2 (BIT_AND_EXPR, itype,
12313 fold_build2 (BIT_XOR_EXPR, itype,
12316 build_int_cst (itype, 0));
12318 if (operand_equal_p (arg01, arg10, 0))
12319 return fold_build2 (code, type,
12320 fold_build2 (BIT_AND_EXPR, itype,
12321 fold_build2 (BIT_XOR_EXPR, itype,
12324 build_int_cst (itype, 0));
12326 if (operand_equal_p (arg00, arg11, 0))
12327 return fold_build2 (code, type,
12328 fold_build2 (BIT_AND_EXPR, itype,
12329 fold_build2 (BIT_XOR_EXPR, itype,
12332 build_int_cst (itype, 0));
12334 if (operand_equal_p (arg00, arg10, 0))
12335 return fold_build2 (code, type,
12336 fold_build2 (BIT_AND_EXPR, itype,
12337 fold_build2 (BIT_XOR_EXPR, itype,
12340 build_int_cst (itype, 0));
12343 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12344 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12346 tree arg00 = TREE_OPERAND (arg0, 0);
12347 tree arg01 = TREE_OPERAND (arg0, 1);
12348 tree arg10 = TREE_OPERAND (arg1, 0);
12349 tree arg11 = TREE_OPERAND (arg1, 1);
12350 tree itype = TREE_TYPE (arg0);
12352 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12353 operand_equal_p guarantees no side-effects so we don't need
12354 to use omit_one_operand on Z. */
12355 if (operand_equal_p (arg01, arg11, 0))
12356 return fold_build2 (code, type, arg00, arg10);
12357 if (operand_equal_p (arg01, arg10, 0))
12358 return fold_build2 (code, type, arg00, arg11);
12359 if (operand_equal_p (arg00, arg11, 0))
12360 return fold_build2 (code, type, arg01, arg10);
12361 if (operand_equal_p (arg00, arg10, 0))
12362 return fold_build2 (code, type, arg01, arg11);
12364 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12365 if (TREE_CODE (arg01) == INTEGER_CST
12366 && TREE_CODE (arg11) == INTEGER_CST)
12367 return fold_build2 (code, type,
12368 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12369 fold_build2 (BIT_XOR_EXPR, itype,
12374 /* Attempt to simplify equality/inequality comparisons of complex
12375 values. Only lower the comparison if the result is known or
12376 can be simplified to a single scalar comparison. */
12377 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12378 || TREE_CODE (arg0) == COMPLEX_CST)
12379 && (TREE_CODE (arg1) == COMPLEX_EXPR
12380 || TREE_CODE (arg1) == COMPLEX_CST))
12382 tree real0, imag0, real1, imag1;
12385 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12387 real0 = TREE_OPERAND (arg0, 0);
12388 imag0 = TREE_OPERAND (arg0, 1);
12392 real0 = TREE_REALPART (arg0);
12393 imag0 = TREE_IMAGPART (arg0);
12396 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12398 real1 = TREE_OPERAND (arg1, 0);
12399 imag1 = TREE_OPERAND (arg1, 1);
12403 real1 = TREE_REALPART (arg1);
12404 imag1 = TREE_IMAGPART (arg1);
12407 rcond = fold_binary (code, type, real0, real1);
12408 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12410 if (integer_zerop (rcond))
12412 if (code == EQ_EXPR)
12413 return omit_two_operands (type, boolean_false_node,
12415 return fold_build2 (NE_EXPR, type, imag0, imag1);
12419 if (code == NE_EXPR)
12420 return omit_two_operands (type, boolean_true_node,
12422 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12426 icond = fold_binary (code, type, imag0, imag1);
12427 if (icond && TREE_CODE (icond) == INTEGER_CST)
12429 if (integer_zerop (icond))
12431 if (code == EQ_EXPR)
12432 return omit_two_operands (type, boolean_false_node,
12434 return fold_build2 (NE_EXPR, type, real0, real1);
12438 if (code == NE_EXPR)
12439 return omit_two_operands (type, boolean_true_node,
12441 return fold_build2 (EQ_EXPR, type, real0, real1);
12452 tem = fold_comparison (code, type, op0, op1);
12453 if (tem != NULL_TREE)
12456 /* Transform comparisons of the form X +- C CMP X. */
12457 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12459 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12460 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12461 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12462 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12464 tree arg01 = TREE_OPERAND (arg0, 1);
12465 enum tree_code code0 = TREE_CODE (arg0);
12468 if (TREE_CODE (arg01) == REAL_CST)
12469 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12471 is_positive = tree_int_cst_sgn (arg01);
12473 /* (X - c) > X becomes false. */
12474 if (code == GT_EXPR
12475 && ((code0 == MINUS_EXPR && is_positive >= 0)
12476 || (code0 == PLUS_EXPR && is_positive <= 0)))
12478 if (TREE_CODE (arg01) == INTEGER_CST
12479 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12480 fold_overflow_warning (("assuming signed overflow does not "
12481 "occur when assuming that (X - c) > X "
12482 "is always false"),
12483 WARN_STRICT_OVERFLOW_ALL);
12484 return constant_boolean_node (0, type);
12487 /* Likewise (X + c) < X becomes false. */
12488 if (code == LT_EXPR
12489 && ((code0 == PLUS_EXPR && is_positive >= 0)
12490 || (code0 == MINUS_EXPR && is_positive <= 0)))
12492 if (TREE_CODE (arg01) == INTEGER_CST
12493 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12494 fold_overflow_warning (("assuming signed overflow does not "
12495 "occur when assuming that "
12496 "(X + c) < X is always false"),
12497 WARN_STRICT_OVERFLOW_ALL);
12498 return constant_boolean_node (0, type);
12501 /* Convert (X - c) <= X to true. */
12502 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12504 && ((code0 == MINUS_EXPR && is_positive >= 0)
12505 || (code0 == PLUS_EXPR && is_positive <= 0)))
12507 if (TREE_CODE (arg01) == INTEGER_CST
12508 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12509 fold_overflow_warning (("assuming signed overflow does not "
12510 "occur when assuming that "
12511 "(X - c) <= X is always true"),
12512 WARN_STRICT_OVERFLOW_ALL);
12513 return constant_boolean_node (1, type);
12516 /* Convert (X + c) >= X to true. */
12517 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12519 && ((code0 == PLUS_EXPR && is_positive >= 0)
12520 || (code0 == MINUS_EXPR && is_positive <= 0)))
12522 if (TREE_CODE (arg01) == INTEGER_CST
12523 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12524 fold_overflow_warning (("assuming signed overflow does not "
12525 "occur when assuming that "
12526 "(X + c) >= X is always true"),
12527 WARN_STRICT_OVERFLOW_ALL);
12528 return constant_boolean_node (1, type);
12531 if (TREE_CODE (arg01) == INTEGER_CST)
12533 /* Convert X + c > X and X - c < X to true for integers. */
12534 if (code == GT_EXPR
12535 && ((code0 == PLUS_EXPR && is_positive > 0)
12536 || (code0 == MINUS_EXPR && is_positive < 0)))
12538 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12539 fold_overflow_warning (("assuming signed overflow does "
12540 "not occur when assuming that "
12541 "(X + c) > X is always true"),
12542 WARN_STRICT_OVERFLOW_ALL);
12543 return constant_boolean_node (1, type);
12546 if (code == LT_EXPR
12547 && ((code0 == MINUS_EXPR && is_positive > 0)
12548 || (code0 == PLUS_EXPR && is_positive < 0)))
12550 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12551 fold_overflow_warning (("assuming signed overflow does "
12552 "not occur when assuming that "
12553 "(X - c) < X is always true"),
12554 WARN_STRICT_OVERFLOW_ALL);
12555 return constant_boolean_node (1, type);
12558 /* Convert X + c <= X and X - c >= X to false for integers. */
12559 if (code == LE_EXPR
12560 && ((code0 == PLUS_EXPR && is_positive > 0)
12561 || (code0 == MINUS_EXPR && is_positive < 0)))
12563 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12564 fold_overflow_warning (("assuming signed overflow does "
12565 "not occur when assuming that "
12566 "(X + c) <= X is always false"),
12567 WARN_STRICT_OVERFLOW_ALL);
12568 return constant_boolean_node (0, type);
12571 if (code == GE_EXPR
12572 && ((code0 == MINUS_EXPR && is_positive > 0)
12573 || (code0 == PLUS_EXPR && is_positive < 0)))
12575 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12576 fold_overflow_warning (("assuming signed overflow does "
12577 "not occur when assuming that "
12578 "(X - c) >= X is always false"),
12579 WARN_STRICT_OVERFLOW_ALL);
12580 return constant_boolean_node (0, type);
12585 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12586 This transformation affects the cases which are handled in later
12587 optimizations involving comparisons with non-negative constants. */
12588 if (TREE_CODE (arg1) == INTEGER_CST
12589 && TREE_CODE (arg0) != INTEGER_CST
12590 && tree_int_cst_sgn (arg1) > 0)
12592 if (code == GE_EXPR)
12594 arg1 = const_binop (MINUS_EXPR, arg1,
12595 build_int_cst (TREE_TYPE (arg1), 1), 0);
12596 return fold_build2 (GT_EXPR, type, arg0,
12597 fold_convert (TREE_TYPE (arg0), arg1));
12599 if (code == LT_EXPR)
12601 arg1 = const_binop (MINUS_EXPR, arg1,
12602 build_int_cst (TREE_TYPE (arg1), 1), 0);
12603 return fold_build2 (LE_EXPR, type, arg0,
12604 fold_convert (TREE_TYPE (arg0), arg1));
12608 /* Comparisons with the highest or lowest possible integer of
12609 the specified precision will have known values. */
12611 tree arg1_type = TREE_TYPE (arg1);
12612 unsigned int width = TYPE_PRECISION (arg1_type);
12614 if (TREE_CODE (arg1) == INTEGER_CST
12615 && !TREE_OVERFLOW (arg1)
12616 && width <= 2 * HOST_BITS_PER_WIDE_INT
12617 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12619 HOST_WIDE_INT signed_max_hi;
12620 unsigned HOST_WIDE_INT signed_max_lo;
12621 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12623 if (width <= HOST_BITS_PER_WIDE_INT)
12625 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12630 if (TYPE_UNSIGNED (arg1_type))
12632 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12638 max_lo = signed_max_lo;
12639 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12645 width -= HOST_BITS_PER_WIDE_INT;
12646 signed_max_lo = -1;
12647 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12652 if (TYPE_UNSIGNED (arg1_type))
12654 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12659 max_hi = signed_max_hi;
12660 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12664 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12665 && TREE_INT_CST_LOW (arg1) == max_lo)
12669 return omit_one_operand (type, integer_zero_node, arg0);
12672 return fold_build2 (EQ_EXPR, type, op0, op1);
12675 return omit_one_operand (type, integer_one_node, arg0);
12678 return fold_build2 (NE_EXPR, type, op0, op1);
12680 /* The GE_EXPR and LT_EXPR cases above are not normally
12681 reached because of previous transformations. */
12686 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12688 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12692 arg1 = const_binop (PLUS_EXPR, arg1,
12693 build_int_cst (TREE_TYPE (arg1), 1), 0);
12694 return fold_build2 (EQ_EXPR, type,
12695 fold_convert (TREE_TYPE (arg1), arg0),
12698 arg1 = const_binop (PLUS_EXPR, arg1,
12699 build_int_cst (TREE_TYPE (arg1), 1), 0);
12700 return fold_build2 (NE_EXPR, type,
12701 fold_convert (TREE_TYPE (arg1), arg0),
12706 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12708 && TREE_INT_CST_LOW (arg1) == min_lo)
12712 return omit_one_operand (type, integer_zero_node, arg0);
12715 return fold_build2 (EQ_EXPR, type, op0, op1);
12718 return omit_one_operand (type, integer_one_node, arg0);
12721 return fold_build2 (NE_EXPR, type, op0, op1);
12726 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12728 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12732 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12733 return fold_build2 (NE_EXPR, type,
12734 fold_convert (TREE_TYPE (arg1), arg0),
12737 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12738 return fold_build2 (EQ_EXPR, type,
12739 fold_convert (TREE_TYPE (arg1), arg0),
12745 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12746 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12747 && TYPE_UNSIGNED (arg1_type)
12748 /* We will flip the signedness of the comparison operator
12749 associated with the mode of arg1, so the sign bit is
12750 specified by this mode. Check that arg1 is the signed
12751 max associated with this sign bit. */
12752 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12753 /* signed_type does not work on pointer types. */
12754 && INTEGRAL_TYPE_P (arg1_type))
/* The following case also applies to X < signed_max+1
   and X >= signed_max+1 because of previous transformations. */
12758 if (code == LE_EXPR || code == GT_EXPR)
12761 st = signed_type_for (TREE_TYPE (arg1));
12762 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12763 type, fold_convert (st, arg0),
12764 build_int_cst (st, 0));
12770 /* If we are comparing an ABS_EXPR with a constant, we can
12771 convert all the cases into explicit comparisons, but they may
12772 well not be faster than doing the ABS and one comparison.
12773 But ABS (X) <= C is a range comparison, which becomes a subtraction
12774 and a comparison, and is probably faster. */
12775 if (code == LE_EXPR
12776 && TREE_CODE (arg1) == INTEGER_CST
12777 && TREE_CODE (arg0) == ABS_EXPR
12778 && ! TREE_SIDE_EFFECTS (arg0)
12779 && (0 != (tem = negate_expr (arg1)))
12780 && TREE_CODE (tem) == INTEGER_CST
12781 && !TREE_OVERFLOW (tem))
12782 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12783 build2 (GE_EXPR, type,
12784 TREE_OPERAND (arg0, 0), tem),
12785 build2 (LE_EXPR, type,
12786 TREE_OPERAND (arg0, 0), arg1));
12788 /* Convert ABS_EXPR<x> >= 0 to true. */
12789 strict_overflow_p = false;
12790 if (code == GE_EXPR
12791 && (integer_zerop (arg1)
12792 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12793 && real_zerop (arg1)))
12794 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12796 if (strict_overflow_p)
12797 fold_overflow_warning (("assuming signed overflow does not occur "
12798 "when simplifying comparison of "
12799 "absolute value and zero"),
12800 WARN_STRICT_OVERFLOW_CONDITIONAL);
12801 return omit_one_operand (type, integer_one_node, arg0);
12804 /* Convert ABS_EXPR<x> < 0 to false. */
12805 strict_overflow_p = false;
12806 if (code == LT_EXPR
12807 && (integer_zerop (arg1) || real_zerop (arg1))
12808 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12810 if (strict_overflow_p)
12811 fold_overflow_warning (("assuming signed overflow does not occur "
12812 "when simplifying comparison of "
12813 "absolute value and zero"),
12814 WARN_STRICT_OVERFLOW_CONDITIONAL);
12815 return omit_one_operand (type, integer_zero_node, arg0);
12818 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12819 and similarly for >= into !=. */
12820 if ((code == LT_EXPR || code == GE_EXPR)
12821 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12822 && TREE_CODE (arg1) == LSHIFT_EXPR
12823 && integer_onep (TREE_OPERAND (arg1, 0)))
12824 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12825 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12826 TREE_OPERAND (arg1, 1)),
12827 build_int_cst (TREE_TYPE (arg0), 0));
12829 if ((code == LT_EXPR || code == GE_EXPR)
12830 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12831 && (TREE_CODE (arg1) == NOP_EXPR
12832 || TREE_CODE (arg1) == CONVERT_EXPR)
12833 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12834 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12836 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12837 fold_convert (TREE_TYPE (arg0),
12838 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12839 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12841 build_int_cst (TREE_TYPE (arg0), 0));
12845 case UNORDERED_EXPR:
12853 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12855 t1 = fold_relational_const (code, type, arg0, arg1);
12856 if (t1 != NULL_TREE)
12860 /* If the first operand is NaN, the result is constant. */
12861 if (TREE_CODE (arg0) == REAL_CST
12862 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12863 && (code != LTGT_EXPR || ! flag_trapping_math))
12865 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12866 ? integer_zero_node
12867 : integer_one_node;
12868 return omit_one_operand (type, t1, arg1);
12871 /* If the second operand is NaN, the result is constant. */
12872 if (TREE_CODE (arg1) == REAL_CST
12873 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12874 && (code != LTGT_EXPR || ! flag_trapping_math))
12876 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12877 ? integer_zero_node
12878 : integer_one_node;
12879 return omit_one_operand (type, t1, arg0);
12882 /* Simplify unordered comparison of something with itself. */
12883 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12884 && operand_equal_p (arg0, arg1, 0))
12885 return constant_boolean_node (1, type);
12887 if (code == LTGT_EXPR
12888 && !flag_trapping_math
12889 && operand_equal_p (arg0, arg1, 0))
12890 return constant_boolean_node (0, type);
12892 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12894 tree targ0 = strip_float_extensions (arg0);
12895 tree targ1 = strip_float_extensions (arg1);
12896 tree newtype = TREE_TYPE (targ0);
12898 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12899 newtype = TREE_TYPE (targ1);
12901 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12902 return fold_build2 (code, type, fold_convert (newtype, targ0),
12903 fold_convert (newtype, targ1));
12908 case COMPOUND_EXPR:
12909 /* When pedantic, a compound expression can be neither an lvalue
12910 nor an integer constant expression. */
12911 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12913 /* Don't let (0, 0) be null pointer constant. */
12914 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12915 : fold_convert (type, arg1);
12916 return pedantic_non_lvalue (tem);
12919 if ((TREE_CODE (arg0) == REAL_CST
12920 && TREE_CODE (arg1) == REAL_CST)
12921 || (TREE_CODE (arg0) == INTEGER_CST
12922 && TREE_CODE (arg1) == INTEGER_CST))
12923 return build_complex (type, arg0, arg1);
12927 /* An ASSERT_EXPR should never be passed to fold_binary. */
12928 gcc_unreachable ();
12932 } /* switch (code) */
12935 /* Callback for walk_tree, looking for LABEL_EXPR.
12936 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12937 Do not check the sub-tree of GOTO_EXPR. */
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
  /* Dispatch on the code of the node currently being visited.  */
  switch (TREE_CODE (*tp))
      /* Prune the walk at a GOTO_EXPR: its label operand is only a
	 reference to (use of) a label, not a label definition, so the
	 sub-tree below it cannot make a label accessible and need not
	 be searched.  */
      *walk_subtrees = 0;
/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns true if such a label
   is found, false otherwise. */
contains_label_p (tree st)
  /* walk_tree returns the first LABEL_EXPR contains_label_1 finds
     (non-null), or NULL_TREE if the walk completes without a hit;
     convert that to a boolean result.  */
  return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12966 /* Fold a ternary expression of code CODE and type TYPE with operands
12967 OP0, OP1, and OP2. Return the folded expression if folding is
12968 successful. Otherwise, return NULL_TREE. */
12971 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12974 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12975 enum tree_code_class kind = TREE_CODE_CLASS (code);
12977 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12978 && TREE_CODE_LENGTH (code) == 3);
12980 /* Strip any conversions that don't change the mode. This is safe
12981 for every expression, except for a comparison expression because
12982 its signedness is derived from its operands. So, in the latter
12983 case, only strip conversions that don't change the signedness.
12985 Note that this is done as an internal manipulation within the
12986 constant folder, in order to find the simplest representation of
12987 the arguments so that their form can be studied. In any cases,
12988 the appropriate type conversions should be put back in the tree
12989 that will get out of the constant folder. */
13004 case COMPONENT_REF:
13005 if (TREE_CODE (arg0) == CONSTRUCTOR
13006 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13008 unsigned HOST_WIDE_INT idx;
13010 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13017 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13018 so all simple results must be passed through pedantic_non_lvalue. */
13019 if (TREE_CODE (arg0) == INTEGER_CST)
13021 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13022 tem = integer_zerop (arg0) ? op2 : op1;
13023 /* Only optimize constant conditions when the selected branch
13024 has the same type as the COND_EXPR. This avoids optimizing
13025 away "c ? x : throw", where the throw has a void type.
13026 Avoid throwing away that operand which contains label. */
13027 if ((!TREE_SIDE_EFFECTS (unused_op)
13028 || !contains_label_p (unused_op))
13029 && (! VOID_TYPE_P (TREE_TYPE (tem))
13030 || VOID_TYPE_P (type)))
13031 return pedantic_non_lvalue (tem);
13034 if (operand_equal_p (arg1, op2, 0))
13035 return pedantic_omit_one_operand (type, arg1, arg0);
13037 /* If we have A op B ? A : C, we may be able to convert this to a
13038 simpler expression, depending on the operation and the values
13039 of B and C. Signed zeros prevent all of these transformations,
13040 for reasons given above each one.
13042 Also try swapping the arguments and inverting the conditional. */
13043 if (COMPARISON_CLASS_P (arg0)
13044 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13045 arg1, TREE_OPERAND (arg0, 1))
13046 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13048 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13053 if (COMPARISON_CLASS_P (arg0)
13054 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13056 TREE_OPERAND (arg0, 1))
13057 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13059 tem = fold_truth_not_expr (arg0);
13060 if (tem && COMPARISON_CLASS_P (tem))
13062 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13068 /* If the second operand is simpler than the third, swap them
13069 since that produces better jump optimization results. */
13070 if (truth_value_p (TREE_CODE (arg0))
13071 && tree_swap_operands_p (op1, op2, false))
13073 /* See if this can be inverted. If it can't, possibly because
13074 it was a floating-point inequality comparison, don't do
13076 tem = fold_truth_not_expr (arg0);
13078 return fold_build3 (code, type, tem, op2, op1);
13081 /* Convert A ? 1 : 0 to simply A. */
13082 if (integer_onep (op1)
13083 && integer_zerop (op2)
13084 /* If we try to convert OP0 to our type, the
13085 call to fold will try to move the conversion inside
13086 a COND, which will recurse. In that case, the COND_EXPR
13087 is probably the best choice, so leave it alone. */
13088 && type == TREE_TYPE (arg0))
13089 return pedantic_non_lvalue (arg0);
13091 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13092 over COND_EXPR in cases such as floating point comparisons. */
13093 if (integer_zerop (op1)
13094 && integer_onep (op2)
13095 && truth_value_p (TREE_CODE (arg0)))
13096 return pedantic_non_lvalue (fold_convert (type,
13097 invert_truthvalue (arg0)));
13099 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13100 if (TREE_CODE (arg0) == LT_EXPR
13101 && integer_zerop (TREE_OPERAND (arg0, 1))
13102 && integer_zerop (op2)
13103 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13105 /* sign_bit_p only checks ARG1 bits within A's precision.
13106 If <sign bit of A> has wider type than A, bits outside
13107 of A's precision in <sign bit of A> need to be checked.
13108 If they are all 0, this optimization needs to be done
13109 in unsigned A's type, if they are all 1 in signed A's type,
13110 otherwise this can't be done. */
13111 if (TYPE_PRECISION (TREE_TYPE (tem))
13112 < TYPE_PRECISION (TREE_TYPE (arg1))
13113 && TYPE_PRECISION (TREE_TYPE (tem))
13114 < TYPE_PRECISION (type))
13116 unsigned HOST_WIDE_INT mask_lo;
13117 HOST_WIDE_INT mask_hi;
13118 int inner_width, outer_width;
13121 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13122 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13123 if (outer_width > TYPE_PRECISION (type))
13124 outer_width = TYPE_PRECISION (type);
13126 if (outer_width > HOST_BITS_PER_WIDE_INT)
13128 mask_hi = ((unsigned HOST_WIDE_INT) -1
13129 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13135 mask_lo = ((unsigned HOST_WIDE_INT) -1
13136 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13138 if (inner_width > HOST_BITS_PER_WIDE_INT)
13140 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13141 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13145 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13146 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13148 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13149 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13151 tem_type = signed_type_for (TREE_TYPE (tem));
13152 tem = fold_convert (tem_type, tem);
13154 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13155 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13157 tem_type = unsigned_type_for (TREE_TYPE (tem));
13158 tem = fold_convert (tem_type, tem);
13165 return fold_convert (type,
13166 fold_build2 (BIT_AND_EXPR,
13167 TREE_TYPE (tem), tem,
13168 fold_convert (TREE_TYPE (tem),
13172 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13173 already handled above. */
13174 if (TREE_CODE (arg0) == BIT_AND_EXPR
13175 && integer_onep (TREE_OPERAND (arg0, 1))
13176 && integer_zerop (op2)
13177 && integer_pow2p (arg1))
13179 tree tem = TREE_OPERAND (arg0, 0);
13181 if (TREE_CODE (tem) == RSHIFT_EXPR
13182 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13183 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13184 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13185 return fold_build2 (BIT_AND_EXPR, type,
13186 TREE_OPERAND (tem, 0), arg1);
13189 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13190 is probably obsolete because the first operand should be a
13191 truth value (that's why we have the two cases above), but let's
13192 leave it in until we can confirm this for all front-ends. */
13193 if (integer_zerop (op2)
13194 && TREE_CODE (arg0) == NE_EXPR
13195 && integer_zerop (TREE_OPERAND (arg0, 1))
13196 && integer_pow2p (arg1)
13197 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13198 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13199 arg1, OEP_ONLY_CONST))
13200 return pedantic_non_lvalue (fold_convert (type,
13201 TREE_OPERAND (arg0, 0)));
13203 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13204 if (integer_zerop (op2)
13205 && truth_value_p (TREE_CODE (arg0))
13206 && truth_value_p (TREE_CODE (arg1)))
13207 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13208 fold_convert (type, arg0),
13211 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13212 if (integer_onep (op2)
13213 && truth_value_p (TREE_CODE (arg0))
13214 && truth_value_p (TREE_CODE (arg1)))
13216 /* Only perform transformation if ARG0 is easily inverted. */
13217 tem = fold_truth_not_expr (arg0);
13219 return fold_build2 (TRUTH_ORIF_EXPR, type,
13220 fold_convert (type, tem),
13224 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13225 if (integer_zerop (arg1)
13226 && truth_value_p (TREE_CODE (arg0))
13227 && truth_value_p (TREE_CODE (op2)))
13229 /* Only perform transformation if ARG0 is easily inverted. */
13230 tem = fold_truth_not_expr (arg0);
13232 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13233 fold_convert (type, tem),
13237 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13238 if (integer_onep (arg1)
13239 && truth_value_p (TREE_CODE (arg0))
13240 && truth_value_p (TREE_CODE (op2)))
13241 return fold_build2 (TRUTH_ORIF_EXPR, type,
13242 fold_convert (type, arg0),
13248 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13249 of fold_ternary on them. */
13250 gcc_unreachable ();
13252 case BIT_FIELD_REF:
13253 if ((TREE_CODE (arg0) == VECTOR_CST
13254 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13255 && type == TREE_TYPE (TREE_TYPE (arg0))
13256 && host_integerp (arg1, 1)
13257 && host_integerp (op2, 1))
13259 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13260 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13263 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13264 && (idx % width) == 0
13265 && (idx = idx / width)
13266 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13268 tree elements = NULL_TREE;
13270 if (TREE_CODE (arg0) == VECTOR_CST)
13271 elements = TREE_VECTOR_CST_ELTS (arg0);
13274 unsigned HOST_WIDE_INT idx;
13277 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13278 elements = tree_cons (NULL_TREE, value, elements);
13280 while (idx-- > 0 && elements)
13281 elements = TREE_CHAIN (elements);
13283 return TREE_VALUE (elements);
13285 return fold_convert (type, integer_zero_node);
13292 } /* switch (code) */
13295 /* Perform constant folding and related simplification of EXPR.
13296 The related simplifications include x*1 => x, x*0 => 0, etc.,
13297 and application of the associative law.
13298 NOP_EXPR conversions may be removed freely (as long as we
13299 are careful not to change the type of the overall expression).
13300 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13301 but we can constant-fold them if they have constant operands. */
13303 #ifdef ENABLE_FOLD_CHECKING
/* Under --enable-checking=fold the real worker is renamed fold_1 so the
   checking wrapper below (also named fold) can call it.  */
13304 # define fold(x) fold_1 (x)
13305 static tree fold_1 (tree);
/* NOTE(review): this excerpt omits several physical lines (the embedded
   numbering jumps 13305 -> 13311); the function header and opening brace
   of fold ()/fold_1 () are not visible here.  */
13311 const tree t = expr;
13312 enum tree_code code = TREE_CODE (t);
13313 enum tree_code_class kind = TREE_CODE_CLASS (code);
13316 /* Return right away if a constant. */
13317 if (kind == tcc_constant)
13320 /* CALL_EXPR-like objects with variable numbers of operands are
13321 treated specially. */
13322 if (kind == tcc_vl_exp)
13324 if (code == CALL_EXPR)
13326 tem = fold_call_expr (expr, false);
/* fold_call_expr returns NULL_TREE when no simplification applies;
   fall back to the unmodified expression in that case.  */
13327 return tem ? tem : expr;
/* Unary/binary/ternary expressions dispatch on operand count to the
   corresponding fold_unary/fold_binary/fold_ternary worker.  */
13332 if (IS_EXPR_CODE_CLASS (kind)
13333 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13335 tree type = TREE_TYPE (t);
13336 tree op0, op1, op2;
13338 switch (TREE_CODE_LENGTH (code))
13341 op0 = TREE_OPERAND (t, 0);
13342 tem = fold_unary (code, type, op0);
13343 return tem ? tem : expr;
13345 op0 = TREE_OPERAND (t, 0);
13346 op1 = TREE_OPERAND (t, 1);
13347 tem = fold_binary (code, type, op0, op1);
13348 return tem ? tem : expr;
13350 op0 = TREE_OPERAND (t, 0);
13351 op1 = TREE_OPERAND (t, 1);
13352 op2 = TREE_OPERAND (t, 2);
13353 tem = fold_ternary (code, type, op0, op1, op2);
13354 return tem ? tem : expr;
/* NOTE(review): the case label for this arm is not visible in the
   excerpt; presumably a CONST_DECL is folded to its initializer.  */
13363 return fold (DECL_INITIAL (t));
13367 } /* switch (code) */
13370 #ifdef ENABLE_FOLD_CHECKING
/* Checking infrastructure: when fold checking is enabled, fold () becomes
   a wrapper that MD5-checksums EXPR before and after calling the real
   worker fold_1 () and aborts if fold modified its argument in place.  */
13373 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13374 static void fold_check_failed (const_tree, const_tree);
13375 void print_fold_checksum (const_tree);
13377 /* When --enable-checking=fold, compute a digest of expr before
13378 and after actual fold call to see if fold did not accidentally
13379 change original expr. */
/* NOTE(review): the wrapper's signature and opening brace are among the
   lines omitted from this excerpt (numbering jumps 13379 -> 13385).  */
13385 struct md5_ctx ctx;
13386 unsigned char checksum_before[16], checksum_after[16];
/* The hash table records already-visited nodes so shared subtrees are
   checksummed only once (fold_checksum_tree inserts into it).  */
13389 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13390 md5_init_ctx (&ctx);
13391 fold_checksum_tree (expr, &ctx, ht);
13392 md5_finish_ctx (&ctx, checksum_before);
13395 ret = fold_1 (expr);
13397 md5_init_ctx (&ctx);
13398 fold_checksum_tree (expr, &ctx, ht);
13399 md5_finish_ctx (&ctx, checksum_after);
/* Any difference in the 16-byte MD5 digests means fold_1 mutated EXPR.  */
13402 if (memcmp (checksum_before, checksum_after, 16))
13403 fold_check_failed (expr, ret);
/* Debugging aid: print the MD5 checksum of EXPR (as computed by
   fold_checksum_tree) to stderr as 16 two-digit hex bytes.  */
13409 print_fold_checksum (const_tree expr)
13411 struct md5_ctx ctx;
13412 unsigned char checksum[16], cnt;
13415 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13416 md5_init_ctx (&ctx);
13417 fold_checksum_tree (expr, &ctx, ht);
13418 md5_finish_ctx (&ctx, checksum);
13420 for (cnt = 0; cnt < 16; ++cnt)
13421 fprintf (stderr, "%02x", checksum[cnt]);
13422 putc ('\n', stderr);
/* Called when the before/after checksums differ: abort compilation with
   an internal compiler error.  Both arguments are unused; they exist so
   a debugger stopped here can inspect the offending trees.  */
13426 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13428 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of EXPR and everything it reaches into the
   MD5 context CTX.  HT records pointers already processed so shared
   subtrees and cycles are hashed only once.  Fields that fold is allowed
   to modify (DECL_ASSEMBLER_NAME, type caches, pointer-to/reference-to
   links) are masked out by hashing a scrubbed stack copy instead.  */
13432 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13435 enum tree_code code;
13436 struct tree_function_decl buf;
/* buf doubles as scratch space for scrubbed copies of decls and types;
   assert it is large enough for either.  */
13441 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13442 <= sizeof (struct tree_function_decl))
13443 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13446 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13450 code = TREE_CODE (expr);
13451 if (TREE_CODE_CLASS (code) == tcc_declaration
13452 && DECL_ASSEMBLER_NAME_SET_P (expr))
13454 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13455 memcpy ((char *) &buf, expr, tree_size (expr));
13456 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13457 expr = (tree) &buf;
13459 else if (TREE_CODE_CLASS (code) == tcc_type
13460 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13461 || TYPE_CACHED_VALUES_P (expr)
13462 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13464 /* Allow these fields to be modified. */
13466 memcpy ((char *) &buf, expr, tree_size (expr));
13467 expr = tmp = (tree) &buf;
13468 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13469 TYPE_POINTER_TO (tmp) = NULL;
13470 TYPE_REFERENCE_TO (tmp) = NULL;
13471 if (TYPE_CACHED_VALUES_P (tmp))
13473 TYPE_CACHED_VALUES_P (tmp) = 0;
13474 TYPE_CACHED_VALUES (tmp) = NULL;
/* Hash the node's raw bytes, then recurse into its type and chain.  */
13477 md5_process_bytes (expr, tree_size (expr), ctx);
13478 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13479 if (TREE_CODE_CLASS (code) != tcc_type
13480 && TREE_CODE_CLASS (code) != tcc_declaration
13481 && code != TREE_LIST
13482 && code != SSA_NAME)
13483 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Per-class recursion into the fields that reference other trees.  */
13484 switch (TREE_CODE_CLASS (code))
13490 md5_process_bytes (TREE_STRING_POINTER (expr),
13491 TREE_STRING_LENGTH (expr), ctx);
13494 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13495 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13498 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13504 case tcc_exceptional:
/* TREE_LIST: hash purpose/value, then iterate down the chain via the
   recursive_label rather than deep recursion.  NOTE(review): the label
   itself is on a line omitted from this excerpt.  */
13508 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13509 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13510 expr = TREE_CHAIN (expr);
13511 goto recursive_label;
13514 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13515 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13521 case tcc_expression:
13522 case tcc_reference:
13523 case tcc_comparison:
13526 case tcc_statement:
13528 len = TREE_OPERAND_LENGTH (expr);
13529 for (i = 0; i < len; ++i)
13530 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13532 case tcc_declaration:
13533 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13534 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13535 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13537 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13538 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13539 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13540 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13541 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13543 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13544 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13546 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13548 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13549 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13550 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type: hash the fields of the type node that point at other trees.
   NOTE(review): the "case tcc_type:" label is on an omitted line.  */
13554 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13555 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13556 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13557 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13558 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13559 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13560 if (INTEGRAL_TYPE_P (expr)
13561 || SCALAR_FLOAT_TYPE_P (expr))
13563 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13564 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13566 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13567 if (TREE_CODE (expr) == RECORD_TYPE
13568 || TREE_CODE (expr) == UNION_TYPE
13569 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13570 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13571 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13578 /* Helper function for outputting the checksum of a tree T. When
13579 debugging with gdb, you can "define mynext" to be "next" followed
13580 by "call debug_fold_checksum (op0)", then just trace down till the
/* Prints the 16 checksum bytes of T to stderr in decimal, one line.  */
13584 debug_fold_checksum (const_tree t)
13587 unsigned char checksum[16];
13588 struct md5_ctx ctx;
13589 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13591 md5_init_ctx (&ctx);
13592 fold_checksum_tree (t, &ctx, ht);
13593 md5_finish_ctx (&ctx, checksum);
13596 for (i = 0; i < 16; i++)
13597 fprintf (stderr, "%d ", checksum[i]);
13599 fprintf (stderr, "\n");
13604 /* Fold a unary tree expression with code CODE of type TYPE with an
13605 operand OP0. Return a folded expression if successful. Otherwise,
13606 return a tree expression with code CODE of type TYPE with an
13610 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13613 #ifdef ENABLE_FOLD_CHECKING
/* Checksum OP0 before and after folding to catch in-place mutation,
   same scheme as the fold () checking wrapper above.  */
13614 unsigned char checksum_before[16], checksum_after[16];
13615 struct md5_ctx ctx;
13618 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13619 md5_init_ctx (&ctx);
13620 fold_checksum_tree (op0, &ctx, ht);
13621 md5_finish_ctx (&ctx, checksum_before);
13625 tem = fold_unary (code, type, op0);
/* If fold_unary found nothing, build the plain expression node
   (the memory-statistics location is threaded via PASS_MEM_STAT).  */
13627 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13629 #ifdef ENABLE_FOLD_CHECKING
13630 md5_init_ctx (&ctx);
13631 fold_checksum_tree (op0, &ctx, ht);
13632 md5_finish_ctx (&ctx, checksum_after);
13635 if (memcmp (checksum_before, checksum_after, 16))
13636 fold_check_failed (op0, tem);
13641 /* Fold a binary tree expression with code CODE of type TYPE with
13642 operands OP0 and OP1. Return a folded expression if successful.
13643 Otherwise, return a tree expression with code CODE of type TYPE
13644 with operands OP0 and OP1. */
13647 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13651 #ifdef ENABLE_FOLD_CHECKING
/* Per-operand before/after MD5 digests: fold must not mutate either
   operand in place.  */
13652 unsigned char checksum_before_op0[16],
13653 checksum_before_op1[16],
13654 checksum_after_op0[16],
13655 checksum_after_op1[16];
13656 struct md5_ctx ctx;
13659 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13660 md5_init_ctx (&ctx);
13661 fold_checksum_tree (op0, &ctx, ht);
13662 md5_finish_ctx (&ctx, checksum_before_op0);
13665 md5_init_ctx (&ctx);
13666 fold_checksum_tree (op1, &ctx, ht);
13667 md5_finish_ctx (&ctx, checksum_before_op1);
13671 tem = fold_binary (code, type, op0, op1);
/* No simplification: construct the raw binary node instead.  */
13673 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13675 #ifdef ENABLE_FOLD_CHECKING
13676 md5_init_ctx (&ctx);
13677 fold_checksum_tree (op0, &ctx, ht);
13678 md5_finish_ctx (&ctx, checksum_after_op0);
13681 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13682 fold_check_failed (op0, tem);
13684 md5_init_ctx (&ctx);
13685 fold_checksum_tree (op1, &ctx, ht);
13686 md5_finish_ctx (&ctx, checksum_after_op1);
13689 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13690 fold_check_failed (op1, tem);
13695 /* Fold a ternary tree expression with code CODE of type TYPE with
13696 operands OP0, OP1, and OP2. Return a folded expression if
13697 successful. Otherwise, return a tree expression with code CODE of
13698 type TYPE with operands OP0, OP1, and OP2. */
13701 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13705 #ifdef ENABLE_FOLD_CHECKING
/* Same checking scheme as fold_build2_stat, extended to three operands.  */
13706 unsigned char checksum_before_op0[16],
13707 checksum_before_op1[16],
13708 checksum_before_op2[16],
13709 checksum_after_op0[16],
13710 checksum_after_op1[16],
13711 checksum_after_op2[16];
13712 struct md5_ctx ctx;
13715 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13716 md5_init_ctx (&ctx);
13717 fold_checksum_tree (op0, &ctx, ht);
13718 md5_finish_ctx (&ctx, checksum_before_op0);
13721 md5_init_ctx (&ctx);
13722 fold_checksum_tree (op1, &ctx, ht);
13723 md5_finish_ctx (&ctx, checksum_before_op1);
13726 md5_init_ctx (&ctx);
13727 fold_checksum_tree (op2, &ctx, ht);
13728 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length expressions (CALL_EXPR) must go through
   fold_build_call_array, not this fixed-arity entry point.  */
13732 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13733 tem = fold_ternary (code, type, op0, op1, op2);
13735 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13737 #ifdef ENABLE_FOLD_CHECKING
13738 md5_init_ctx (&ctx);
13739 fold_checksum_tree (op0, &ctx, ht);
13740 md5_finish_ctx (&ctx, checksum_after_op0);
13743 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13744 fold_check_failed (op0, tem);
13746 md5_init_ctx (&ctx);
13747 fold_checksum_tree (op1, &ctx, ht);
13748 md5_finish_ctx (&ctx, checksum_after_op1);
13751 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13752 fold_check_failed (op1, tem);
13754 md5_init_ctx (&ctx);
13755 fold_checksum_tree (op2, &ctx, ht);
13756 md5_finish_ctx (&ctx, checksum_after_op2);
13759 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13760 fold_check_failed (op2, tem);
13765 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13766 arguments in ARGARRAY, and a null static chain.
13767 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13768 of type TYPE from the given operands as constructed by build_call_array. */
13771 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13774 #ifdef ENABLE_FOLD_CHECKING
/* Checksums cover FN and the whole argument list; the argument digest
   folds all NARGS trees into a single 16-byte value.  */
13775 unsigned char checksum_before_fn[16],
13776 checksum_before_arglist[16],
13777 checksum_after_fn[16],
13778 checksum_after_arglist[16];
13779 struct md5_ctx ctx;
13783 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13784 md5_init_ctx (&ctx);
13785 fold_checksum_tree (fn, &ctx, ht);
13786 md5_finish_ctx (&ctx, checksum_before_fn);
13789 md5_init_ctx (&ctx);
13790 for (i = 0; i < nargs; i++)
13791 fold_checksum_tree (argarray[i], &ctx, ht);
13792 md5_finish_ctx (&ctx, checksum_before_arglist);
13796 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13798 #ifdef ENABLE_FOLD_CHECKING
13799 md5_init_ctx (&ctx);
13800 fold_checksum_tree (fn, &ctx, ht);
13801 md5_finish_ctx (&ctx, checksum_after_fn);
13804 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13805 fold_check_failed (fn, tem);
13807 md5_init_ctx (&ctx);
13808 for (i = 0; i < nargs; i++)
13809 fold_checksum_tree (argarray[i], &ctx, ht);
13810 md5_finish_ctx (&ctx, checksum_after_arglist);
/* NULL_TREE here: no single tree identifies the changed argument.  */
13813 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13814 fold_check_failed (NULL_TREE, tem);
13819 /* Perform constant folding and related simplification of initializer
13820 expression EXPR. These behave identically to "fold_buildN" but ignore
13821 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves the trap/rounding-related flags and clears them
   so fold may simplify freely inside static initializers; END_FOLD_INIT
   restores the saved values.  They rely on being expanded at the top of
   a block so the saved_* locals stay in scope for END_FOLD_INIT.  */
13823 #define START_FOLD_INIT \
13824 int saved_signaling_nans = flag_signaling_nans;\
13825 int saved_trapping_math = flag_trapping_math;\
13826 int saved_rounding_math = flag_rounding_math;\
13827 int saved_trapv = flag_trapv;\
13828 int saved_folding_initializer = folding_initializer;\
13829 flag_signaling_nans = 0;\
13830 flag_trapping_math = 0;\
13831 flag_rounding_math = 0;\
/* NOTE(review): the line clearing flag_trapv (original line 13832) is not
   visible in this excerpt, though saved_trapv is both saved and restored
   — confirm against the full source.  */
13833 folding_initializer = 1;
13835 #define END_FOLD_INIT \
13836 flag_signaling_nans = saved_signaling_nans;\
13837 flag_trapping_math = saved_trapping_math;\
13838 flag_rounding_math = saved_rounding_math;\
13839 flag_trapv = saved_trapv;\
13840 folding_initializer = saved_folding_initializer;
/* Initializer-context variants of the fold_buildN entry points: each
   simply brackets the corresponding fold_buildN call with
   START_FOLD_INIT/END_FOLD_INIT.  NOTE(review): the macro-expansion and
   return lines of each wrapper are omitted from this excerpt; only the
   signatures and the central fold_buildN calls are visible.  */
13843 fold_build1_initializer (enum tree_code code, tree type, tree op)
13848 result = fold_build1 (code, type, op);
13855 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13860 result = fold_build2 (code, type, op0, op1);
13867 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13873 result = fold_build3 (code, type, op0, op1, op2);
13880 fold_build_call_array_initializer (tree type, tree fn,
13881 int nargs, tree *argarray)
13886 result = fold_build_call_array (type, fn, nargs, argarray);
13892 #undef START_FOLD_INIT
13893 #undef END_FOLD_INIT
13895 /* Determine if first argument is a multiple of second argument. Return 0 if
13896 it is not, or we cannot easily determined it to be.
13898 An example of the sort of thing we care about (at this point; this routine
13899 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13900 fold cases do now) is discovering that
13902 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13908 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13910 This code also handles discovering that
13912 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13914 is a multiple of 8 so we don't have to worry about dealing with a
13915 possible remainder.
13917 Note that we *look* inside a SAVE_EXPR only to determine how it was
13918 calculated; it is not safe for fold to do much of anything else with the
13919 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13920 at run time. For example, the latter example above *cannot* be implemented
13921 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13922 evaluation time of the original SAVE_EXPR is not necessarily the same at
13923 the time the new expression is evaluated. The only optimization of this
13924 sort that would be valid is changing
13926 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13930 SAVE_EXPR (I) * SAVE_EXPR (J)
13932 (where the same SAVE_EXPR (J) is used in the original and the
13933 transformed version). */
13936 multiple_of_p (tree type, const_tree top, const_tree bottom)
/* Trivially true when TOP and BOTTOM are the same expression.  */
13938 if (operand_equal_p (top, bottom, 0))
/* Only integer types are analyzed.  */
13941 if (TREE_CODE (type) != INTEGER_TYPE)
13944 switch (TREE_CODE (top))
13947 /* Bitwise and provides a power of two multiple. If the mask is
13948 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13949 if (!integer_pow2p (bottom))
/* MULT_EXPR: a product is a multiple if either factor is.  */
13954 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13955 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* PLUS/MINUS: both operands must individually be multiples.
   NOTE(review): the case labels for these arms are on omitted lines.  */;
13959 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13960 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* LSHIFT_EXPR by a constant: rewrite as multiplication by 1<<N and
   recurse, guarding against shift counts that overflow sizetype.  */
13963 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13967 op1 = TREE_OPERAND (top, 1);
13968 /* const_binop may not detect overflow correctly,
13969 so check for it explicitly here. */
13970 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13971 > TREE_INT_CST_LOW (op1)
13972 && TREE_INT_CST_HIGH (op1) == 0
13973 && 0 != (t1 = fold_convert (type,
13974 const_binop (LSHIFT_EXPR,
13977 && !TREE_OVERFLOW (t1))
13978 return multiple_of_p (type, t1, bottom);
13983 /* Can't handle conversions from non-integral or wider integral type. */
13984 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13985 || (TYPE_PRECISION (type)
13986 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13989 /* .. fall through ... */
/* SAVE_EXPR (and narrowing-safe conversions): look through to the
   computed value.  */
13992 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST: decide by exact division; bail out for zero divisors
   and for negative constants in unsigned types, where TRUNC_MOD would
   give the wrong answer.  */
13995 if (TREE_CODE (bottom) != INTEGER_CST
13996 || integer_zerop (bottom)
13997 || (TYPE_UNSIGNED (type)
13998 && (tree_int_cst_sgn (top) < 0
13999 || tree_int_cst_sgn (bottom) < 0)))
14001 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14009 /* Return true if `t' is known to be non-negative. If the return
14010 value is based on the assumption that signed overflow is undefined,
14011 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14012 *STRICT_OVERFLOW_P. */
14015 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14017 if (t == error_mark_node)
/* Unsigned types are trivially non-negative.  */
14020 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14023 switch (TREE_CODE (t))
14026 /* Query VRP to see if it has recorded any information about
14027 the range of this object. */
14028 return ssa_name_nonnegative_p (t);
14031 /* We can't return 1 if flag_wrapv is set because
14032 ABS_EXPR<INT_MIN> = INT_MIN. */
14033 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14035 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
14037 *strict_overflow_p = true;
/* Constants: check the sign directly.  NOTE(review): the INTEGER_CST /
   REAL_CST / FIXED_CST case labels are on lines omitted from this
   excerpt.  */
14043 return tree_int_cst_sgn (t) >= 0;
14046 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14049 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14051 case POINTER_PLUS_EXPR:
/* PLUS_EXPR: for floats, nonneg + nonneg is nonneg (no wraparound).  */
14053 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14054 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14056 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14057 strict_overflow_p));
14059 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14060 both unsigned and at least 2 bits shorter than the result. */
14061 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14062 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14063 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14065 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14066 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14067 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14068 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14070 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14071 TYPE_PRECISION (inner2)) + 1;
14072 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* MULT_EXPR (label on an omitted line).  */
14078 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14080 /* x * x for floating point x is always non-negative. */
14081 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
14083 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14085 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14086 strict_overflow_p));
14089 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14090 both unsigned and their total bits is shorter than the result. */
14091 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14092 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14093 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14095 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14096 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14097 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14098 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14099 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14100 < TYPE_PRECISION (TREE_TYPE (t));
/* BIT_IOR-like codes: nonneg if either operand is (labels omitted).  */
14106 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14108 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14109 strict_overflow_p));
14115 case TRUNC_DIV_EXPR:
14116 case CEIL_DIV_EXPR:
14117 case FLOOR_DIV_EXPR:
14118 case ROUND_DIV_EXPR:
/* Quotient of two non-negatives is non-negative.  */
14119 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14121 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14122 strict_overflow_p));
14124 case TRUNC_MOD_EXPR:
14125 case CEIL_MOD_EXPR:
14126 case FLOOR_MOD_EXPR:
14127 case ROUND_MOD_EXPR:
14129 case NON_LVALUE_EXPR:
14131 case FIX_TRUNC_EXPR:
/* Mod/conversion codes: sign follows the first operand.  */
14132 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14133 strict_overflow_p);
14135 case COMPOUND_EXPR:
14137 case GIMPLE_MODIFY_STMT:
/* Value of a compound/assignment is its second operand.  */
14138 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14139 strict_overflow_p);
/* BIND_EXPR (label omitted): value is the last expression of the body.  */
14142 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14143 strict_overflow_p);
/* COND_EXPR (label omitted): both arms must be non-negative.  */
14146 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14148 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14149 strict_overflow_p));
/* NOP_EXPR (label omitted): conversions between real/integer types.  */
14153 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14154 tree outer_type = TREE_TYPE (t);
14156 if (TREE_CODE (outer_type) == REAL_TYPE)
14158 if (TREE_CODE (inner_type) == REAL_TYPE)
14159 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14160 strict_overflow_p);
14161 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14163 if (TYPE_UNSIGNED (inner_type))
14165 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14166 strict_overflow_p);
14169 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14171 if (TREE_CODE (inner_type) == REAL_TYPE)
14172 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
14173 strict_overflow_p);
14174 if (TREE_CODE (inner_type) == INTEGER_TYPE)
/* A widening zero-extension cannot produce a negative value.  */
14175 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14176 && TYPE_UNSIGNED (inner_type);
/* TARGET_EXPR (label omitted): analyze the initializer, chasing the
   final store into the temporary slot when the initializer is void.  */
14183 tree temp = TARGET_EXPR_SLOT (t);
14184 t = TARGET_EXPR_INITIAL (t);
14186 /* If the initializer is non-void, then it's a normal expression
14187 that will be assigned to the slot. */
14188 if (!VOID_TYPE_P (t))
14189 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14191 /* Otherwise, the initializer sets the slot in some way. One common
14192 way is an assignment statement at the end of the initializer. */
14195 if (TREE_CODE (t) == BIND_EXPR)
14196 t = expr_last (BIND_EXPR_BODY (t));
14197 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14198 || TREE_CODE (t) == TRY_CATCH_EXPR)
14199 t = expr_last (TREE_OPERAND (t, 0));
14200 else if (TREE_CODE (t) == STATEMENT_LIST)
14205 if ((TREE_CODE (t) == MODIFY_EXPR
14206 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14207 && GENERIC_TREE_OPERAND (t, 0) == temp)
14208 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14209 strict_overflow_p);
/* CALL_EXPR (label omitted): known math builtins with provably
   non-negative results, grouped by the argument condition required.  */
14216 tree fndecl = get_callee_fndecl (t);
14217 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14218 switch (DECL_FUNCTION_CODE (fndecl))
14220 CASE_FLT_FN (BUILT_IN_ACOS):
14221 CASE_FLT_FN (BUILT_IN_ACOSH):
14222 CASE_FLT_FN (BUILT_IN_CABS):
14223 CASE_FLT_FN (BUILT_IN_COSH):
14224 CASE_FLT_FN (BUILT_IN_ERFC):
14225 CASE_FLT_FN (BUILT_IN_EXP):
14226 CASE_FLT_FN (BUILT_IN_EXP10):
14227 CASE_FLT_FN (BUILT_IN_EXP2):
14228 CASE_FLT_FN (BUILT_IN_FABS):
14229 CASE_FLT_FN (BUILT_IN_FDIM):
14230 CASE_FLT_FN (BUILT_IN_HYPOT):
14231 CASE_FLT_FN (BUILT_IN_POW10):
14232 CASE_INT_FN (BUILT_IN_FFS):
14233 CASE_INT_FN (BUILT_IN_PARITY):
14234 CASE_INT_FN (BUILT_IN_POPCOUNT):
14235 case BUILT_IN_BSWAP32:
14236 case BUILT_IN_BSWAP64:
14240 CASE_FLT_FN (BUILT_IN_SQRT):
14241 /* sqrt(-0.0) is -0.0. */
14242 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
14244 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14245 strict_overflow_p);
14247 CASE_FLT_FN (BUILT_IN_ASINH):
14248 CASE_FLT_FN (BUILT_IN_ATAN):
14249 CASE_FLT_FN (BUILT_IN_ATANH):
14250 CASE_FLT_FN (BUILT_IN_CBRT):
14251 CASE_FLT_FN (BUILT_IN_CEIL):
14252 CASE_FLT_FN (BUILT_IN_ERF):
14253 CASE_FLT_FN (BUILT_IN_EXPM1):
14254 CASE_FLT_FN (BUILT_IN_FLOOR):
14255 CASE_FLT_FN (BUILT_IN_FMOD):
14256 CASE_FLT_FN (BUILT_IN_FREXP):
14257 CASE_FLT_FN (BUILT_IN_LCEIL):
14258 CASE_FLT_FN (BUILT_IN_LDEXP):
14259 CASE_FLT_FN (BUILT_IN_LFLOOR):
14260 CASE_FLT_FN (BUILT_IN_LLCEIL):
14261 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14262 CASE_FLT_FN (BUILT_IN_LLRINT):
14263 CASE_FLT_FN (BUILT_IN_LLROUND):
14264 CASE_FLT_FN (BUILT_IN_LRINT):
14265 CASE_FLT_FN (BUILT_IN_LROUND):
14266 CASE_FLT_FN (BUILT_IN_MODF):
14267 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14268 CASE_FLT_FN (BUILT_IN_RINT):
14269 CASE_FLT_FN (BUILT_IN_ROUND):
14270 CASE_FLT_FN (BUILT_IN_SCALB):
14271 CASE_FLT_FN (BUILT_IN_SCALBLN):
14272 CASE_FLT_FN (BUILT_IN_SCALBN):
14273 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14274 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14275 CASE_FLT_FN (BUILT_IN_SINH):
14276 CASE_FLT_FN (BUILT_IN_TANH):
14277 CASE_FLT_FN (BUILT_IN_TRUNC):
14278 /* True if the 1st argument is nonnegative. */
14279 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14280 strict_overflow_p);
14282 CASE_FLT_FN (BUILT_IN_FMAX):
14283 /* True if the 1st OR 2nd arguments are nonnegative. */
14284 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14286 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14287 strict_overflow_p)));
14289 CASE_FLT_FN (BUILT_IN_FMIN):
14290 /* True if the 1st AND 2nd arguments are nonnegative. */
14291 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14293 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14294 strict_overflow_p)));
14296 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14297 /* True if the 2nd argument is nonnegative. */
14298 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14299 strict_overflow_p);
14301 CASE_FLT_FN (BUILT_IN_POWI):
14302 /* True if the 1st argument is nonnegative or the second
14303 argument is an even integer. */
14304 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14306 tree arg1 = CALL_EXPR_ARG (t, 1);
14307 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14310 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14311 strict_overflow_p);
14313 CASE_FLT_FN (BUILT_IN_POW):
14314 /* True if the 1st argument is nonnegative or the second
14315 argument is an even integer valued real. */
14316 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14321 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14322 n = real_to_integer (&c);
14325 REAL_VALUE_TYPE cint;
14326 real_from_integer (&cint, VOIDmode, n,
14327 n < 0 ? -1 : 0, 0);
/* Even exponent only counts when the REAL_CST is exactly integral.  */
14328 if (real_identical (&c, &cint))
14332 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14333 strict_overflow_p);
14340 /* ... fall through ... */
/* Default: a truth-valued expression is 0 or 1, hence non-negative,
   except for a signed 1-bit type whose values are 0 and -1.  */
14344 tree type = TREE_TYPE (t);
14345 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14346 && truth_value_p (TREE_CODE (t)))
14347 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14348 have a signed:1 type (where the value is -1 and 0). */
14353 /* We don't know sign of `t', so be conservative and return false. */
14357 /* Return true if `t' is known to be non-negative. Handle warnings
14358 about undefined signed overflow. */
14361 tree_expr_nonnegative_p (tree t)
14363 bool ret, strict_overflow_p;
14365 strict_overflow_p = false;
14366 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
/* If the answer relied on signed overflow being undefined, emit the
   -Wstrict-overflow style warning before returning.  */
14367 if (strict_overflow_p)
14368 fold_overflow_warning (("assuming signed overflow does not occur when "
14369 "determining that expression is always "
14371 WARN_STRICT_OVERFLOW_MISC);
14375 /* Return true when T is an address and is known to be nonzero.
14376    For floating point we further ensure that T is not denormal.
14377    Similar logic is present in nonzero_address in rtlanal.h.
14379    If the return value is based on the assumption that signed overflow
14380    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14381    change *STRICT_OVERFLOW_P.  */
14384 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14386   tree type = TREE_TYPE (t);
14387   bool sub_strict_overflow_p;
14389   /* Doing something useful for floating point would need more work.  */
14390   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
      /* Dispatch on the tree code of T; each case below proves (or fails
	 to prove) that the value cannot be zero.  */
14393   switch (TREE_CODE (t))
14396       /* Query VRP to see if it has recorded any information about
14397 	 the range of this object.  */
14398       return ssa_name_nonzero_p (t);
14401       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14402 strict_overflow_p);
14405       return !integer_zerop (t);
14407     case POINTER_PLUS_EXPR:
14409       if (TYPE_OVERFLOW_UNDEFINED (type))
14411 	  /* With the presence of negative values it is hard
14412 	     to say something.  */
14413 	  sub_strict_overflow_p = false;
14414 	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14415 &sub_strict_overflow_p)
14416 	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14417 &sub_strict_overflow_p))
14419 	  /* One of operands must be positive and the other non-negative.  */
14420 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
14421 	     overflows, on a twos-complement machine the sum of two
14422 	     nonnegative numbers can never be zero.  */
14423 	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14425 		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14426 strict_overflow_p));
      /* NOTE(review): presumably the multiplication case — a product of two
	 known-nonzero operands is nonzero only when signed overflow is
	 undefined, hence *STRICT_OVERFLOW_P is set; confirm the elided case
	 label against the full source.  */
14431       if (TYPE_OVERFLOW_UNDEFINED (type))
14433 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14435 	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14436 strict_overflow_p))
14438 	      *strict_overflow_p = true;
      /* Conversion: a nonzero value stays nonzero as long as the target
	 precision is not narrower than the source precision.  */
14446 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14447 	tree outer_type = TREE_TYPE (t);
14449 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14450 		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14451 strict_overflow_p));
      /* Address-taking: the address is nonzero unless the base object may
	 resolve to NULL at link time.  */
14457 	tree base = get_base_address (TREE_OPERAND (t, 0));
14462 	/* Weak declarations may link to NULL.  */
14463 	if (VAR_OR_FUNCTION_DECL_P (base))
14464 	  return !DECL_WEAK (base);
14466 	/* Constants are never weak.  */
14467 	if (CONSTANT_CLASS_P (base))
      /* NOTE(review): presumably the COND_EXPR case — nonzero when both
	 arms (operands 1 and 2) are nonzero; confirm elided label.  */
14474       sub_strict_overflow_p = false;
14475       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14476 &sub_strict_overflow_p)
14477 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14478 &sub_strict_overflow_p))
14480 	  if (sub_strict_overflow_p)
14481 	    *strict_overflow_p = true;
      /* NOTE(review): presumably the MIN_EXPR case — the minimum of two
	 nonzero values is nonzero; confirm elided label.  */
14487       sub_strict_overflow_p = false;
14488       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14489 &sub_strict_overflow_p)
14490 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14491 &sub_strict_overflow_p))
14493 	  if (sub_strict_overflow_p)
14494 	    *strict_overflow_p = true;
      /* MAX_EXPR: see the per-operand comments below.  */
14499       sub_strict_overflow_p = false;
14500       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14501 &sub_strict_overflow_p))
14503 	  if (sub_strict_overflow_p)
14504 	    *strict_overflow_p = true;
14506 	  /* When both operands are nonzero, then MAX must be too.  */
14507 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14508 strict_overflow_p))
14511 	  /* MAX where operand 0 is positive is positive.  */
14512 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14513 strict_overflow_p);
14515       /* MAX where operand 1 is positive is positive.  */
14516       else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14517 &sub_strict_overflow_p)
14518 	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14519 &sub_strict_overflow_p))
14521 	  if (sub_strict_overflow_p)
14522 	    *strict_overflow_p = true;
14527     case COMPOUND_EXPR:
14529     case GIMPLE_MODIFY_STMT:
      /* The value of these nodes is their second operand.  */
14531       return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14532 strict_overflow_p);
14535     case NON_LVALUE_EXPR:
14536       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14537 strict_overflow_p);
      /* Bitwise OR: nonzero if either operand is known nonzero.  */
14540       return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14542 	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14543 strict_overflow_p));
      /* A call to alloca never returns a null pointer.  */
14546       return alloca_call_p (t);
14554 /* Return true when T is an address and is known to be nonzero.
14555    Handle warnings about undefined signed overflow.  */
14558 tree_expr_nonzero_p (tree t)
14560   bool ret, strict_overflow_p;
14562   strict_overflow_p = false;
14563   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
      /* Emit the -Wstrict-overflow diagnostic here so callers of this
	 predicate need not track the flag themselves.  */
14564   if (strict_overflow_p)
14565     fold_overflow_warning (("assuming signed overflow does not occur when "
14566 "determining that expression is always "
14568 WARN_STRICT_OVERFLOW_MISC);
14572 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14573    attempt to fold the expression to a constant without modifying TYPE,
14576    If the expression could be simplified to a constant, then return
14577    the constant.  If the expression would not be simplified to a
14578    constant, then return NULL_TREE.  */
14581 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14583   tree tem = fold_binary (code, type, op0, op1);
      /* Only a TREE_CONSTANT fold result counts; a partial simplification
	 is discarded.  */
14584   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14587 /* Given the components of a unary expression CODE, TYPE and OP0,
14588    attempt to fold the expression to a constant without modifying
14591    If the expression could be simplified to a constant, then return
14592    the constant.  If the expression would not be simplified to a
14593    constant, then return NULL_TREE.  */
14596 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14598   tree tem = fold_unary (code, type, op0);
      /* Only a TREE_CONSTANT fold result counts; a partial simplification
	 is discarded.  */
14599   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14602 /* If EXP represents referencing an element in a constant string
14603    (either via pointer arithmetic or array indexing), return the
14604    tree representing the value accessed, otherwise return NULL.  */
14607 fold_read_from_constant_string (tree exp)
14609   if ((TREE_CODE (exp) == INDIRECT_REF
14610        || TREE_CODE (exp) == ARRAY_REF)
14611       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14613       tree exp1 = TREE_OPERAND (exp, 0);
      /* For *p the string and index come from the pointer expression;
	 for s[i] the index is operand 1 of the ARRAY_REF.  */
14617       if (TREE_CODE (exp) == INDIRECT_REF)
14618 	string = string_constant (exp1, &index);
14621 	  tree low_bound = array_ref_low_bound (exp);
14622 	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14624 	  /* Optimize the special-case of a zero lower bound.
14626 	     We convert the low_bound to sizetype to avoid some problems
14627 	     with constant folding.  (E.g. suppose the lower bound is 1,
14628 	     and its mode is QI.  Without the conversion, (ARRAY
14629 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14630 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14631 	  if (! integer_zerop (low_bound))
14632 	    index = size_diffop (index, fold_convert (sizetype, low_bound));
      /* Fold only when the index is a constant inside the string bounds and
	 the element is a single-byte integer whose mode matches EXP's.  */
14638 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14639 	  && TREE_CODE (string) == STRING_CST
14640 	  && TREE_CODE (index) == INTEGER_CST
14641 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14642 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14644 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14645 	return build_int_cst_type (TREE_TYPE (exp),
14646 (TREE_STRING_POINTER (string)
14647 [TREE_INT_CST_LOW (index)]));
14652 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14653    an integer constant, real, or fixed-point constant.
14655    TYPE is the type of the result.  */
14658 fold_negate_const (tree arg0, tree type)
14660   tree t = NULL_TREE;
14662   switch (TREE_CODE (arg0))
14666 	unsigned HOST_WIDE_INT low;
14667 	HOST_WIDE_INT high;
      /* Negate the double-word integer; neg_double reports whether the
	 negation itself overflowed (e.g. negating the most negative
	 value).  */
14668 	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14669 TREE_INT_CST_HIGH (arg0),
      /* Overflow only matters for signed types, hence the
	 !TYPE_UNSIGNED test.  */
14671 	t = force_fit_type_double (type, low, high, 1,
14672 (overflow | TREE_OVERFLOW (arg0))
14673 && !TYPE_UNSIGNED (type));
14678       t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14683 	FIXED_VALUE_TYPE f;
14684 	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14685 &(TREE_FIXED_CST (arg0)), NULL,
14686 TYPE_SATURATING (type));
14687 	t = build_fixed (type, f);
14688 	/* Propagate overflow flags.  */
14689 	if (overflow_p | TREE_OVERFLOW (arg0))
14691 	    TREE_OVERFLOW (t) = 1;
14692 	    TREE_CONSTANT_OVERFLOW (t) = 1;
14694 	else if (TREE_CONSTANT_OVERFLOW (arg0))
14695 	  TREE_CONSTANT_OVERFLOW (t) = 1;
      /* Callers guarantee ARG0 is one of the constant kinds above.  */
14700       gcc_unreachable ();
14706 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14707    an integer constant or real constant.
14709    TYPE is the type of the result.  */
14712 fold_abs_const (tree arg0, tree type)
14714   tree t = NULL_TREE;
14716   switch (TREE_CODE (arg0))
14719       /* If the value is unsigned, then the absolute value is
14720 	 the same as the ordinary value.  */
14721       if (TYPE_UNSIGNED (type))
14723       /* Similarly, if the value is non-negative.  */
14724       else if (INT_CST_LT (integer_minus_one_node, arg0))
14726       /* If the value is negative, then the absolute value is
14730 	  unsigned HOST_WIDE_INT low;
14731 	  HOST_WIDE_INT high;
      /* abs(x) = -x for negative x; neg_double reports overflow for the
	 most negative value, which has no positive counterpart.  */
14732 	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14733 TREE_INT_CST_HIGH (arg0),
14735 	  t = force_fit_type_double (type, low, high, -1,
14736 overflow | TREE_OVERFLOW (arg0));
14741       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14742 	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      /* Callers guarantee ARG0 is an INTEGER_CST or REAL_CST.  */
14748       gcc_unreachable ();
14754 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14755    constant.  TYPE is the type of the result.  */
14758 fold_not_const (tree arg0, tree type)
14760   tree t = NULL_TREE;
14762   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
      /* Bitwise-complement both halves of the double-word value and let
	 force_fit_type_double truncate/extend it to TYPE.  */
14764   t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14765 ~TREE_INT_CST_HIGH (arg0), 0,
14766 TREE_OVERFLOW (arg0));
14771 /* Given CODE, a relational operator, the target type, TYPE and two
14772    constant operands OP0 and OP1, return the result of the
14773    relational operation.  If the result is not a compile time
14774    constant, then return NULL_TREE.  */
14777 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14779   int result, invert;
14781   /* From here on, the only cases we handle are when the result is
14782      known to be a constant.  */
14784   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14786       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14787       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14789       /* Handle the cases where either operand is a NaN.  */
14790       if (real_isnan (c0) || real_isnan (c1))
14800 	    case UNORDERED_EXPR:
      /* NOTE(review): with -ftrapping-math, folding away a comparison
	 that would raise an exception on NaN must be avoided; the elided
	 lines presumably bail out here — confirm against full source.  */
14814 	      if (flag_trapping_math)
14820 	      gcc_unreachable ();
14823 	  return constant_boolean_node (result, type);
      /* Ordered comparison of two non-NaN reals.  */
14826       return constant_boolean_node (real_compare (code, c0, c1), type);
14829   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14831       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14832       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14833       return constant_boolean_node (fixed_compare (code, c0, c1), type);
14836   /* Handle equality/inequality of complex constants.  */
14837   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14839       tree rcond = fold_relational_const (code, type,
14840 TREE_REALPART (op0),
14841 TREE_REALPART (op1));
14842       tree icond = fold_relational_const (code, type,
14843 TREE_IMAGPART (op0),
14844 TREE_IMAGPART (op1));
14845       if (code == EQ_EXPR)
14846 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14847       else if (code == NE_EXPR)
14848 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14853   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14855      To compute GT, swap the arguments and do LT.
14856      To compute GE, do LT and invert the result.
14857      To compute LE, swap the arguments, do LT and invert the result.
14858      To compute NE, do EQ and invert the result.
14860      Therefore, the code below must handle only EQ and LT.  */
14862   if (code == LE_EXPR || code == GT_EXPR)
14867       code = swap_tree_comparison (code);
14870   /* Note that it is safe to invert for real values here because we
14871      have already handled the one case that it matters.  */
14874   if (code == NE_EXPR || code == GE_EXPR)
14877       code = invert_tree_comparison (code, false);
14880   /* Compute a result for LT or EQ if args permit;
14881      Otherwise return T.  */
14882   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14884       if (code == EQ_EXPR)
14885 	result = tree_int_cst_equal (op0, op1);
14886       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14887 	result = INT_CST_LT_UNSIGNED (op0, op1);
14889 	result = INT_CST_LT (op0, op1);
      /* NOTE(review): RESULT is presumably XORed with INVERT on an elided
	 line before being materialized — confirm against full source.  */
14896   return constant_boolean_node (result, type);
14899 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14900    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14904 fold_build_cleanup_point_expr (tree type, tree expr)
14906   /* If the expression does not have side effects then we don't have to wrap
14907      it with a cleanup point expression.  */
14908   if (!TREE_SIDE_EFFECTS (expr))
14911   /* If the expression is a return, check to see if the expression inside the
14912      return has no side effects or the right hand side of the modify expression
14913      inside the return.  If either don't have side effects set we don't need to
14914      wrap the expression in a cleanup point expression.  Note we don't check the
14915      left hand side of the modify because it should always be a return decl.  */
14916   if (TREE_CODE (expr) == RETURN_EXPR)
14918       tree op = TREE_OPERAND (expr, 0);
14919       if (!op || !TREE_SIDE_EFFECTS (op))
      /* Descend into the RHS of the modify expression inside the return.  */
14921       op = TREE_OPERAND (op, 1);
14922       if (!TREE_SIDE_EFFECTS (op))
      /* Side effects present: wrap EXPR so cleanups run at this point.  */
14926   return build1 (CLEANUP_POINT_EXPR, type, expr);
14929 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14930    of an indirection through OP0, or NULL_TREE if no simplification is
14934 fold_indirect_ref_1 (tree type, tree op0)
14940   subtype = TREE_TYPE (sub);
      /* Only pointer-typed operands can be dereferenced.  */
14941   if (!POINTER_TYPE_P (subtype))
14944   if (TREE_CODE (sub) == ADDR_EXPR)
14946       tree op = TREE_OPERAND (sub, 0);
14947       tree optype = TREE_TYPE (op);
14948       /* *&CONST_DECL -> to the value of the const decl.  */
14949       if (TREE_CODE (op) == CONST_DECL)
14950 	return DECL_INITIAL (op);
14951       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14952       if (type == optype)
14954 	  tree fop = fold_read_from_constant_string (op);
14960       /* *(foo *)&fooarray => fooarray[0] */
14961       else if (TREE_CODE (optype) == ARRAY_TYPE
14962 	       && type == TREE_TYPE (optype))
14964 	  tree type_domain = TYPE_DOMAIN (optype);
14965 	  tree min_val = size_zero_node;
      /* Arrays need not be zero-based; index with the domain's minimum.  */
14966 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14967 	    min_val = TYPE_MIN_VALUE (type_domain);
14968 	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14970       /* *(foo *)&complexfoo => __real__ complexfoo */
14971       else if (TREE_CODE (optype) == COMPLEX_TYPE
14972 	       && type == TREE_TYPE (optype))
14973 	return fold_build1 (REALPART_EXPR, type, op);
14974       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14975       else if (TREE_CODE (optype) == VECTOR_TYPE
14976 	       && type == TREE_TYPE (optype))
14978 	  tree part_width = TYPE_SIZE (type);
14979 	  tree index = bitsize_int (0);
14980 	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14984   /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14985   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14986       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14988       tree op00 = TREE_OPERAND (sub, 0);
14989       tree op01 = TREE_OPERAND (sub, 1);
14993       op00type = TREE_TYPE (op00);
14994       if (TREE_CODE (op00) == ADDR_EXPR
14995 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14996 	  && type == TREE_TYPE (TREE_TYPE (op00type)))
14998 	  tree size = TYPE_SIZE_UNIT (type);
      /* The offset must equal exactly one element size — i.e. the
	 imaginary part.  */
14999 	  if (tree_int_cst_equal (size, op01))
15000 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15004   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15005   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15006       && type == TREE_TYPE (TREE_TYPE (subtype)))
15009       tree min_val = size_zero_node;
15010       sub = build_fold_indirect_ref (sub);
15011       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15012       if (type_domain && TYPE_MIN_VALUE (type_domain))
15013 	min_val = TYPE_MIN_VALUE (type_domain);
15014       return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15020 /* Builds an expression for an indirection through T, simplifying some
15024 build_fold_indirect_ref (tree t)
15026   tree type = TREE_TYPE (TREE_TYPE (t));
15027   tree sub = fold_indirect_ref_1 (type, t);
      /* No simplification found: emit a plain INDIRECT_REF.  */
15032   return build1 (INDIRECT_REF, type, t);
15035 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
15038 fold_indirect_ref (tree t)
      /* Try to simplify the indirection through T's pointer operand.  */
15040   tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15048 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15049    whose result is ignored.  The type of the returned tree need not be
15050    the same as the original expression.  */
15053 fold_ignored_result (tree t)
15055   if (!TREE_SIDE_EFFECTS (t))
15056     return integer_zero_node;
      /* Peel wrapper nodes repeatedly until nothing more can be stripped.  */
15059     switch (TREE_CODE_CLASS (TREE_CODE (t)))
      /* Unary nodes: the side effects live in the operand.  */
15062 	t = TREE_OPERAND (t, 0);
15066       case tcc_comparison:
      /* Keep whichever operand carries the side effects; if both do,
	 nothing can be stripped.  */
15067 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15068 	  t = TREE_OPERAND (t, 0);
15069 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15070 	  t = TREE_OPERAND (t, 1);
15075       case tcc_expression:
15076 	switch (TREE_CODE (t))
15078 	  case COMPOUND_EXPR:
15079 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15081 	    t = TREE_OPERAND (t, 0);
      /* Conditional: strippable only when both arms are side-effect free.  */
15085 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15086 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15088 	    t = TREE_OPERAND (t, 0);
15101 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15102    This can only be applied to objects of a sizetype.  */
15105 round_up (tree value, int divisor)
15107   tree div = NULL_TREE;
15109   gcc_assert (divisor > 0);
15113   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15114      have to do anything.  Only do this when we are not given a const,
15115      because in that case, this check is more expensive than just
15117   if (TREE_CODE (value) != INTEGER_CST)
15119       div = build_int_cst (TREE_TYPE (value), divisor);
15121       if (multiple_of_p (TREE_TYPE (value), value, div))
15125   /* If divisor is a power of two, simplify this to bit manipulation.  */
15126   if (divisor == (divisor & -divisor))
15128       if (TREE_CODE (value) == INTEGER_CST)
15130 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15131 	  unsigned HOST_WIDE_INT high;
15134 	  if ((low & (divisor - 1)) == 0)
15137 	  overflow_p = TREE_OVERFLOW (value);
15138 	  high = TREE_INT_CST_HIGH (value);
      /* Clear the low bits; the elided lines presumably add DIVISOR and
	 propagate any carry into HIGH — confirm against full source.  */
15139 	  low &= ~(divisor - 1);
15148 	  return force_fit_type_double (TREE_TYPE (value), low, high,
      /* Non-constant power of two: (value + divisor-1) & -divisor.  */
15155 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
15156 	  value = size_binop (PLUS_EXPR, value, t);
15157 	  t = build_int_cst (TREE_TYPE (value), -divisor);
15158 	  value = size_binop (BIT_AND_EXPR, value, t);
      /* General case: ceil-divide, then multiply back.  */
15164       div = build_int_cst (TREE_TYPE (value), divisor);
15165       value = size_binop (CEIL_DIV_EXPR, value, div);
15166       value = size_binop (MULT_EXPR, value, div);
15172 /* Likewise, but round down.  */
15175 round_down (tree value, int divisor)
15177   tree div = NULL_TREE;
15179   gcc_assert (divisor > 0);
15183   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15184      have to do anything.  Only do this when we are not given a const,
15185      because in that case, this check is more expensive than just
15187   if (TREE_CODE (value) != INTEGER_CST)
15189       div = build_int_cst (TREE_TYPE (value), divisor);
15191       if (multiple_of_p (TREE_TYPE (value), value, div))
15195   /* If divisor is a power of two, simplify this to bit manipulation.  */
15196   if (divisor == (divisor & -divisor))
      /* Power of two: value & -divisor clears the low bits.  */
15200       t = build_int_cst (TREE_TYPE (value), -divisor);
15201       value = size_binop (BIT_AND_EXPR, value, t);
      /* General case: floor-divide, then multiply back.  */
15206       div = build_int_cst (TREE_TYPE (value), divisor);
15207       value = size_binop (FLOOR_DIV_EXPR, value, div);
15208       value = size_binop (MULT_EXPR, value, div);
15214 /* Returns the pointer to the base of the object addressed by EXP and
15215    extracts the information about the offset of the access, storing it
15216    to PBITPOS and POFFSET.  */
15219 split_address_to_core_and_offset (tree exp,
15220 HOST_WIDE_INT *pbitpos, tree *poffset)
15223   enum machine_mode mode;
15224   int unsignedp, volatilep;
15225   HOST_WIDE_INT bitsize;
15227   if (TREE_CODE (exp) == ADDR_EXPR)
      /* For &obj, decompose the addressed reference into base, constant
	 bit position and variable offset, then retake the address of the
	 base.  */
15229       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15230 poffset, &mode, &unsignedp, &volatilep,
15232       core = fold_addr_expr (core);
      /* Anything else is its own core with no offset.  */
15238       *poffset = NULL_TREE;
15244 /* Returns true if addresses of E1 and E2 differ by a constant, false
15245    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
15248 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15251   HOST_WIDE_INT bitpos1, bitpos2;
15252   tree toffset1, toffset2, tdiff, type;
15254   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15255   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
      /* Bit positions must be byte-aligned and the cores identical,
	 otherwise no constant difference can be computed.  */
15257   if (bitpos1 % BITS_PER_UNIT != 0
15258       || bitpos2 % BITS_PER_UNIT != 0
15259       || !operand_equal_p (core1, core2, 0))
15262   if (toffset1 && toffset2)
15264       type = TREE_TYPE (toffset1);
15265       if (type != TREE_TYPE (toffset2))
15266 	toffset2 = fold_convert (type, toffset2);
      /* The variable offsets must fold to a host-representable constant.  */
15268       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15269       if (!cst_and_fits_in_hwi (tdiff))
15272       *diff = int_cst_value (tdiff);
15274   else if (toffset1 || toffset2)
15276       /* If only one of the offsets is non-constant, the difference cannot
      /* Account for the constant bit-position part of each address.  */
15283   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15287 /* Simplify the floating point expression EXP when the sign of the
15288 result is not significant. Return NULL_TREE if no simplification
15292 fold_strip_sign_ops (tree exp)
15296 switch (TREE_CODE (exp))
15300 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15301 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15305 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15307 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15308 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15309 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15310 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15311 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15312 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15315 case COMPOUND_EXPR:
15316 arg0 = TREE_OPERAND (exp, 0);
15317 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15319 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15323 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15324 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15326 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15327 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15328 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15333 const enum built_in_function fcode = builtin_mathfn_code (exp);
15336 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15337 /* Strip copysign function call, return the 1st argument. */
15338 arg0 = CALL_EXPR_ARG (exp, 0);
15339 arg1 = CALL_EXPR_ARG (exp, 1);
15340 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15343 /* Strip sign ops from the argument of "odd" math functions. */
15344 if (negate_mathfn_p (fcode))
15346 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15348 return build_call_expr (get_callee_fndecl (exp), 1, arg0);