1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
65 #include "langhooks.h"
68 /* Nonzero if we are folding constants inside an initializer; zero
70 int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
206 int sign_extended_type;
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
285 int sign_extended_type;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
300 || (overflowable > 0 && sign_extended_type))
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 unsigned HOST_WIDE_INT l;
331 h = h1 + h2 + (l < l1);
337 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
339 return OVERFLOW_SUM_SIGN (h1, h2, h);
342 /* Negate a doubleword integer with doubleword result.
343 Return nonzero if the operation overflows, assuming it's signed.
344 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
345 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
348 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
349 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
355 return (*hv & h1) < 0;
365 /* Multiply two doubleword integers with doubleword result.
366 Return nonzero if the operation overflows according to UNSIGNED_P.
367 Each argument is given as two `HOST_WIDE_INT' pieces.
368 One argument is L1 and H1; the other, L2 and H2.
369 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
372 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
373 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
374 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
377 HOST_WIDE_INT arg1[4];
378 HOST_WIDE_INT arg2[4];
379 HOST_WIDE_INT prod[4 * 2];
380 unsigned HOST_WIDE_INT carry;
382 unsigned HOST_WIDE_INT toplow, neglow;
383 HOST_WIDE_INT tophigh, neghigh;
385 encode (arg1, l1, h1);
386 encode (arg2, l2, h2);
388 memset (prod, 0, sizeof prod);
390 for (i = 0; i < 4; i++)
393 for (j = 0; j < 4; j++)
396 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
397 carry += arg1[i] * arg2[j];
398 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
400 prod[k] = LOWPART (carry);
401 carry = HIGHPART (carry);
406 decode (prod, lv, hv);
407 decode (prod + 4, &toplow, &tophigh);
409 /* Unsigned overflow is immediate. */
411 return (toplow | tophigh) != 0;
413 /* Check for signed overflow by calculating the signed representation of the
414 top half of the result; it should agree with the low half's sign bit. */
417 neg_double (l2, h2, &neglow, &neghigh);
418 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 neg_double (l1, h1, &neglow, &neghigh);
423 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
425 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428 /* Shift the doubleword integer in L1, H1 left by COUNT places
429 keeping only PREC bits of result.
430 Shift right if COUNT is negative.
431 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
435 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
436 HOST_WIDE_INT count, unsigned int prec,
437 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
439 unsigned HOST_WIDE_INT signmask;
443 rshift_double (l1, h1, -count, prec, lv, hv, arith);
447 if (SHIFT_COUNT_TRUNCATED)
450 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
452 /* Shifting by the host word size is undefined according to the
453 ANSI standard, so we must handle this as a special case. */
457 else if (count >= HOST_BITS_PER_WIDE_INT)
459 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
464 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
465 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
469 /* Sign extend all bits that are beyond the precision. */
471 signmask = -((prec > HOST_BITS_PER_WIDE_INT
472 ? ((unsigned HOST_WIDE_INT) *hv
473 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
474 : (*lv >> (prec - 1))) & 1);
476 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
478 else if (prec >= HOST_BITS_PER_WIDE_INT)
480 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
481 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
486 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
487 *lv |= signmask << prec;
491 /* Shift the doubleword integer in L1, H1 right by COUNT places
492 keeping only PREC bits of result. COUNT must be positive.
493 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
494 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
497 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
498 HOST_WIDE_INT count, unsigned int prec,
499 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502 unsigned HOST_WIDE_INT signmask;
505 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508 if (SHIFT_COUNT_TRUNCATED)
511 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
513 /* Shifting by the host word size is undefined according to the
514 ANSI standard, so we must handle this as a special case. */
518 else if (count >= HOST_BITS_PER_WIDE_INT)
521 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
525 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
527 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530 /* Zero / sign extend all bits that are beyond the precision. */
532 if (count >= (HOST_WIDE_INT)prec)
537 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
539 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
541 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
542 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
547 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
548 *lv |= signmask << (prec - count);
552 /* Rotate the doubleword integer in L1, H1 left by COUNT places
553 keeping only PREC bits of result.
554 Rotate right if COUNT is negative.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
569 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
575 /* Rotate the doubleword integer in L1, H1 left by COUNT places
576 keeping only PREC bits of result. COUNT must be positive.
577 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
580 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
581 HOST_WIDE_INT count, unsigned int prec,
582 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
584 unsigned HOST_WIDE_INT s1l, s2l;
585 HOST_WIDE_INT s1h, s2h;
591 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
592 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
597 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
598 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
599 CODE is a tree code for a kind of division, one of
600 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
602 It controls how the quotient is rounded to an integer.
603 Return nonzero if the operation overflows.
604 UNS nonzero says do unsigned division. */
607 div_and_round_double (enum tree_code code, int uns,
608 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
609 HOST_WIDE_INT hnum_orig,
610 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
611 HOST_WIDE_INT hden_orig,
612 unsigned HOST_WIDE_INT *lquo,
613 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
617 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
618 HOST_WIDE_INT den[4], quo[4];
620 unsigned HOST_WIDE_INT work;
621 unsigned HOST_WIDE_INT carry = 0;
622 unsigned HOST_WIDE_INT lnum = lnum_orig;
623 HOST_WIDE_INT hnum = hnum_orig;
624 unsigned HOST_WIDE_INT lden = lden_orig;
625 HOST_WIDE_INT hden = hden_orig;
628 if (hden == 0 && lden == 0)
629 overflow = 1, lden = 1;
631 /* Calculate quotient sign and convert operands to unsigned. */
637 /* (minimum integer) / (-1) is the only overflow case. */
638 if (neg_double (lnum, hnum, &lnum, &hnum)
639 && ((HOST_WIDE_INT) lden & hden) == -1)
645 neg_double (lden, hden, &lden, &hden);
649 if (hnum == 0 && hden == 0)
650 { /* single precision */
652 /* This unsigned division rounds toward zero. */
658 { /* trivial case: dividend < divisor */
659 /* hden != 0 already checked. */
666 memset (quo, 0, sizeof quo);
668 memset (num, 0, sizeof num); /* to zero 9th element */
669 memset (den, 0, sizeof den);
671 encode (num, lnum, hnum);
672 encode (den, lden, hden);
674 /* Special code for when the divisor < BASE. */
675 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
677 /* hnum != 0 already checked. */
678 for (i = 4 - 1; i >= 0; i--)
680 work = num[i] + carry * BASE;
681 quo[i] = work / lden;
687 /* Full double precision division,
688 with thanks to Don Knuth's "Seminumerical Algorithms". */
689 int num_hi_sig, den_hi_sig;
690 unsigned HOST_WIDE_INT quo_est, scale;
692 /* Find the highest nonzero divisor digit. */
693 for (i = 4 - 1;; i--)
700 /* Insure that the first digit of the divisor is at least BASE/2.
701 This is required by the quotient digit estimation algorithm. */
703 scale = BASE / (den[den_hi_sig] + 1);
705 { /* scale divisor and dividend */
707 for (i = 0; i <= 4 - 1; i++)
709 work = (num[i] * scale) + carry;
710 num[i] = LOWPART (work);
711 carry = HIGHPART (work);
716 for (i = 0; i <= 4 - 1; i++)
718 work = (den[i] * scale) + carry;
719 den[i] = LOWPART (work);
720 carry = HIGHPART (work);
721 if (den[i] != 0) den_hi_sig = i;
728 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
730 /* Guess the next quotient digit, quo_est, by dividing the first
731 two remaining dividend digits by the high order quotient digit.
732 quo_est is never low and is at most 2 high. */
733 unsigned HOST_WIDE_INT tmp;
735 num_hi_sig = i + den_hi_sig + 1;
736 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
737 if (num[num_hi_sig] != den[den_hi_sig])
738 quo_est = work / den[den_hi_sig];
742 /* Refine quo_est so it's usually correct, and at most one high. */
743 tmp = work - quo_est * den[den_hi_sig];
745 && (den[den_hi_sig - 1] * quo_est
746 > (tmp * BASE + num[num_hi_sig - 2])))
749 /* Try QUO_EST as the quotient digit, by multiplying the
750 divisor by QUO_EST and subtracting from the remaining dividend.
751 Keep in mind that QUO_EST is the I - 1st digit. */
754 for (j = 0; j <= den_hi_sig; j++)
756 work = quo_est * den[j] + carry;
757 carry = HIGHPART (work);
758 work = num[i + j] - LOWPART (work);
759 num[i + j] = LOWPART (work);
760 carry += HIGHPART (work) != 0;
763 /* If quo_est was high by one, then num[i] went negative and
764 we need to correct things. */
765 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
768 carry = 0; /* add divisor back in */
769 for (j = 0; j <= den_hi_sig; j++)
771 work = num[i + j] + den[j] + carry;
772 carry = HIGHPART (work);
773 num[i + j] = LOWPART (work);
776 num [num_hi_sig] += carry;
779 /* Store the quotient digit. */
784 decode (quo, lquo, hquo);
787 /* If result is negative, make it so. */
789 neg_double (*lquo, *hquo, lquo, hquo);
791 /* Compute trial remainder: rem = num - (quo * den) */
792 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
793 neg_double (*lrem, *hrem, lrem, hrem);
794 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
799 case TRUNC_MOD_EXPR: /* round toward zero */
800 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
804 case FLOOR_MOD_EXPR: /* round toward negative infinity */
805 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
808 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
816 case CEIL_MOD_EXPR: /* round toward positive infinity */
817 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827 case ROUND_MOD_EXPR: /* round to closest integer */
829 unsigned HOST_WIDE_INT labs_rem = *lrem;
830 HOST_WIDE_INT habs_rem = *hrem;
831 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
832 HOST_WIDE_INT habs_den = hden, htwice;
834 /* Get absolute values. */
836 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
838 neg_double (lden, hden, &labs_den, &habs_den);
840 /* If (2 * abs (lrem) >= abs (lden)) */
841 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
842 labs_rem, habs_rem, <wice, &htwice);
844 if (((unsigned HOST_WIDE_INT) habs_den
845 < (unsigned HOST_WIDE_INT) htwice)
846 || (((unsigned HOST_WIDE_INT) habs_den
847 == (unsigned HOST_WIDE_INT) htwice)
848 && (labs_den < ltwice)))
852 add_double (*lquo, *hquo,
853 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
868 /* Compute true remainder: rem = num - (quo * den) */
869 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
870 neg_double (*lrem, *hrem, lrem, hrem);
871 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
875 /* If ARG2 divides ARG1 with zero remainder, carries out the division
876 of type CODE and returns the quotient.
877 Otherwise returns NULL_TREE. */
880 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
882 unsigned HOST_WIDE_INT int1l, int2l;
883 HOST_WIDE_INT int1h, int2h;
884 unsigned HOST_WIDE_INT quol, reml;
885 HOST_WIDE_INT quoh, remh;
886 tree type = TREE_TYPE (arg1);
887 int uns = TYPE_UNSIGNED (type);
889 int1l = TREE_INT_CST_LOW (arg1);
890 int1h = TREE_INT_CST_HIGH (arg1);
891 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
892 &obj[some_exotic_number]. */
893 if (POINTER_TYPE_P (type))
896 type = signed_type_for (type);
897 fit_double_type (int1l, int1h, &int1l, &int1h,
901 fit_double_type (int1l, int1h, &int1l, &int1h, type);
902 int2l = TREE_INT_CST_LOW (arg2);
903 int2h = TREE_INT_CST_HIGH (arg2);
905 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
906 &quol, &quoh, &reml, &remh);
907 if (remh != 0 || reml != 0)
910 return build_int_cst_wide (type, quol, quoh);
913 /* This is nonzero if we should defer warnings about undefined
914 overflow. This facility exists because these warnings are a
915 special case. The code to estimate loop iterations does not want
916 to issue any warnings, since it works with expressions which do not
917 occur in user code. Various bits of cleanup code call fold(), but
918 only use the result if it has certain characteristics (e.g., is a
919 constant); that code only wants to issue a warning if the result is
922 static int fold_deferring_overflow_warnings;
924 /* If a warning about undefined overflow is deferred, this is the
925 warning. Note that this may cause us to turn two warnings into
926 one, but that is fine since it is sufficient to only give one
927 warning per expression. */
929 static const char* fold_deferred_overflow_warning;
931 /* If a warning about undefined overflow is deferred, this is the
932 level at which the warning should be emitted. */
934 static enum warn_strict_overflow_code fold_deferred_overflow_code;
936 /* Start deferring overflow warnings. We could use a stack here to
937 permit nested calls, but at present it is not necessary. */
940 fold_defer_overflow_warnings (void)
942 ++fold_deferring_overflow_warnings;
945 /* Stop deferring overflow warnings. If there is a pending warning,
946 and ISSUE is true, then issue the warning if appropriate. STMT is
947 the statement with which the warning should be associated (used for
948 location information); STMT may be NULL. CODE is the level of the
949 warning--a warn_strict_overflow_code value. This function will use
950 the smaller of CODE and the deferred code when deciding whether to
951 issue the warning. CODE may be zero to mean to always use the
955 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
960 gcc_assert (fold_deferring_overflow_warnings > 0);
961 --fold_deferring_overflow_warnings;
962 if (fold_deferring_overflow_warnings > 0)
964 if (fold_deferred_overflow_warning != NULL
966 && code < (int) fold_deferred_overflow_code)
967 fold_deferred_overflow_code = code;
971 warnmsg = fold_deferred_overflow_warning;
972 fold_deferred_overflow_warning = NULL;
974 if (!issue || warnmsg == NULL)
977 /* Use the smallest code level when deciding to issue the
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
982 if (!issue_strict_overflow_warning (code))
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 /* Stop deferring overflow warnings, ignoring any deferred
996 fold_undefer_and_ignore_overflow_warnings (void)
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1001 /* Whether we are deferring overflow warnings. */
1004 fold_deferring_overflow_warnings_p (void)
1006 return fold_deferring_overflow_warnings > 0;
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 gcc_assert (!flag_wrapv && !flag_trapv);
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing
1078 may_negate_without_overflow_p (tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1136 return negate_expr_p (TREE_REALPART (t))
1137 && negate_expr_p (TREE_IMAGPART (t));
1140 return negate_expr_p (TREE_OPERAND (t, 0))
1141 && negate_expr_p (TREE_OPERAND (t, 1));
1144 return negate_expr_p (TREE_OPERAND (t, 0));
1147 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1148 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 /* -(A + B) -> (-B) - A. */
1151 if (negate_expr_p (TREE_OPERAND (t, 1))
1152 && reorder_operands_p (TREE_OPERAND (t, 0),
1153 TREE_OPERAND (t, 1)))
1155 /* -(A + B) -> (-A) - B. */
1156 return negate_expr_p (TREE_OPERAND (t, 0));
1159 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1160 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1162 && reorder_operands_p (TREE_OPERAND (t, 0),
1163 TREE_OPERAND (t, 1));
1166 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1172 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1173 return negate_expr_p (TREE_OPERAND (t, 1))
1174 || negate_expr_p (TREE_OPERAND (t, 0));
1177 case TRUNC_DIV_EXPR:
1178 case ROUND_DIV_EXPR:
1179 case FLOOR_DIV_EXPR:
1181 case EXACT_DIV_EXPR:
1182 /* In general we can't negate A / B, because if A is INT_MIN and
1183 B is 1, we may turn this into INT_MIN / -1 which is undefined
1184 and actually traps on some architectures. But if overflow is
1185 undefined, we can negate, because - (INT_MIN / 1) is an
1187 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1188 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 return negate_expr_p (TREE_OPERAND (t, 1))
1191 || negate_expr_p (TREE_OPERAND (t, 0));
1194 /* Negate -((double)float) as (double)(-float). */
1195 if (TREE_CODE (type) == REAL_TYPE)
1197 tree tem = strip_float_extensions (t);
1199 return negate_expr_p (tem);
1204 /* Negate -f(x) as f(-x). */
1205 if (negate_mathfn_p (builtin_mathfn_code (t)))
1206 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1210 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1211 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 tree op1 = TREE_OPERAND (t, 1);
1214 if (TREE_INT_CST_HIGH (op1) == 0
1215 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1216 == TREE_INT_CST_LOW (op1))
1227 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1228 simplification is possible.
1229 If negate_expr_p would return true for T, NULL_TREE will never be
1233 fold_negate_expr (tree t)
1235 tree type = TREE_TYPE (t);
/* NOTE(review): several case labels/braces of this switch are elided in this
   excerpt; the groupings below are inferred from the visible bodies.  */
1238 switch (TREE_CODE (t))
1240 /* Convert - (~A) to A + 1. */
1242 if (INTEGRAL_TYPE_P (type))
1243 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1244 build_int_cst (type, 1));
/* Integer constant: negate it directly, but only keep the result if doing
   so did not newly set the overflow flag on a type where overflow traps.  */
1248 tem = fold_negate_const (t, type);
1249 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1250 || !TYPE_OVERFLOW_TRAPS (type))
/* Real constant: negation is safe unless a two's-complement FP format
   overflows and trapping math is enabled.  */
1255 tem = fold_negate_const (t, type);
1256 /* Two's complement FP formats, such as c4x, may overflow. */
1257 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
/* Complex constant: negate both halves; only fold when both halves
   folded to constants themselves.  */
1263 tree rpart = negate_expr (TREE_REALPART (t));
1264 tree ipart = negate_expr (TREE_IMAGPART (t));
1266 if ((TREE_CODE (rpart) == REAL_CST
1267 && TREE_CODE (ipart) == REAL_CST)
1268 || (TREE_CODE (rpart) == INTEGER_CST
1269 && TREE_CODE (ipart) == INTEGER_CST))
1270 return build_complex (type, rpart, ipart);
/* -(a COMPLEX_EXPR b) -> (-a) COMPLEX_EXPR (-b), guarded by negate_expr_p
   so fold_negate_expr cannot return NULL_TREE for the operands.  */
1275 if (negate_expr_p (t))
1276 return fold_build2 (COMPLEX_EXPR, type,
1277 fold_negate_expr (TREE_OPERAND (t, 0)),
1278 fold_negate_expr (TREE_OPERAND (t, 1)));
/* -conj(z) -> conj(-z).  */
1282 if (negate_expr_p (t))
1283 return fold_build1 (CONJ_EXPR, type,
1284 fold_negate_expr (TREE_OPERAND (t, 0)));
/* Double negation: -(-A) -> A.  */
1288 return TREE_OPERAND (t, 0);
/* PLUS_EXPR: only rewrite when sign-dependent rounding and signed
   zeros do not matter for this mode.  */
1291 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1292 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1294 /* -(A + B) -> (-B) - A. */
1295 if (negate_expr_p (TREE_OPERAND (t, 1))
1296 && reorder_operands_p (TREE_OPERAND (t, 0),
1297 TREE_OPERAND (t, 1)))
1299 tem = negate_expr (TREE_OPERAND (t, 1));
1300 return fold_build2 (MINUS_EXPR, type,
1301 tem, TREE_OPERAND (t, 0));
1304 /* -(A + B) -> (-A) - B. */
1305 if (negate_expr_p (TREE_OPERAND (t, 0)))
1307 tem = negate_expr (TREE_OPERAND (t, 0));
1308 return fold_build2 (MINUS_EXPR, type,
1309 tem, TREE_OPERAND (t, 1));
1315 /* - (A - B) -> B - A */
1316 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1317 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1318 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1319 return fold_build2 (MINUS_EXPR, type,
1320 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Multiplication (presumably MULT_EXPR — label elided): push the
   negation into whichever factor can absorb it; skip unsigned types.  */
1324 if (TYPE_UNSIGNED (type))
1330 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1332 tem = TREE_OPERAND (t, 1);
1333 if (negate_expr_p (tem))
1334 return fold_build2 (TREE_CODE (t), type,
1335 TREE_OPERAND (t, 0), negate_expr (tem));
1336 tem = TREE_OPERAND (t, 0);
1337 if (negate_expr_p (tem))
1338 return fold_build2 (TREE_CODE (t), type,
1339 negate_expr (tem), TREE_OPERAND (t, 1));
1343 case TRUNC_DIV_EXPR:
1344 case ROUND_DIV_EXPR:
1345 case FLOOR_DIV_EXPR:
1347 case EXACT_DIV_EXPR:
1348 /* In general we can't negate A / B, because if A is INT_MIN and
1349 B is 1, we may turn this into INT_MIN / -1 which is undefined
1350 and actually traps on some architectures. But if overflow is
1351 undefined, we can negate, because - (INT_MIN / 1) is an
1353 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1355 const char * const warnmsg = G_("assuming signed overflow does not "
1356 "occur when negating a division");
1357 tem = TREE_OPERAND (t, 1);
1358 if (negate_expr_p (tem))
/* Warn unless the divisor is a constant other than 1, in which case
   negating it cannot introduce the INT_MIN / -1 hazard.  */
1360 if (INTEGRAL_TYPE_P (type)
1361 && (TREE_CODE (tem) != INTEGER_CST
1362 || integer_onep (tem)))
1363 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1364 return fold_build2 (TREE_CODE (t), type,
1365 TREE_OPERAND (t, 0), negate_expr (tem));
1367 tem = TREE_OPERAND (t, 0)
1368 if (negate_expr_p (tem))
1370 if (INTEGRAL_TYPE_P (type)
1371 && (TREE_CODE (tem) != INTEGER_CST
1372 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1373 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1374 return fold_build2 (TREE_CODE (t), type,
1375 negate_expr (tem), TREE_OPERAND (t, 1));
1381 /* Convert -((double)float) into (double)(-float). */
1382 if (TREE_CODE (type) == REAL_TYPE)
1384 tem = strip_float_extensions (t);
1385 if (tem != t && negate_expr_p (tem))
1386 return negate_expr (tem);
1391 /* Negate -f(x) as f(-x). */
1392 if (negate_mathfn_p (builtin_mathfn_code (t))
1393 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1397 fndecl = get_callee_fndecl (t);
1398 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1399 return build_call_expr (fndecl, 1, arg);
1404 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1405 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1407 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count is exactly precision-1, i.e. the shift
   isolates the sign bit, so flipping signedness flips the result sign.  */
1408 if (TREE_INT_CST_HIGH (op1) == 0
1409 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1410 == TREE_INT_CST_LOW (op1))
1412 tree ntype = TYPE_UNSIGNED (type)
1413 ? signed_type_for (type)
1414 : unsigned_type_for (type);
1415 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1416 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1417 return fold_convert (type, temp);
1429 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1430 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1431 return NULL_TREE. */
1434 negate_expr (tree t)
/* Remember the original type before stripping sign-preserving conversions,
   so the result can be converted back at the end.  */
1441 type = TREE_TYPE (t);
1442 STRIP_SIGN_NOPS (t);
1444 tem = fold_negate_expr (t);
/* Fall back to an explicit NEGATE_EXPR when folding found nothing.
   (The guarding "if" for this fallback is elided in this excerpt.)  */
1446 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1447 return fold_convert (type, tem);
1450 /* Split a tree IN into a constant, literal and variable parts that could be
1451 combined with CODE to make IN. "constant" means an expression with
1452 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1453 commutative arithmetic operation. Store the constant part into *CONP,
1454 the literal in *LITP and return the variable part. If a part isn't
1455 present, set it to null. If the tree does not decompose in this way,
1456 return the entire tree as the variable part and the other parts as null.
1458 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1459 case, we negate an operand that was subtracted. Except if it is a
1460 literal for which we use *MINUS_LITP instead.
1462 If NEGATE_P is true, we are negating all of IN, again except a literal
1463 for which we use *MINUS_LITP instead.
1465 If IN is itself a literal or constant, return it as appropriate.
1467 Note that we do not guarantee that any of the three values will be the
1468 same type as IN, but they will have the same signedness and mode. */
1471 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1472 tree *minus_litp, int negate_p)
1480 /* Strip any conversions that don't change the machine mode or signedness. */
1481 STRIP_SIGN_NOPS (in);
1483 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1485 else if (TREE_CODE (in) == code
1486 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1487 /* We can associate addition and subtraction together (even
1488 though the C standard doesn't say so) for integers because
1489 the value is not affected. For reals, the value might be
1490 affected, so we can't. */
1491 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1492 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1494 tree op0 = TREE_OPERAND (in, 0);
1495 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: the second operand is effectively negated when IN is a
   subtraction.  The neg_*_p flags record which part absorbed it.  */
1496 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1497 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1499 /* First see if either of the operands is a literal, then a constant. */
1500 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1501 *litp = op0, op0 = 0;
1502 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1503 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1505 if (op0 != 0 && TREE_CONSTANT (op0))
1506 *conp = op0, op0 = 0;
1507 else if (op1 != 0 && TREE_CONSTANT (op1))
1508 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1510 /* If we haven't dealt with either operand, this is not a case we can
1511 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1512 if (op0 != 0 && op1 != 0)
1517 var = op1, neg_var_p = neg1_p;
1519 /* Now do any needed negations. */
1521 *minus_litp = *litp, *litp = 0;
1523 *conp = negate_expr (*conp);
1525 var = negate_expr (var);
1527 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: a negated literal moves between *LITP and
   *MINUS_LITP rather than being wrapped in a negation.  (Some guard
   lines for this tail are elided in this excerpt.)  */
1535 *minus_litp = *litp, *litp = 0;
1536 else if (*minus_litp)
1537 *litp = *minus_litp, *minus_litp = 0;
1538 *conp = negate_expr (*conp);
1539 var = negate_expr (var);
1545 /* Re-associate trees split by the above function. T1 and T2 are either
1546 expressions to associate or null. Return the new expression, if any. If
1547 we build an operation, do it in TYPE and with CODE. */
1550 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1557 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1558 try to fold this since we will have infinite recursion. But do
1559 deal with any NEGATE_EXPRs. */
1560 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1561 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1563 if (code == PLUS_EXPR)
/* a + (-b) -> a - b, in either operand order.  */
1565 if (TREE_CODE (t1) == NEGATE_EXPR)
1566 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1567 fold_convert (type, TREE_OPERAND (t1, 0)));
1568 else if (TREE_CODE (t2) == NEGATE_EXPR)
1569 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1570 fold_convert (type, TREE_OPERAND (t2, 0)));
1571 else if (integer_zerop (t2))
1572 return fold_convert (type, t1);
1574 else if (code == MINUS_EXPR)
1576 if (integer_zerop (t2))
1577 return fold_convert (type, t1);
/* Unfoldable case: build the operation without folding (build2, not
   fold_build2) to avoid recursing back into the splitter.  */
1580 return build2 (code, type, fold_convert (type, t1),
1581 fold_convert (type, t2));
/* Normal case: safe to fold.  */
1584 return fold_build2 (code, type, fold_convert (type, t1),
1585 fold_convert (type, t2));
1588 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1589 for use in int_const_binop, size_binop and size_diffop. */
1592 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
/* Both types must be integer or pointer types.  (The early-return
   bodies for these guards are elided in this excerpt.)  */
1594 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1596 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Equivalent means: same signedness, same precision, same machine mode.  */
1611 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1612 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1613 && TYPE_MODE (type1) == TYPE_MODE (type2);
1617 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1618 to produce a new constant. Return NULL_TREE if we don't know how
1619 to evaluate CODE at compile-time.
1621 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1624 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Double-word arithmetic: each constant is a (low, high) HOST_WIDE_INT
   pair.  garbagel/garbageh receive the unwanted half of div/mod results.  */
1626 unsigned HOST_WIDE_INT int1l, int2l;
1627 HOST_WIDE_INT int1h, int2h;
1628 unsigned HOST_WIDE_INT low;
1630 unsigned HOST_WIDE_INT garbagel;
1631 HOST_WIDE_INT garbageh;
1633 tree type = TREE_TYPE (arg1);
1634 int uns = TYPE_UNSIGNED (type);
/* sizetype constants get signed-style overflow tracking even though
   they are unsigned.  */
1636 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1639 int1l = TREE_INT_CST_LOW (arg1);
1640 int1h = TREE_INT_CST_HIGH (arg1);
1641 int2l = TREE_INT_CST_LOW (arg2);
1642 int2h = TREE_INT_CST_HIGH (arg2);
/* NOTE(review): the case labels of the switch below (BIT_IOR_EXPR,
   BIT_XOR_EXPR, ... MIN/MAX_EXPR, judging by the bodies) are elided in
   this excerpt.  */
1647 low = int1l | int2l, hi = int1h | int2h;
1651 low = int1l ^ int2l, hi = int1h ^ int2h;
1655 low = int1l & int2l, hi = int1h & int2h;
1661 /* It's unclear from the C standard whether shifts can overflow.
1662 The following code ignores overflow; perhaps a C standard
1663 interpretation ruling is needed. */
1664 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1671 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1676 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction: negate the second operand, add, then recompute the
   overflow flag from the sign pattern of the operands and result.  */
1680 neg_double (int2l, int2h, &low, &hi);
1681 add_double (int1l, int1h, low, hi, &low, &hi);
1682 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1686 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1689 case TRUNC_DIV_EXPR:
1690 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1691 case EXACT_DIV_EXPR:
1692 /* This is a shortcut for a common special case. */
1693 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1694 && !TREE_OVERFLOW (arg1)
1695 && !TREE_OVERFLOW (arg2)
1696 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1698 if (code == CEIL_DIV_EXPR)
1701 low = int1l / int2l, hi = 0;
1705 /* ... fall through ... */
1707 case ROUND_DIV_EXPR:
/* Division by zero: not foldable.  */
1708 if (int2h == 0 && int2l == 0)
1710 if (int2h == 0 && int2l == 1)
1712 low = int1l, hi = int1h;
/* x / x == 1 (for x != 0).  */
1715 if (int1l == int2l && int1h == int2h
1716 && ! (int1l == 0 && int1h == 0))
1721 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1722 &low, &hi, &garbagel, &garbageh);
1725 case TRUNC_MOD_EXPR:
1726 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1727 /* This is a shortcut for a common special case. */
1728 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1729 && !TREE_OVERFLOW (arg1)
1730 && !TREE_OVERFLOW (arg2)
1731 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1733 if (code == CEIL_MOD_EXPR)
1735 low = int1l % int2l, hi = 0;
1739 /* ... fall through ... */
1741 case ROUND_MOD_EXPR:
1742 if (int2h == 0 && int2l == 0)
/* For mod, the remainder half of div_and_round_double is the result.  */
1744 overflow = div_and_round_double (code, uns,
1745 int1l, int1h, int2l, int2h,
1746 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare as unsigned or signed double-words as appropriate,
   then pick the operand selected by the comparison.  */
1752 low = (((unsigned HOST_WIDE_INT) int1h
1753 < (unsigned HOST_WIDE_INT) int2h)
1754 || (((unsigned HOST_WIDE_INT) int1h
1755 == (unsigned HOST_WIDE_INT) int2h)
1758 low = (int1h < int2h
1759 || (int1h == int2h && int1l < int2l));
1761 if (low == (code == MIN_EXPR))
1762 low = int1l, hi = int1h;
1764 low = int2l, hi = int2h;
/* NOTRUNC: build the raw wide constant and propagate overflow bits by
   hand; otherwise force_fit_type_double truncates and flags overflow.  */
1773 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1775 /* Propagate overflow flags ourselves. */
1776 if (((!uns || is_sizetype) && overflow)
1777 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1780 TREE_OVERFLOW (t) = 1;
1784 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1785 ((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1791 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1792 constant. We assume ARG1 and ARG2 have the same data type, or at least
1793 are the same kind of constant and the same machine mode. Return zero if
1794 combining the constants is not allowed in the current operating mode.
1796 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1799 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1801 /* Sanity check for the recursive cases. */
/* Integers delegate to the double-word integer folder.  */
1808 if (TREE_CODE (arg1) == INTEGER_CST)
1809 return int_const_binop (code, arg1, arg2, notrunc);
1811 if (TREE_CODE (arg1) == REAL_CST)
1813 enum machine_mode mode;
1816 REAL_VALUE_TYPE value;
1817 REAL_VALUE_TYPE result;
1821 /* The following codes are handled by real_arithmetic. */
1836 d1 = TREE_REAL_CST (arg1);
1837 d2 = TREE_REAL_CST (arg2);
1839 type = TREE_TYPE (arg1);
1840 mode = TYPE_MODE (type);
1842 /* Don't perform operation if we honor signaling NaNs and
1843 either operand is a NaN. */
1844 if (HONOR_SNANS (mode)
1845 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1848 /* Don't perform operation if it would raise a division
1849 by zero exception. */
1850 if (code == RDIV_EXPR
1851 && REAL_VALUES_EQUAL (d2, dconst0)
1852 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1855 /* If either operand is a NaN, just return it. Otherwise, set up
1856 for floating-point trap; we return an overflow. */
1857 if (REAL_VALUE_ISNAN (d1))
1859 else if (REAL_VALUE_ISNAN (d2))
/* Compute in internal precision, then round to the target mode;
   INEXACT records whether real_arithmetic lost precision.  */
1862 inexact = real_arithmetic (&value, code, &d1, &d2);
1863 real_convert (&result, mode, &value);
1865 /* Don't constant fold this floating point operation if
1866 the result has overflowed and flag_trapping_math. */
1867 if (flag_trapping_math
1868 && MODE_HAS_INFINITIES (mode)
1869 && REAL_VALUE_ISINF (result)
1870 && !REAL_VALUE_ISINF (d1)
1871 && !REAL_VALUE_ISINF (d2))
1874 /* Don't constant fold this floating point operation if the
1875 result may dependent upon the run-time rounding mode and
1876 flag_rounding_math is set, or if GCC's software emulation
1877 is unable to accurately represent the result. */
1878 if ((flag_rounding_math
1879 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1880 && !flag_unsafe_math_optimizations))
1881 && (inexact || !real_identical (&result, &value)))
1884 t = build_real (type, result);
1886 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1890 if (TREE_CODE (arg1) == COMPLEX_CST)
1892 tree type = TREE_TYPE (arg1);
1893 tree r1 = TREE_REALPART (arg1);
1894 tree i1 = TREE_IMAGPART (arg1);
1895 tree r2 = TREE_REALPART (arg2);
1896 tree i2 = TREE_IMAGPART (arg2);
/* Component-wise case (addition/subtraction — labels elided here).  */
1903 real = const_binop (code, r1, r2, notrunc);
1904 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1 + i1*i)(r2 + i2*i)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1908 real = const_binop (MINUS_EXPR,
1909 const_binop (MULT_EXPR, r1, r2, notrunc),
1910 const_binop (MULT_EXPR, i1, i2, notrunc),
1912 imag = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r1, i2, notrunc),
1914 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division by the textbook formula: divide (t1, t2) — the
   numerators of the real and imaginary parts — by |z2|^2.  */
1921 = const_binop (PLUS_EXPR,
1922 const_binop (MULT_EXPR, r2, r2, notrunc),
1923 const_binop (MULT_EXPR, i2, i2, notrunc),
1926 = const_binop (PLUS_EXPR,
1927 const_binop (MULT_EXPR, r1, r2, notrunc),
1928 const_binop (MULT_EXPR, i1, i2, notrunc),
1931 = const_binop (MINUS_EXPR,
1932 const_binop (MULT_EXPR, i1, r2, notrunc),
1933 const_binop (MULT_EXPR, r1, i2, notrunc),
1936 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1937 code = TRUNC_DIV_EXPR;
1939 real = const_binop (code, t1, magsquared, notrunc);
1940 imag = const_binop (code, t2, magsquared, notrunc);
1949 return build_complex (type, real, imag);
1955 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1956 indicates which particular sizetype to create. */
1959 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* sizetype_tab maps the kind enum to the corresponding sizetype node.  */
1961 return build_int_cst (sizetype_tab[(int) kind], number);
1964 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1965 is a tree code. The type of the result is taken from the operands.
1966 Both must be equivalent integer types, ala int_binop_types_match_p.
1967 If the operands are constant, so is the result. */
1970 size_binop (enum tree_code code, tree arg0, tree arg1)
1972 tree type = TREE_TYPE (arg0);
1974 if (arg0 == error_mark_node || arg1 == error_mark_node)
1975 return error_mark_node;
1977 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1980 /* Handle the special case of two integer constants faster. */
1981 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1983 /* And some specific cases even faster than that. */
/* Identity shortcuts (x+0, 0+x, x-0, 1*x); overflow-flagged constants
   are excluded so the flag is not silently dropped.  */
1984 if (code == PLUS_EXPR)
1986 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1988 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1991 else if (code == MINUS_EXPR)
1993 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1996 else if (code == MULT_EXPR)
1998 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2002 /* Handle general case of two integer constants. */
2003 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a regular folded expression.  */
2006 return fold_build2 (code, type, arg0, arg1);
2009 /* Given two values, either both of sizetype or both of bitsizetype,
2010 compute the difference between the two values. Return the value
2011 in signed type corresponding to the type of the operands. */
2014 size_diffop (tree arg0, tree arg1)
2016 tree type = TREE_TYPE (arg0);
2019 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2022 /* If the type is already signed, just do the simple thing. */
2023 if (!TYPE_UNSIGNED (type))
2024 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operands' (unsigned) size type.  */
2026 if (type == sizetype)
2028 else if (type == bitsizetype)
2029 ctype = sbitsizetype;
2031 ctype = signed_type_for (type);
2033 /* If either operand is not a constant, do the conversions to the signed
2034 type and subtract. The hardware will do the right thing with any
2035 overflow in the subtraction. */
2036 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2037 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2038 fold_convert (ctype, arg1));
2040 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2041 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2042 overflow) and negate (which can't either). Special-case a result
2043 of zero while we're here. */
2044 if (tree_int_cst_equal (arg0, arg1))
2045 return build_int_cst (ctype, 0);
2046 else if (tree_int_cst_lt (arg1, arg0))
2047 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) as 0 - (arg1 - arg0) in CTYPE.  */
2049 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2050 fold_convert (ctype, size_binop (MINUS_EXPR,
2054 /* A subroutine of fold_convert_const handling conversions of an
2055 INTEGER_CST to another integer type. */
2058 fold_convert_const_int_from_int (tree type, tree arg1)
2062 /* Given an integer constant, make new constant with new type,
2063 appropriately sign-extended or truncated. */
2064 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2065 TREE_INT_CST_HIGH (arg1),
2066 /* Don't set the overflow when
2067 converting a pointer */
2068 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Overflow if a negative value is converted to a wider unsigned type
   (sign bit would change meaning), or if the source already overflowed.  */
2069 (TREE_INT_CST_HIGH (arg1) < 0
2070 && (TYPE_UNSIGNED (type)
2071 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2072 | TREE_OVERFLOW (arg1));
2077 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2078 to an integer type. */
2081 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2086 /* The following code implements the floating point to integer
2087 conversion rules required by the Java Language Specification,
2088 that IEEE NaNs are mapped to zero and values that overflow
2089 the target precision saturate, i.e. values greater than
2090 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2091 are mapped to INT_MIN. These semantics are allowed by the
2092 C and C++ standards that simply state that the behavior of
2093 FP-to-integer conversion is unspecified upon overflow. */
2095 HOST_WIDE_INT high, low;
2097 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round X toward an integer as requested by CODE; only the truncation
   case is visible in this excerpt.  */
2101 case FIX_TRUNC_EXPR:
2102 real_trunc (&r, VOIDmode, &x);
2109 /* If R is NaN, return zero and show we have an overflow. */
2110 if (REAL_VALUE_ISNAN (r))
2117 /* See if R is less than the lower bound or greater than the
/* Saturate to TYPE_MIN_VALUE / TYPE_MAX_VALUE on out-of-range input,
   marking overflow.  */
2122 tree lt = TYPE_MIN_VALUE (type);
2123 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2124 if (REAL_VALUES_LESS (r, l))
2127 high = TREE_INT_CST_HIGH (lt);
2128 low = TREE_INT_CST_LOW (lt);
2134 tree ut = TYPE_MAX_VALUE (type);
2137 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2138 if (REAL_VALUES_LESS (u, r))
2141 high = TREE_INT_CST_HIGH (ut);
2142 low = TREE_INT_CST_LOW (ut);
/* In-range: convert the rounded real to a double-word integer.  */
2148 REAL_VALUE_TO_INT (&low, &high, r);
2150 t = force_fit_type_double (type, low, high, -1,
2151 overflow | TREE_OVERFLOW (arg1));
2155 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2156 to another floating point type. */
2159 fold_convert_const_real_from_real (tree type, tree arg1)
2161 REAL_VALUE_TYPE value;
/* Round the constant to the target mode and carry the overflow flag over
   from the source constant.  */
2164 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2165 t = build_real (type, value);
2167 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2171 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2172 type TYPE. If no simplification can be done return NULL_TREE. */
2175 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Same type: nothing to do (the return for this guard is elided here).  */
2177 if (TREE_TYPE (arg1) == type)
/* Dispatch on the target type family, then on the constant's kind.  */
2180 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2182 if (TREE_CODE (arg1) == INTEGER_CST)
2183 return fold_convert_const_int_from_int (type, arg1);
2184 else if (TREE_CODE (arg1) == REAL_CST)
2185 return fold_convert_const_int_from_real (code, type, arg1);
2187 else if (TREE_CODE (type) == REAL_TYPE)
2189 if (TREE_CODE (arg1) == INTEGER_CST)
2190 return build_real_from_int_cst (type, arg1);
2191 if (TREE_CODE (arg1) == REAL_CST)
2192 return fold_convert_const_real_from_real (type, arg1);
2197 /* Construct a vector of zero elements of vector type TYPE. */
2200 build_zero_vector (tree type)
/* Build one zero of the element type, then replicate it once per lane.  */
2205 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2206 units = TYPE_VECTOR_SUBPARTS (type);
2209 for (i = 0; i < units; i++)
2210 list = tree_cons (NULL_TREE, elem, list);
2211 return build_vector (type, list);
2214 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2217 fold_convertible_p (tree type, tree arg)
2219 tree orig = TREE_TYPE (arg);
/* Error nodes are never convertible.  */
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
/* Same main variant: trivially convertible.  */
2229 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2232 switch (TREE_CODE (type))
2234 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2235 case POINTER_TYPE: case REFERENCE_TYPE:
2237 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2238 || TREE_CODE (orig) == OFFSET_TYPE)
/* Vector target (label elided): sizes must match exactly.  */
2240 return (TREE_CODE (orig) == VECTOR_TYPE
2241 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2244 return TREE_CODE (type) == TREE_CODE (orig);
2248 /* Convert expression ARG to type TYPE. Used by the middle-end for
2249 simple conversions in preference to calling the front-end's convert. */
2252 fold_convert (tree type, tree arg)
2254 tree orig = TREE_TYPE (arg);
2260 if (TREE_CODE (arg) == ERROR_MARK
2261 || TREE_CODE (type) == ERROR_MARK
2262 || TREE_CODE (orig) == ERROR_MARK)
2263 return error_mark_node;
2265 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2266 return fold_build1 (NOP_EXPR, type, arg);
2268 switch (TREE_CODE (type))
2270 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2271 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constant operands fold at compile time when possible.  */
2273 if (TREE_CODE (arg) == INTEGER_CST)
2275 tem = fold_convert_const (NOP_EXPR, type, arg);
2276 if (tem != NULL_TREE)
2279 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2280 || TREE_CODE (orig) == OFFSET_TYPE)
2281 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> scalar: take the real part, then convert that.  */
2282 if (TREE_CODE (orig) == COMPLEX_TYPE)
2284 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2285 return fold_convert (type, tem);
2287 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2288 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2289 return fold_build1 (NOP_EXPR, type, arg);
/* REAL_TYPE target (label elided in this excerpt).  */
2292 if (TREE_CODE (arg) == INTEGER_CST)
2294 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2295 if (tem != NULL_TREE)
2298 else if (TREE_CODE (arg) == REAL_CST)
2300 tem = fold_convert_const (NOP_EXPR, type, arg);
2301 if (tem != NULL_TREE)
2305 switch (TREE_CODE (orig))
2308 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2309 case POINTER_TYPE: case REFERENCE_TYPE:
2310 return fold_build1 (FLOAT_EXPR, type, arg);
2313 return fold_build1 (NOP_EXPR, type, arg);
2316 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2317 return fold_convert (type, tem);
/* COMPLEX_TYPE target (label elided): widen scalars with a zero
   imaginary part; convert complex component-wise.  */
2324 switch (TREE_CODE (orig))
2327 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2328 case POINTER_TYPE: case REFERENCE_TYPE:
2330 return build2 (COMPLEX_EXPR, type,
2331 fold_convert (TREE_TYPE (type), arg),
2332 fold_convert (TREE_TYPE (type), integer_zero_node));
2337 if (TREE_CODE (arg) == COMPLEX_EXPR)
2339 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2340 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2341 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below (real and imaginary part), so wrap it in a
   SAVE_EXPR to evaluate it only once.  */
2344 arg = save_expr (arg);
2345 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2346 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2347 rpart = fold_convert (TREE_TYPE (type), rpart);
2348 ipart = fold_convert (TREE_TYPE (type), ipart);
2349 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* VECTOR_TYPE target (label elided): zero folds specially, otherwise
   a same-size bit reinterpretation via VIEW_CONVERT_EXPR.  */
2357 if (integer_zerop (arg))
2358 return build_zero_vector (type);
2359 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2360 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2361 || TREE_CODE (orig) == VECTOR_TYPE);
2362 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* VOID_TYPE target (label elided): discard the value but keep any
   needed side effects.  */
2365 tem = fold_ignored_result (arg);
2366 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2368 return fold_build1 (NOP_EXPR, type, tem);
2375 /* Return false if expr can be assumed not to be an lvalue, true
2379 maybe_lvalue_p (tree x)
2381 /* We only need to wrap lvalue tree codes. */
/* The switch lists tree codes that may denote lvalues; most of the case
   labels are elided in this excerpt.  */
2382 switch (TREE_CODE (x))
2393 case ALIGN_INDIRECT_REF:
2394 case MISALIGNED_INDIRECT_REF:
2396 case ARRAY_RANGE_REF:
2402 case PREINCREMENT_EXPR:
2403 case PREDECREMENT_EXPR:
2405 case TRY_CATCH_EXPR:
2406 case WITH_CLEANUP_EXPR:
2409 case GIMPLE_MODIFY_STMT:
2418 /* Assume the worst for front-end tree codes. */
2419 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2427 /* Return an expr equal to X but certainly not valid as an lvalue. */
2432 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X cannot be an lvalue anyway, return it unchanged; otherwise wrap
   it so it cannot be assigned to.  (The function's definition line is
   elided in this excerpt.)  */
2437 if (! maybe_lvalue_p (x))
2439 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2442 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2443 Zero means allow extended lvalues. */
2445 int pedantic_lvalues;
2447 /* When pedantic, return an expr equal to X but certainly not valid as a
2448 pedantic lvalue. Otherwise, return X. */
2451 pedantic_non_lvalue (tree x)
/* Only strip lvalue-ness when pedantic mode requires it.  */
2453 if (pedantic_lvalues)
2454 return non_lvalue (x);
2459 /* Given a tree comparison code, return the code that is the logical inverse
2460 of the given code. It is not safe to do this for floating-point
2461 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2462 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2465 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inversion would change which inputs trap.
   (The ERROR_MARK return for this guard is elided in this excerpt.)  */
2467 if (honor_nans && flag_trapping_math)
/* With NaNs, the inverse of an ordered comparison is the corresponding
   unordered one (e.g. !(a > b) is a UNLE b).  Case labels elided.  */
2477 return honor_nans ? UNLE_EXPR : LE_EXPR;
2479 return honor_nans ? UNLT_EXPR : LT_EXPR;
2481 return honor_nans ? UNGE_EXPR : GE_EXPR;
2483 return honor_nans ? UNGT_EXPR : GT_EXPR;
2497 return UNORDERED_EXPR;
2498 case UNORDERED_EXPR:
2499 return ORDERED_EXPR;
2505 /* Similar, but return the comparison that results if the operands are
2506 swapped. This is safe for floating-point. */
2509 swap_tree_comparison (enum tree_code code)
/* Most of this function's switch is elided in this excerpt; symmetric
   codes (EQ, NE, UNORDERED, ...) map to themselves.  */
2516 case UNORDERED_EXPR:
2542 /* Convert a comparison tree code from an enum tree_code representation
2543 into a compcode bit-based encoding. This function is the inverse of
2544 compcode_to_comparison. */
2546 static enum comparison_code
2547 comparison_to_compcode (enum tree_code code)
/* Each COMPCODE_* value is a bitmask over the basic relations
   (less, equal, greater, unordered); many case labels are elided here.  */
2564 return COMPCODE_ORD;
2565 case UNORDERED_EXPR:
2566 return COMPCODE_UNORD;
2568 return COMPCODE_UNLT;
2570 return COMPCODE_UNEQ;
2572 return COMPCODE_UNLE;
2574 return COMPCODE_UNGT;
2576 return COMPCODE_LTGT;
2578 return COMPCODE_UNGE;
2584 /* Convert a compcode bit-based encoding of a comparison operator back
2585 to GCC's enum tree_code representation. This function is the
2586 inverse of comparison_to_compcode. */
2588 static enum tree_code
2589 compcode_to_comparison (enum comparison_code code)
/* Most case labels of the mapping switch are elided in this excerpt.  */
2606 return ORDERED_EXPR;
2607 case COMPCODE_UNORD:
2608 return UNORDERED_EXPR;
2626 /* Return a tree for the comparison which is the combination of
2627 doing the AND or OR (depending on CODE) of the two operations LCODE
2628 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2629 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2630 if this makes the transformation invalid. */
2633 combine_comparisons (enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2637 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 enum comparison_code compcode;
/* In the bitmask encoding, AND/OR of comparisons is just bitwise
   AND/OR of the masks.  */
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2666 else if (flag_trapping_math)
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
/* Degenerate masks fold to constant true/false; otherwise build the
   single comparison the combined mask denotes.  */
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2706 return fold_build2 (compcode_to_comparison (compcode),
2707 truth_type, ll_arg, lr_arg);
2710 /* Return nonzero if CODE is a tree code that represents a truth value. */
2713 truth_value_p (enum tree_code code)
2715 return (TREE_CODE_CLASS (code) == tcc_comparison
2716 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2717 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2718 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2721 /* Return nonzero if two operands (typically of the same tree node)
2722 are necessarily equal. If either argument has side-effects this
2723 function returns zero. FLAGS modifies behavior as follows:
2725 If OEP_ONLY_CONST is set, only return nonzero for constants.
2726 This function tests whether the operands are indistinguishable;
2727 it does not test whether they are equal using C's == operation.
2728 The distinction is important for IEEE floating point, because
2729 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2730 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2732 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2733 even though it may hold multiple values during a function.
2734 This is because a GCC tree node guarantees that nothing else is
2735 executed between the evaluation of its "operands" (which may often
2736 be evaluated in arbitrary order). Hence if the operands themselves
2737 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2738 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2739 unset means assuming isochronic (or instantaneous) tree equivalence.
2740 Unless comparing arbitrary expression trees, such as from different
2741 statements, this flag can usually be left unset.
2743 If OEP_PURE_SAME is set, then pure functions with identical arguments
2744 are considered the same. It is used when the caller has other ways
2745 to ensure that global memory is unchanged in between. */
/* NOTE(review): many interior lines (return statements for the early-out
   checks, case labels, braces) are elided from this excerpt; comments
   below annotate only what is visible.  */
2748 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2750 /* If either is ERROR_MARK, they aren't equal. */
2751 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2754 /* If both types don't have the same signedness, then we can't consider
2755 them equal. We must check this before the STRIP_NOPS calls
2756 because they may change the signedness of the arguments. */
2757 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2760 /* If both types don't have the same precision, then it is not safe
2762 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2768 /* In case both args are comparisons but with different comparison
2769 code, try to swap the comparison operands of one arg to produce
2770 a match and compare that variant. */
2771 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2772 && COMPARISON_CLASS_P (arg0)
2773 && COMPARISON_CLASS_P (arg1))
2775 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2777 if (TREE_CODE (arg0) == swap_code)
2778 return operand_equal_p (TREE_OPERAND (arg0, 0),
2779 TREE_OPERAND (arg1, 1), flags)
2780 && operand_equal_p (TREE_OPERAND (arg0, 1),
2781 TREE_OPERAND (arg1, 0), flags);
2784 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2785 /* This is needed for conversions and for COMPONENT_REF.
2786 Might as well play it safe and always test this. */
2787 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2788 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2789 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2792 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2793 We don't care about side effects in that case because the SAVE_EXPR
2794 takes care of that for us. In all other cases, two expressions are
2795 equal if they have no side effects. If we have two identical
2796 expressions with side effects that should be treated the same due
2797 to the only side effects being identical SAVE_EXPR's, that will
2798 be detected in the recursive calls below. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2804 /* Next handle constant cases, those for which we can return 1 even
2805 if ONLY_CONST is set. */
2806 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2807 switch (TREE_CODE (arg0))
2810 return tree_int_cst_equal (arg0, arg1);
2813 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2814 TREE_REAL_CST (arg1)))
2818 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2820 /* If we do not distinguish between signed and unsigned zero,
2821 consider them equal. */
2822 if (real_zerop (arg0) && real_zerop (arg1))
/* VECTOR_CST: compare element lists pairwise.  */
2831 v1 = TREE_VECTOR_CST_ELTS (arg0);
2832 v2 = TREE_VECTOR_CST_ELTS (arg1);
2835 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2838 v1 = TREE_CHAIN (v1);
2839 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: both real and imaginary parts must match.  */
2846 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2848 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: same length and identical bytes.  */
2852 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2853 && ! memcmp (TREE_STRING_POINTER (arg0),
2854 TREE_STRING_POINTER (arg1),
2855 TREE_STRING_LENGTH (arg0)));
2858 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2864 if (flags & OEP_ONLY_CONST)
2867 /* Define macros to test an operand from arg0 and arg1 for equality and a
2868 variant that allows null and views null as being different from any
2869 non-null value. In the latter case, if either is null, then both
2870 must be; otherwise, do the normal comparison. */
2871 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2872 TREE_OPERAND (arg1, N), flags)
2874 #define OP_SAME_WITH_NULL(N) \
2875 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2876 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2878 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2881 /* Two conversions are equal only if signedness and modes match. */
2882 switch (TREE_CODE (arg0))
2886 case FIX_TRUNC_EXPR:
2887 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2888 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2898 case tcc_comparison:
2900 if (OP_SAME (0) && OP_SAME (1))
2903 /* For commutative ops, allow the other order. */
2904 return (commutative_tree_code (TREE_CODE (arg0))
2905 && operand_equal_p (TREE_OPERAND (arg0, 0),
2906 TREE_OPERAND (arg1, 1), flags)
2907 && operand_equal_p (TREE_OPERAND (arg0, 1),
2908 TREE_OPERAND (arg1, 0), flags));
2911 /* If either of the pointer (or reference) expressions we are
2912 dereferencing contain a side effect, these cannot be equal. */
2913 if (TREE_SIDE_EFFECTS (arg0)
2914 || TREE_SIDE_EFFECTS (arg1))
2917 switch (TREE_CODE (arg0))
2920 case ALIGN_INDIRECT_REF:
2921 case MISALIGNED_INDIRECT_REF:
2927 case ARRAY_RANGE_REF:
2928 /* Operands 2 and 3 may be null.
2929 Compare the array index by value if it is constant first as we
2930 may have different types but same value here. */
2932 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2933 TREE_OPERAND (arg1, 1))
2935 && OP_SAME_WITH_NULL (2)
2936 && OP_SAME_WITH_NULL (3));
2939 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2940 may be NULL when we're called to compare MEM_EXPRs. */
2941 return OP_SAME_WITH_NULL (0)
2943 && OP_SAME_WITH_NULL (2);
2946 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2952 case tcc_expression:
2953 switch (TREE_CODE (arg0))
2956 case TRUTH_NOT_EXPR:
2959 case TRUTH_ANDIF_EXPR:
2960 case TRUTH_ORIF_EXPR:
2961 return OP_SAME (0) && OP_SAME (1);
2963 case TRUTH_AND_EXPR:
2965 case TRUTH_XOR_EXPR:
2966 if (OP_SAME (0) && OP_SAME (1))
2969 /* Otherwise take into account this is a commutative operation. */
2970 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2971 TREE_OPERAND (arg1, 1), flags)
2972 && operand_equal_p (TREE_OPERAND (arg0, 1),
2973 TREE_OPERAND (arg1, 0), flags));
2980 switch (TREE_CODE (arg0))
2983 /* If the CALL_EXPRs call different functions, then they
2984 clearly can not be equal. */
2985 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2990 unsigned int cef = call_expr_flags (arg0);
2991 if (flags & OEP_PURE_SAME)
2992 cef &= ECF_CONST | ECF_PURE;
2999 /* Now see if all the arguments are the same. */
3001 call_expr_arg_iterator iter0, iter1;
3003 for (a0 = first_call_expr_arg (arg0, &iter0),
3004 a1 = first_call_expr_arg (arg1, &iter1);
3006 a0 = next_call_expr_arg (&iter0),
3007 a1 = next_call_expr_arg (&iter1))
3008 if (! operand_equal_p (a0, a1, flags))
3011 /* If we get here and both argument lists are exhausted
3012 then the CALL_EXPRs are equal. */
3013 return ! (a0 || a1);
3019 case tcc_declaration:
3020 /* Consider __builtin_sqrt equal to sqrt. */
3021 return (TREE_CODE (arg0) == FUNCTION_DECL
3022 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3023 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3024 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3031 #undef OP_SAME_WITH_NULL
3034 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3035 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3037 When in doubt, return 0. */
/* NOTE(review): return statements and closing braces are elided from this
   excerpt; annotations below describe only the visible steps.  */
3040 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3042 int unsignedp1, unsignedpo;
3043 tree primarg0, primarg1, primother;
3044 unsigned int correct_width;
/* Trivially equal operands need no further work.  */
3046 if (operand_equal_p (arg0, arg1, 0))
3049 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3050 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3053 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3054 and see if the inner values are the same. This removes any
3055 signedness comparison, which doesn't matter here. */
3056 primarg0 = arg0, primarg1 = arg1;
3057 STRIP_NOPS (primarg0);
3058 STRIP_NOPS (primarg1);
3059 if (operand_equal_p (primarg0, primarg1, 0))
3062 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3063 actual comparison operand, ARG0.
3065 First throw away any conversions to wider types
3066 already present in the operands. */
3068 primarg1 = get_narrower (arg1, &unsignedp1);
3069 primother = get_narrower (other, &unsignedpo);
3071 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3072 if (unsignedp1 == unsignedpo
3073 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3074 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3076 tree type = TREE_TYPE (arg0);
3078 /* Make sure shorter operand is extended the right way
3079 to match the longer operand. */
3080 primarg1 = fold_convert (signed_or_unsigned_type_for
3081 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3083 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3090 /* See if ARG is an expression that is either a comparison or is performing
3091 arithmetic on comparisons. The comparisons must only be comparing
3092 two different values, which will be stored in *CVAL1 and *CVAL2; if
3093 they are nonzero it means that some operands have already been found.
3094 No variables may be used anywhere else in the expression except in the
3095 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3096 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3098 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): interior lines (the enclosing switch, several returns and
   braces) are elided from this excerpt.  */
3101 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3103 enum tree_code code = TREE_CODE (arg);
3104 enum tree_code_class class = TREE_CODE_CLASS (code);
3106 /* We can handle some of the tcc_expression cases here. */
3107 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3109 else if (class == tcc_expression
3110 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3111 || code == COMPOUND_EXPR))
3114 else if (class == tcc_expression && code == SAVE_EXPR
3115 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3117 /* If we've already found a CVAL1 or CVAL2, this expression is
3118 too complex to handle. */
3119 if (*cval1 || *cval2)
3129 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3132 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3133 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3134 cval1, cval2, save_p));
3139 case tcc_expression:
3140 if (code == COND_EXPR)
3141 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3142 cval1, cval2, save_p)
3143 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3144 cval1, cval2, save_p)
3145 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3146 cval1, cval2, save_p));
3149 case tcc_comparison:
3150 /* First see if we can handle the first operand, then the second. For
3151 the second operand, we know *CVAL1 can't be zero. It must be that
3152 one side of the comparison is each of the values; test for the
3153 case where this isn't true by failing if the two operands
3156 if (operand_equal_p (TREE_OPERAND (arg, 0),
3157 TREE_OPERAND (arg, 1), 0))
3161 *cval1 = TREE_OPERAND (arg, 0);
3162 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3164 else if (*cval2 == 0)
3165 *cval2 = TREE_OPERAND (arg, 0);
3166 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now do the same bookkeeping for the second comparison operand.  */
3171 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3173 else if (*cval2 == 0)
3174 *cval2 = TREE_OPERAND (arg, 1);
3175 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3187 /* ARG is a tree that is known to contain just arithmetic operations and
3188 comparisons. Evaluate the operations in the tree substituting NEW0 for
3189 any occurrence of OLD0 as an operand of a comparison and likewise for
/* Rebuild ARG, substituting NEW0/NEW1 for comparison operands equal to
   OLD0/OLD1.  NOTE(review): the switch header, several case labels and
   assignment statements are elided from this excerpt.  */
3193 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3195 tree type = TREE_TYPE (arg);
3196 enum tree_code code = TREE_CODE (arg);
3197 enum tree_code_class class = TREE_CODE_CLASS (code);
3199 /* We can handle some of the tcc_expression cases here. */
3200 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3202 else if (class == tcc_expression
3203 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3209 return fold_build1 (code, type,
3210 eval_subst (TREE_OPERAND (arg, 0),
3211 old0, new0, old1, new1));
3214 return fold_build2 (code, type,
3215 eval_subst (TREE_OPERAND (arg, 0),
3216 old0, new0, old1, new1),
3217 eval_subst (TREE_OPERAND (arg, 1),
3218 old0, new0, old1, new1));
3220 case tcc_expression:
3224 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3227 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3230 return fold_build3 (code, type,
3231 eval_subst (TREE_OPERAND (arg, 0),
3232 old0, new0, old1, new1),
3233 eval_subst (TREE_OPERAND (arg, 1),
3234 old0, new0, old1, new1),
3235 eval_subst (TREE_OPERAND (arg, 2),
3236 old0, new0, old1, new1));
3240 /* Fall through - ??? */
3242 case tcc_comparison:
3244 tree arg0 = TREE_OPERAND (arg, 0);
3245 tree arg1 = TREE_OPERAND (arg, 1);
3247 /* We need to check both for exact equality and tree equality. The
3248 former will be true if the operand has a side-effect. In that
3249 case, we know the operand occurred exactly once. */
3251 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3253 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3256 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3258 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3261 return fold_build2 (code, type, arg0, arg1);
3269 /* Return a tree for the case when the result of an expression is RESULT
3270 converted to TYPE and OMITTED was previously an operand of the expression
3271 but is now not needed (e.g., we folded OMITTED * 0).
3273 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3274 the conversion of RESULT to TYPE. */
3277 omit_one_operand (tree type, tree result, tree omitted)
3279 tree t = fold_convert (type, result);
3281 if (TREE_SIDE_EFFECTS (omitted))
3282 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3284 return non_lvalue (t);
3287 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3290 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3292 tree t = fold_convert (type, result);
3294 if (TREE_SIDE_EFFECTS (omitted))
3295 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3297 return pedantic_non_lvalue (t);
3300 /* Return a tree for the case when the result of an expression is RESULT
3301 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3302 of the expression but are now not needed.
3304 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3305 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3306 evaluated before OMITTED2. Otherwise, if neither has side effects,
3307 just do the conversion of RESULT to TYPE. */
3310 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3312 tree t = fold_convert (type, result);
3314 if (TREE_SIDE_EFFECTS (omitted2))
3315 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3316 if (TREE_SIDE_EFFECTS (omitted1))
3317 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3319 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3323 /* Return a simplified tree node for the truth-negation of ARG. This
3324 never alters ARG itself. We assume that ARG is an operation that
3325 returns a truth value (0 or 1).
3327 FIXME: one would think we would fold the result, but it causes
3328 problems with the dominator optimizer. */
/* NOTE(review): the enclosing switch header, several case labels
   (e.g. for the integer-constant, COND_EXPR, COMPOUND_EXPR and
   conversion arms) and various braces/returns are elided from this
   excerpt; annotations below describe only the visible arms.  */
3331 fold_truth_not_expr (tree arg)
3333 tree type = TREE_TYPE (arg);
3334 enum tree_code code = TREE_CODE (arg);
3336 /* If this is a comparison, we can simply invert it, except for
3337 floating-point non-equality comparisons, in which case we just
3338 enclose a TRUTH_NOT_EXPR around what we have. */
3340 if (TREE_CODE_CLASS (code) == tcc_comparison)
3342 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3343 if (FLOAT_TYPE_P (op_type)
3344 && flag_trapping_math
3345 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3346 && code != NE_EXPR && code != EQ_EXPR)
3350 code = invert_tree_comparison (code,
3351 HONOR_NANS (TYPE_MODE (op_type)));
3352 if (code == ERROR_MARK)
3355 return build2 (code, type,
3356 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: !0 -> true, !nonzero -> false.  */
3363 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a & b) == !a | !b, and vice versa below.  */
3365 case TRUTH_AND_EXPR:
3366 return build2 (TRUTH_OR_EXPR, type,
3367 invert_truthvalue (TREE_OPERAND (arg, 0)),
3368 invert_truthvalue (TREE_OPERAND (arg, 1)));
3371 return build2 (TRUTH_AND_EXPR, type,
3372 invert_truthvalue (TREE_OPERAND (arg, 0)),
3373 invert_truthvalue (TREE_OPERAND (arg, 1)));
3375 case TRUTH_XOR_EXPR:
3376 /* Here we can invert either operand. We invert the first operand
3377 unless the second operand is a TRUTH_NOT_EXPR in which case our
3378 result is the XOR of the first operand with the inside of the
3379 negation of the second operand. */
3381 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3382 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3383 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3385 return build2 (TRUTH_XOR_EXPR, type,
3386 invert_truthvalue (TREE_OPERAND (arg, 0)),
3387 TREE_OPERAND (arg, 1));
/* Short-circuit forms also obey De Morgan, swapping ANDIF/ORIF.  */
3389 case TRUTH_ANDIF_EXPR:
3390 return build2 (TRUTH_ORIF_EXPR, type,
3391 invert_truthvalue (TREE_OPERAND (arg, 0)),
3392 invert_truthvalue (TREE_OPERAND (arg, 1)));
3394 case TRUTH_ORIF_EXPR:
3395 return build2 (TRUTH_ANDIF_EXPR, type,
3396 invert_truthvalue (TREE_OPERAND (arg, 0)),
3397 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
3399 case TRUTH_NOT_EXPR:
3400 return TREE_OPERAND (arg, 0);
3404 tree arg1 = TREE_OPERAND (arg, 1);
3405 tree arg2 = TREE_OPERAND (arg, 2);
3406 /* A COND_EXPR may have a throw as one operand, which
3407 then has void type. Just leave void operands
3409 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3410 VOID_TYPE_P (TREE_TYPE (arg1))
3411 ? arg1 : invert_truthvalue (arg1),
3412 VOID_TYPE_P (TREE_TYPE (arg2))
3413 ? arg2 : invert_truthvalue (arg2));
/* COMPOUND_EXPR: keep the first operand, invert only the value.  */
3417 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3418 invert_truthvalue (TREE_OPERAND (arg, 1)));
3420 case NON_LVALUE_EXPR:
3421 return invert_truthvalue (TREE_OPERAND (arg, 0));
3424 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3425 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Push the inversion through a conversion.  */
3429 return build1 (TREE_CODE (arg), type,
3430 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND_EXPR with 1: !(x & 1) folds to (x & 1) == 0.  */
3433 if (!integer_onep (TREE_OPERAND (arg, 1)))
3435 return build2 (EQ_EXPR, type, arg,
3436 build_int_cst (type, 0));
3439 return build1 (TRUTH_NOT_EXPR, type, arg);
3441 case CLEANUP_POINT_EXPR:
3442 return build1 (CLEANUP_POINT_EXPR, type,
3443 invert_truthvalue (TREE_OPERAND (arg, 0)));
3452 /* Return a simplified tree node for the truth-negation of ARG. This
3453 never alters ARG itself. We assume that ARG is an operation that
3454 returns a truth value (0 or 1).
3456 FIXME: one would think we would fold the result, but it causes
3457 problems with the dominator optimizer. */
3460 invert_truthvalue (tree arg)
3464 if (TREE_CODE (arg) == ERROR_MARK)
3467 tem = fold_truth_not_expr (arg);
3469 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3474 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3475 operands are another bit-wise operation with a common input. If so,
3476 distribute the bit operations to save an operation and possibly two if
3477 constants are involved. For example, convert
3478 (A | B) & (A | C) into A | (B & C)
3479 Further simplification will occur if B and C are constants.
3481 If this optimization cannot be done, 0 will be returned. */
/* Distribute (A op B) CODE (A op C) into A op (B CODE C) when both
   operands share a common input.  NOTE(review): the declarations of
   COMMON/LEFT/RIGHT and the early-return/braces are elided from this
   excerpt.  */
3484 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same BIT_AND/BIT_IOR, different from CODE.  */
3489 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3490 || TREE_CODE (arg0) == code
3491 || (TREE_CODE (arg0) != BIT_AND_EXPR
3492 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try the four placements of the shared operand.  */
3495 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3497 common = TREE_OPERAND (arg0, 0);
3498 left = TREE_OPERAND (arg0, 1);
3499 right = TREE_OPERAND (arg1, 1);
3501 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3503 common = TREE_OPERAND (arg0, 0);
3504 left = TREE_OPERAND (arg0, 1);
3505 right = TREE_OPERAND (arg1, 0);
3507 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3509 common = TREE_OPERAND (arg0, 1);
3510 left = TREE_OPERAND (arg0, 0);
3511 right = TREE_OPERAND (arg1, 1);
3513 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3515 common = TREE_OPERAND (arg0, 1);
3516 left = TREE_OPERAND (arg0, 0);
3517 right = TREE_OPERAND (arg1, 0);
3522 return fold_build2 (TREE_CODE (arg0), type, common,
3523 fold_build2 (code, type, left, right));
3526 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3527 with code CODE. This optimization is unsafe. */
/* NOTE(review): return type, braces and parts of the conditions are
   elided from this excerpt.  Marked unsafe because it reassociates FP
   arithmetic.  */
3529 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3531 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3532 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3534 /* (A / C) +- (B / C) -> (A +- B) / C. */
3536 && operand_equal_p (TREE_OPERAND (arg0, 1),
3537 TREE_OPERAND (arg1, 1), 0))
3538 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3539 fold_build2 (code, type,
3540 TREE_OPERAND (arg0, 0),
3541 TREE_OPERAND (arg1, 0)),
3542 TREE_OPERAND (arg0, 1));
3544 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3545 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3546 TREE_OPERAND (arg1, 0), 0)
3547 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3548 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
3550 REAL_VALUE_TYPE r0, r1;
3551 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3552 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Reciprocate divisor constants, then combine them with CODE.  */
3554 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3556 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3557 real_arithmetic (&r0, code, &r0, &r1);
3558 return fold_build2 (MULT_EXPR, type,
3559 TREE_OPERAND (arg0, 0),
3560 build_real (type, r0));
3566 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3567 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
/* Build a BIT_FIELD_REF of TYPE covering BITSIZE bits of INNER starting
   at BITPOS; UNSIGNEDP marks the field unsigned.  NOTE(review): the
   parameter list continues on an elided line, as do the declaration of
   RESULT and the final return.  */
3570 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3577 tree size = TYPE_SIZE (TREE_TYPE (inner));
/* If the reference covers the whole integral/pointer object, a plain
   conversion suffices — no BIT_FIELD_REF needed.  */
3578 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3579 || POINTER_TYPE_P (TREE_TYPE (inner)))
3580 && host_integerp (size, 0)
3581 && tree_low_cst (size, 0) == bitsize)
3582 return fold_convert (type, inner);
3585 result = build3 (BIT_FIELD_REF, type, inner,
3586 size_int (bitsize), bitsize_int (bitpos));
3588 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3593 /* Optimize a bit-field compare.
3595 There are two cases: First is a compare against a constant and the
3596 second is a comparison of two items where the fields are at the same
3597 bit position relative to the start of a chunk (byte, halfword, word)
3598 large enough to contain it. In these cases we can avoid the shift
3599 implicit in bitfield extractions.
3601 For constants, we emit a compare of the shifted constant with the
3602 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3603 compared. For two fields at the same position, we do the ANDs with the
3604 similar mask and compare the result of the ANDs.
3606 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3607 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3608 are the left and right operands of the comparison, respectively.
3610 If the optimization described above can be done, we return the resulting
3611 tree. Otherwise we return zero. */
/* NOTE(review): many interior lines (parameter continuation, early
   returns, braces, parts of the non-constant path) are elided from this
   excerpt; annotations describe only the visible logic.  */
3614 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3617 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3618 tree type = TREE_TYPE (lhs);
3619 tree signed_type, unsigned_type;
3620 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3621 enum machine_mode lmode, rmode, nmode;
3622 int lunsignedp, runsignedp;
3623 int lvolatilep = 0, rvolatilep = 0;
3624 tree linner, rinner = NULL_TREE;
3628 /* Get all the information about the extractions being done. If the bit size
3629 is the same as the size of the underlying object, we aren't doing an
3630 extraction at all and so can do nothing. We also don't want to
3631 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3632 then will no longer be able to replace it. */
3633 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3634 &lunsignedp, &lvolatilep, false);
3635 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3636 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3641 /* If this is not a constant, we can only do something if bit positions,
3642 sizes, and signedness are the same. */
3643 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3644 &runsignedp, &rvolatilep, false);
3646 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3647 || lunsignedp != runsignedp || offset != 0
3648 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3652 /* See if we can find a mode to refer to this field. We should be able to,
3653 but fail if we can't. */
3654 nmode = get_best_mode (lbitsize, lbitpos,
3655 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3656 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3657 TYPE_ALIGN (TREE_TYPE (rinner))),
3658 word_mode, lvolatilep || rvolatilep);
3659 if (nmode == VOIDmode)
3662 /* Set signed and unsigned types of the precision of this mode for the
3664 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3665 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3667 /* Compute the bit position and size for the new reference and our offset
3668 within it. If the new reference is the same size as the original, we
3669 won't optimize anything, so return zero. */
3670 nbitsize = GET_MODE_BITSIZE (nmode);
3671 nbitpos = lbitpos & ~ (nbitsize - 1);
3673 if (nbitsize == lbitsize)
3676 if (BYTES_BIG_ENDIAN)
3677 lbitpos = nbitsize - lbitsize - lbitpos;
3679 /* Make the mask to be used against the extracted field. */
3680 mask = build_int_cst_type (unsigned_type, -1);
3681 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3682 mask = const_binop (RSHIFT_EXPR, mask,
3683 size_int (nbitsize - lbitsize - lbitpos), 0);
3686 /* If not comparing with constant, just rework the comparison
3688 return fold_build2 (code, compare_type,
3689 fold_build2 (BIT_AND_EXPR, unsigned_type,
3690 make_bit_field_ref (linner,
3695 fold_build2 (BIT_AND_EXPR, unsigned_type,
3696 make_bit_field_ref (rinner,
3702 /* Otherwise, we are handling the constant case. See if the constant is too
3703 big for the field. Warn and return a tree for 0 (false) if so. We do
3704 this not only for its own sake, but to avoid having to test for this
3705 error case below. If we didn't, we might generate wrong code.
3707 For unsigned fields, the constant shifted right by the field length should
3708 be all zero. For signed fields, the high-order bits should agree with
3713 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3714 fold_convert (unsigned_type, rhs),
3715 size_int (lbitsize), 0)))
3717 warning (0, "comparison is always %d due to width of bit-field",
3719 return constant_boolean_node (code == NE_EXPR, compare_type);
3724 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3725 size_int (lbitsize - 1), 0);
3726 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3728 warning (0, "comparison is always %d due to width of bit-field",
3730 return constant_boolean_node (code == NE_EXPR, compare_type);
3734 /* Single-bit compares should always be against zero. */
3735 if (lbitsize == 1 && ! integer_zerop (rhs))
3737 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3738 rhs = build_int_cst (type, 0);
3741 /* Make a new bitfield reference, shift the constant over the
3742 appropriate number of bits and mask it with the computed mask
3743 (in case this was a signed field). If we changed it, make a new one. */
3744 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3747 TREE_SIDE_EFFECTS (lhs) = 1;
3748 TREE_THIS_VOLATILE (lhs) = 1;
3751 rhs = const_binop (BIT_AND_EXPR,
3752 const_binop (LSHIFT_EXPR,
3753 fold_convert (unsigned_type, rhs),
3754 size_int (lbitpos), 0),
3757 return build2 (code, compare_type,
3758 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3762 /* Subroutine for fold_truthop: decode a field reference.
3764 If EXP is a comparison reference, we return the innermost reference.
3766 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3767 set to the starting bit number.
3769 If the innermost field can be completely contained in a mode-sized
3770 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3772 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3773 otherwise it is not changed.
3775 *PUNSIGNEDP is set to the signedness of the field.
3777 *PMASK is set to the mask used. This is either contained in a
3778 BIT_AND_EXPR or derived from the width of the field.
3780 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3782 Return 0 if this is not a component reference or is one that we can't
3783 do anything with. */
3786 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3787 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3788 int *punsignedp, int *pvolatilep,
3789 tree *pmask, tree *pand_mask)
3791 tree outer_type = 0;
3793 tree mask, inner, offset;
3795 unsigned int precision;
3797 /* All the optimizations using this function assume integer fields.
3798 There are problems with FP fields since the type_for_size call
3799 below can fail for, e.g., XFmode. */
3800 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3803 /* We are interested in the bare arrangement of bits, so strip everything
3804 that doesn't affect the machine mode. However, record the type of the
3805 outermost expression if it may matter below. */
3806 if (TREE_CODE (exp) == NOP_EXPR
3807 || TREE_CODE (exp) == CONVERT_EXPR
3808 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3809 outer_type = TREE_TYPE (exp);
3812 if (TREE_CODE (exp) == BIT_AND_EXPR)
3814 and_mask = TREE_OPERAND (exp, 1);
3815 exp = TREE_OPERAND (exp, 0);
3816 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3817 if (TREE_CODE (and_mask) != INTEGER_CST)
3821 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3822 punsignedp, pvolatilep, false);
3823 if ((inner == exp && and_mask == 0)
3824 || *pbitsize < 0 || offset != 0
3825 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3828 /* If the number of bits in the reference is the same as the bitsize of
3829 the outer type, then the outer type gives the signedness. Otherwise
3830 (in case of a small bitfield) the signedness is unchanged. */
3831 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3832 *punsignedp = TYPE_UNSIGNED (outer_type);
3834 /* Compute the mask to access the bitfield. */
3835 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3836 precision = TYPE_PRECISION (unsigned_type);
3838 mask = build_int_cst_type (unsigned_type, -1);
3840 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3841 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3843 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3845 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3846 fold_convert (unsigned_type, and_mask), mask);
3849 *pand_mask = and_mask;
3853 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3857 all_ones_mask_p (tree mask, int size)
3859 tree type = TREE_TYPE (mask);
3860 unsigned int precision = TYPE_PRECISION (type);
3863 tmask = build_int_cst_type (signed_type_for (type), -1);
3866 tree_int_cst_equal (mask,
3867 const_binop (RSHIFT_EXPR,
3868 const_binop (LSHIFT_EXPR, tmask,
3869 size_int (precision - size),
3871 size_int (precision - size), 0));
3874 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3875 represents the sign bit of EXP's type. If EXP represents a sign
3876 or zero extension, also test VAL against the unextended type.
3877 The return value is the (sub)expression whose sign bit is VAL,
3878 or NULL_TREE otherwise. */
3881 sign_bit_p (tree exp, tree val)
3883 unsigned HOST_WIDE_INT mask_lo, lo;
3884 HOST_WIDE_INT mask_hi, hi;
3888 /* Tree EXP must have an integral type. */
3889 t = TREE_TYPE (exp);
3890 if (! INTEGRAL_TYPE_P (t))
3893 /* Tree VAL must be an integer constant. */
3894 if (TREE_CODE (val) != INTEGER_CST
3895 || TREE_OVERFLOW (val))
3898 width = TYPE_PRECISION (t);
3899 if (width > HOST_BITS_PER_WIDE_INT)
3901 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3904 mask_hi = ((unsigned HOST_WIDE_INT) -1
3905 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3911 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3914 mask_lo = ((unsigned HOST_WIDE_INT) -1
3915 >> (HOST_BITS_PER_WIDE_INT - width));
3918 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3919 treat VAL as if it were unsigned. */
3920 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3921 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3924 /* Handle extension from a narrower type. */
3925 if (TREE_CODE (exp) == NOP_EXPR
3926 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3927 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3932 /* Subroutine for fold_truthop: determine if an operand is simple enough
3933 to be evaluated unconditionally. */
3936 simple_operand_p (tree exp)
3938 /* Strip any conversions that don't change the machine mode. */
3941 return (CONSTANT_CLASS_P (exp)
3942 || TREE_CODE (exp) == SSA_NAME
3944 && ! TREE_ADDRESSABLE (exp)
3945 && ! TREE_THIS_VOLATILE (exp)
3946 && ! DECL_NONLOCAL (exp)
3947 /* Don't regard global variables as simple. They may be
3948 allocated in ways unknown to the compiler (shared memory,
3949 #pragma weak, etc). */
3950 && ! TREE_PUBLIC (exp)
3951 && ! DECL_EXTERNAL (exp)
3952 /* Loading a static variable is unduly expensive, but global
3953 registers aren't expensive. */
3954 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3957 /* The following functions are subroutines to fold_range_test and allow it to
3958 try to change a logical combination of comparisons into a range test.
3961 X == 2 || X == 3 || X == 4 || X == 5
3965 (unsigned) (X - 2) <= 3
3967 We describe each set of comparisons as being either inside or outside
3968 a range, using a variable named like IN_P, and then describe the
3969 range with a lower and upper bound. If one of the bounds is omitted,
3970 it represents either the highest or lowest value of the type.
3972 In the comments below, we represent a range by two numbers in brackets
3973 preceded by a "+" to designate being inside that range, or a "-" to
3974 designate being outside that range, so the condition can be inverted by
3975 flipping the prefix. An omitted bound is represented by a "-". For
3976 example, "- [-, 10]" means being outside the range starting at the lowest
3977 possible value and ending at 10, in other words, being greater than 10.
3978 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3979 always false.
3981 We set up things so that the missing bounds are handled in a consistent
3982 manner so neither a missing bound nor "true" and "false" need to be
3983 handled using a special case. */
3985 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3986 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3987 and UPPER1_P are nonzero if the respective argument is an upper bound
3988 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3989 must be specified for a comparison. ARG1 will be converted to ARG0's
3990 type if both are specified. */
3993 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3994 tree arg1, int upper1_p)
4000 /* If neither arg represents infinity, do the normal operation.
4001 Else, if not a comparison, return infinity. Else handle the special
4002 comparison rules. Note that most of the cases below won't occur, but
4003 are handled for consistency. */
4005 if (arg0 != 0 && arg1 != 0)
4007 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4008 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4010 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4013 if (TREE_CODE_CLASS (code) != tcc_comparison)
4016 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4017 for neither. In real maths, we cannot assume open ended ranges are
4018 the same. But, this is computer arithmetic, where numbers are finite.
4019 We can therefore make the transformation of any unbounded range with
4020 the value Z, Z being greater than any representable number. This permits
4021 us to treat unbounded ranges as equal. */
4022 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4023 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4027 result = sgn0 == sgn1;
4030 result = sgn0 != sgn1;
4033 result = sgn0 < sgn1;
4036 result = sgn0 <= sgn1;
4039 result = sgn0 > sgn1;
4042 result = sgn0 >= sgn1;
4048 return constant_boolean_node (result, type);
4051 /* Given EXP, a logical expression, set the range it is testing into
4052 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4053 actually being tested. *PLOW and *PHIGH will be made of the same
4054 type as the returned expression. If EXP is not a comparison, we
4055 will most likely not be returning a useful value and range. Set
4056 *STRICT_OVERFLOW_P to true if the return value is only valid
4057 because signed overflow is undefined; otherwise, do not change
4058 *STRICT_OVERFLOW_P. */
/* NOTE(review): this listing is a lossy extraction of make_range — the
   original line numbers (4061, 4062, ...) are baked into each line, and
   the gaps in that numbering mark dropped lines: the "static tree" return
   type, "{"/"}" braces, the outer "while (1)" loop header, both "switch"
   headers, every "break;"/"continue;", and several expression
   continuations.  Do not compile as-is; restore the dropped lines from
   the original fold-const.c first.  The annotations below map the
   surviving structure.  */
4061 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4062 bool *strict_overflow_p)
4064 enum tree_code code;
4065 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4066 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4068 tree low, high, n_low, n_high;
4070 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4071 and see if we can refine the range. Some of the cases below may not
4072 happen, but it doesn't seem worth worrying about this. We "continue"
4073 the outer loop when we've changed something; otherwise we "break"
4074 the switch, which will "break" the while. */
/* NOTE(review): initial range is "not equal to zero", i.e. in_p = 0,
   low = high = 0 of EXP's type; the loop header (orig. 4075-4080) that
   refines it iteratively was dropped here.  */
4077 low = high = build_int_cst (TREE_TYPE (exp), 0);
4081 code = TREE_CODE (exp);
4082 exp_type = TREE_TYPE (exp);
4084 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4086 if (TREE_OPERAND_LENGTH (exp) > 0)
4087 arg0 = TREE_OPERAND (exp, 0);
4088 if (TREE_CODE_CLASS (code) == tcc_comparison
4089 || TREE_CODE_CLASS (code) == tcc_unary
4090 || TREE_CODE_CLASS (code) == tcc_binary)
4091 arg0_type = TREE_TYPE (arg0);
4092 if (TREE_CODE_CLASS (code) == tcc_binary
4093 || TREE_CODE_CLASS (code) == tcc_comparison
4094 || (TREE_CODE_CLASS (code) == tcc_expression
4095 && TREE_OPERAND_LENGTH (exp) > 1))
4096 arg1 = TREE_OPERAND (exp, 1);
/* NOTE(review): "switch (code)" header (orig. ~4099) dropped; the cases
   below dispatch on EXP's tree code.  */
4101 case TRUTH_NOT_EXPR:
4102 in_p = ! in_p, exp = arg0;
4105 case EQ_EXPR: case NE_EXPR:
4106 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4107 /* We can only do something if the range is testing for zero
4108 and if the second operand is an integer constant. Note that
4109 saying something is "in" the range we make is done by
4110 complementing IN_P since it will set in the initial case of
4111 being not equal to zero; "out" is leaving it alone. */
4112 if (low == 0 || high == 0
4113 || ! integer_zerop (low) || ! integer_zerop (high)
4114 || TREE_CODE (arg1) != INTEGER_CST)
/* NOTE(review): inner "switch (code)" header dropped; each comparison
   below is converted to an inside/outside range over ARG1.  */
4119 case NE_EXPR: /* - [c, c] */
4122 case EQ_EXPR: /* + [c, c] */
4123 in_p = ! in_p, low = high = arg1;
4125 case GT_EXPR: /* - [-, c] */
4126 low = 0, high = arg1;
4128 case GE_EXPR: /* + [c, -] */
4129 in_p = ! in_p, low = arg1, high = 0;
4131 case LT_EXPR: /* - [c, -] */
4132 low = arg1, high = 0;
4134 case LE_EXPR: /* + [-, c] */
4135 in_p = ! in_p, low = 0, high = arg1;
4141 /* If this is an unsigned comparison, we also know that EXP is
4142 greater than or equal to zero. We base the range tests we make
4143 on that fact, so we record it here so we can parse existing
4144 range tests. We test arg0_type since often the return type
4145 of, e.g. EQ_EXPR, is boolean. */
4146 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4148 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4150 build_int_cst (arg0_type, 0),
/* NOTE(review): merge_ranges call continuation lines (orig. 4149,
   4151-4153) dropped — it intersects with the [0, -] range implied by
   unsignedness.  */
4154 in_p = n_in_p, low = n_low, high = n_high;
4156 /* If the high bound is missing, but we have a nonzero low
4157 bound, reverse the range so it goes from zero to the low bound
4158 minus 1 (comment tail dropped in extraction).  */
4159 if (high == 0 && low && ! integer_zerop (low))
4162 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4163 integer_one_node, 0);
4164 low = build_int_cst (arg0_type, 0);
/* NOTE(review): the NEGATE_EXPR case label (orig. ~4171) was dropped;
   the next three statements implement it.  */
4172 /* (-x) IN [a,b] -> x in [-b, -a] */
4173 n_low = range_binop (MINUS_EXPR, exp_type,
4174 build_int_cst (exp_type, 0),
4176 n_high = range_binop (MINUS_EXPR, exp_type,
4177 build_int_cst (exp_type, 0),
4179 low = n_low, high = n_high;
/* NOTE(review): BIT_NOT_EXPR case label dropped — ~X is rewritten as
   (-X) - 1 below and re-processed.  */
4185 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4186 build_int_cst (exp_type, 1));
4189 case PLUS_EXPR: case MINUS_EXPR:
4190 if (TREE_CODE (arg1) != INTEGER_CST)
4193 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4194 move a constant to the other side. */
4195 if (!TYPE_UNSIGNED (arg0_type)
4196 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4199 /* If EXP is signed, any overflow in the computation is undefined,
4200 so we don't worry about it so long as our computations on
4201 the bounds don't overflow. For unsigned, overflow is defined
4202 and this is exactly the right thing. */
4203 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4204 arg0_type, low, 0, arg1, 0);
4205 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4206 arg0_type, high, 1, arg1, 0);
4207 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4208 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4211 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4212 *strict_overflow_p = true;
4214 /* Check for an unsigned range which has wrapped around the maximum
4215 value thus making n_high < n_low, and normalize it. */
4216 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4218 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4219 integer_one_node, 0);
4220 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4221 integer_one_node, 0);
4223 /* If the range is of the form +/- [ x+1, x ], we won't
4224 be able to normalize it. But then, it represents the
4225 whole range or the empty set, so make it
4226 +/- [ -, - ] (comment tail dropped in extraction).  */
4227 if (tree_int_cst_equal (n_low, low)
4228 && tree_int_cst_equal (n_high, high))
4234 low = n_low, high = n_high;
4239 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4240 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4243 if (! INTEGRAL_TYPE_P (arg0_type)
4244 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4245 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4248 n_low = low, n_high = high;
4251 n_low = fold_convert (arg0_type, n_low);
4254 n_high = fold_convert (arg0_type, n_high);
4257 /* If we're converting arg0 from an unsigned type, to exp,
4258 a signed type, we will be doing the comparison as unsigned.
4259 The tests above have already verified that LOW and HIGH
4262 So we have to ensure that we will handle large unsigned
4263 values the same way that the current signed bounds treat
4266 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4269 tree equiv_type = lang_hooks.types.type_for_mode
4270 (TYPE_MODE (arg0_type), 1);
4272 /* A range without an upper bound is, naturally, unbounded.
4273 Since convert would have cropped a very large value, use
4274 the max value for the destination type. */
/* NOTE(review): the "high_positive" declaration/assignment head (orig.
   4275) was dropped; the next two lines are its initializer.  */
4276 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4277 : TYPE_MAX_VALUE (arg0_type);
4279 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4280 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4281 fold_convert (arg0_type,
4283 build_int_cst (arg0_type, 1));
4285 /* If the low bound is specified, "and" the range with the
4286 range for which the original unsigned value will be
4287 positive (comment tail dropped in extraction).  */
4290 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4291 1, n_low, n_high, 1,
4292 fold_convert (arg0_type,
4297 in_p = (n_in_p == in_p);
4301 /* Otherwise, "or" the range with the range of the input
4302 that will be interpreted as negative. */
4303 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4304 0, n_low, n_high, 1,
4305 fold_convert (arg0_type,
4310 in_p = (in_p != n_in_p);
4315 low = n_low, high = n_high;
/* NOTE(review): loop exit ("default: break;" plus the end of the while)
   dropped; the code below runs after the refinement loop terminates.  */
4325 /* If EXP is a constant, we can evaluate whether this is true or false. */
4326 if (TREE_CODE (exp) == INTEGER_CST)
4328 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4330 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4336 *pin_p = in_p, *plow = low, *phigh = high;
4340 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4341 type, TYPE, return an expression to test if EXP is in (or out of, depending
4342 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): lossy extraction of build_range_check — baked-in line
   numbers mark dropped lines: the "static tree" return type, braces,
   the "if (! in_p)" inversion guard, "if (low == 0)" / "if (high == 0)"
   heads before the LE/GE builds, the switch case labels
   (INTEGER_TYPE/ENUMERAL_TYPE/BOOLEAN_TYPE/POINTER_TYPE...), and the
   final "return 0;".  Restore from the original before compiling.  */
4345 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4347 tree etype = TREE_TYPE (exp);
4350 #ifdef HAVE_canonicalize_funcptr_for_compare
4351 /* Disable this optimization for function pointer expressions
4352 on targets that require function pointer canonicalization. */
4353 if (HAVE_canonicalize_funcptr_for_compare
4354 && TREE_CODE (etype) == POINTER_TYPE
4355 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* NOTE(review): "return 0;", "#endif" and the "if (! in_p)" head (orig.
   4356-4360) dropped — the two lines below are the out-of-range case:
   build the in-range check and invert it.  */
4361 value = build_range_check (type, exp, 1, low, high);
4363 return invert_truthvalue (value);
4368 if (low == 0 && high == 0)
4369 return build_int_cst (type, 1);
/* NOTE(review): "if (low == 0)" head dropped — unbounded below means a
   simple <= HIGH test.  */
4372 return fold_build2 (LE_EXPR, type, exp,
4373 fold_convert (etype, high));
/* NOTE(review): "if (high == 0)" head dropped — unbounded above means a
   simple >= LOW test.  */
4376 return fold_build2 (GE_EXPR, type, exp,
4377 fold_convert (etype, low));
4379 if (operand_equal_p (low, high, 0))
4380 return fold_build2 (EQ_EXPR, type, exp,
4381 fold_convert (etype, low));
4383 if (integer_zerop (low))
4385 if (! TYPE_UNSIGNED (etype))
4387 etype = unsigned_type_for (etype);
4388 high = fold_convert (etype, high);
4389 exp = fold_convert (etype, exp);
4391 return build_range_check (type, exp, 1, 0, high);
4394 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4395 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4397 unsigned HOST_WIDE_INT lo;
4401 prec = TYPE_PRECISION (etype);
4402 if (prec <= HOST_BITS_PER_WIDE_INT)
4405 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4409 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4410 lo = (unsigned HOST_WIDE_INT) -1;
/* NOTE(review): checks whether HIGH is exactly the signed max of ETYPE's
   precision — then the range [1, max] is equivalent to (signed) exp > 0.  */
4413 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4415 if (TYPE_UNSIGNED (etype))
4417 etype = signed_type_for (etype);
4418 exp = fold_convert (etype, exp);
4420 return fold_build2 (GT_EXPR, type, exp,
4421 build_int_cst (etype, 0));
4425 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4426 This requires wrap-around arithmetics for the type of the expression. */
4427 switch (TREE_CODE (etype))
4430 /* There is no requirement that LOW be within the range of ETYPE
4431 if the latter is a subtype. It must, however, be within the base
4432 type of ETYPE. So be sure we do the subtraction in that type. */
4433 if (TREE_TYPE (etype))
4434 etype = TREE_TYPE (etype);
4439 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4440 TYPE_UNSIGNED (etype));
4447 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4448 if (TREE_CODE (etype) == INTEGER_TYPE
4449 && !TYPE_OVERFLOW_WRAPS (etype))
4451 tree utype, minv, maxv;
4453 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4454 for the type in question, as we rely on this here. */
4455 utype = unsigned_type_for (etype);
4456 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4457 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4458 integer_one_node, 1);
4459 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
/* NOTE(review): the comparison of maxv+1 against minv and the switch to
   the unsigned type (orig. 4460-4466) were dropped here.  */
4461 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4468 high = fold_convert (etype, high);
4469 low = fold_convert (etype, low);
4470 exp = fold_convert (etype, exp);
4472 value = const_binop (MINUS_EXPR, high, low, 0);
4475 if (POINTER_TYPE_P (etype))
4477 if (value != 0 && !TREE_OVERFLOW (value))
4479 low = fold_convert (sizetype, low);
4480 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4481 return build_range_check (type,
4482 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4483 1, build_int_cst (etype, 0), value);
4488 if (value != 0 && !TREE_OVERFLOW (value))
4489 return build_range_check (type,
4490 fold_build2 (MINUS_EXPR, etype, exp, low),
4491 1, build_int_cst (etype, 0), value);
4496 /* Return the predecessor of VAL in its type, handling the infinite case. */
4499 range_predecessor (tree val)
4501 tree type = TREE_TYPE (val);
4503 if (INTEGRAL_TYPE_P (type)
4504 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4507 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4510 /* Return the successor of VAL in its type, handling the infinite case. */
4513 range_successor (tree val)
4515 tree type = TREE_TYPE (val);
4517 if (INTEGRAL_TYPE_P (type)
4518 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4521 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4524 /* Given two ranges, see if we can merge them into one. Return 1 if we
4525 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): lossy extraction of merge_ranges — the baked-in line
   numbers mark dropped lines: the "static int" return type, braces,
   local declarations (in_p, low, high, no_overlap, subset, temp, tem),
   the "if (in0_p && in1_p)" head before the first case, several "else"
   heads, all "return 0;" punts, and the trailing "return 1;".  Restore
   from the original before compiling.  Annotations below map the four
   in/out combinations.  */
4528 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4529 tree high0, int in1_p, tree low1, tree high1)
4537 int lowequal = ((low0 == 0 && low1 == 0)
4538 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4539 low0, 0, low1, 0)));
4540 int highequal = ((high0 == 0 && high1 == 0)
4541 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4542 high0, 1, high1, 1)));
4544 /* Make range 0 be the range that starts first, or ends last if they
4545 start at the same value. Swap them if it isn't. */
4546 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4549 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4550 high1, 1, high0, 1))))
4552 temp = in0_p, in0_p = in1_p, in1_p = temp;
4553 tem = low0, low0 = low1, low1 = tem;
4554 tem = high0, high0 = high1, high1 = tem;
4557 /* Now flag two cases, whether the ranges are disjoint or whether the
4558 second range is totally subsumed in the first. Note that the tests
4559 below are simplified by the ones above. */
4560 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4561 high0, 1, low1, 0));
4562 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4563 high1, 1, high0, 1));
4565 /* We now have four cases, depending on whether we are including or
4566 excluding the two ranges. */
/* NOTE(review): "if (in0_p && in1_p)" head dropped — intersection of two
   included ranges.  */
4569 /* If they don't overlap, the result is false. If the second range
4570 is a subset it is the result. Otherwise, the range is from the start
4571 of the second to the end of the first. */
4573 in_p = 0, low = high = 0;
4575 in_p = 1, low = low1, high = high1;
4577 in_p = 1, low = low1, high = high0;
4580 else if (in0_p && ! in1_p)
4582 /* If they don't overlap, the result is the first range. If they are
4583 equal, the result is false. If the second range is a subset of the
4584 first, and the ranges begin at the same place, we go from just after
4585 the end of the second range to the end of the first. If the second
4586 range is not a subset of the first, or if it is a subset and both
4587 ranges end at the same place, the range starts at the start of the
4588 first range and ends just before the second range.
4589 Otherwise, we can't describe this as a single range. */
4591 in_p = 1, low = low0, high = high0;
4592 else if (lowequal && highequal)
4593 in_p = 0, low = high = 0;
4594 else if (subset && lowequal)
4596 low = range_successor (high1);
4601 /* We are in the weird situation where high0 > high1 but
4602 high1 has no successor. Punt. */
4606 else if (! subset || highequal)
4609 high = range_predecessor (low1);
4613 /* low0 < low1 but low1 has no predecessor. Punt. */
4621 else if (! in0_p && in1_p)
4623 /* If they don't overlap, the result is the second range. If the second
4624 is a subset of the first, the result is false. Otherwise,
4625 the range starts just after the first range and ends at the
4626 end of the second. */
4628 in_p = 1, low = low1, high = high1;
4629 else if (subset || highequal)
4630 in_p = 0, low = high = 0;
4633 low = range_successor (high0);
4638 /* high1 > high0 but high0 has no successor. Punt. */
/* NOTE(review): final "else" head dropped — both ranges excluded.  */
4646 /* The case where we are excluding both ranges. Here the complex case
4647 is if they don't overlap. In that case, the only time we have a
4648 range is if they are adjacent. If the second is a subset of the
4649 first, the result is the first. Otherwise, the range to exclude
4650 starts at the beginning of the first range and ends at the end of the
4651 second (comment tail dropped in extraction).  */
4654 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4655 range_successor (high0),
4657 in_p = 0, low = low0, high = high1;
4660 /* Canonicalize - [min, x] into - [-, x]. */
4661 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4662 switch (TREE_CODE (TREE_TYPE (low0)))
4665 if (TYPE_PRECISION (TREE_TYPE (low0))
4666 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4670 if (tree_int_cst_equal (low0,
4671 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4675 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4676 && integer_zerop (low0))
4683 /* Canonicalize - [x, max] into - [x, -]. */
4684 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4685 switch (TREE_CODE (TREE_TYPE (high1)))
4688 if (TYPE_PRECISION (TREE_TYPE (high1))
4689 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4693 if (tree_int_cst_equal (high1,
4694 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4698 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4699 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4701 integer_one_node, 1)))
4708 /* The ranges might be also adjacent between the maximum and
4709 minimum values of the given type. For
4710 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4711 return + [x + 1, y - 1]. */
4712 if (low0 == 0 && high1 == 0)
4714 low = range_successor (high0);
4715 high = range_predecessor (low1);
4716 if (low == 0 || high == 0)
4726 in_p = 0, low = low0, high = high0;
4728 in_p = 0, low = low0, high = high1;
4731 *pin_p = in_p, *plow = low, *phigh = high;
4736 /* Subroutine of fold, looking inside expressions of the form
4737 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4738 of the COND_EXPR. This function is being used also to optimize
4739 A op B ? C : A, by reversing the comparison first.
4741 Return a folded expression whose code is not a COND_EXPR
4742 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): lossy extraction of fold_cond_expr_with_comparison — the
   baked-in line numbers mark dropped lines: the "static tree" return
   type, braces, STRIP_NOPS on arg1/arg2, every "switch (comp_code)"
   header and its case labels (EQ/NE/LT/LE/GT/GE and UN* variants), the
   "break;" lines, and the final "return NULL_TREE;".  Restore from the
   original before compiling.  Annotations below map the surviving
   structure.  */
4745 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4747 enum tree_code comp_code = TREE_CODE (arg0);
4748 tree arg00 = TREE_OPERAND (arg0, 0);
4749 tree arg01 = TREE_OPERAND (arg0, 1);
4750 tree arg1_type = TREE_TYPE (arg1);
4756 /* If we have A op 0 ? A : -A, consider applying the following
4759 A == 0? A : -A same as -A
4760 A != 0? A : -A same as A
4761 A >= 0? A : -A same as abs (A)
4762 A > 0? A : -A same as abs (A)
4763 A <= 0? A : -A same as -abs (A)
4764 A < 0? A : -A same as -abs (A)
4766 None of these transformations work for modes with signed
4767 zeros. If A is +/-0, the first two transformations will
4768 change the sign of the result (from +0 to -0, or vice
4769 versa). The last four will fix the sign of the result,
4770 even though the original expressions could be positive or
4771 negative, depending on the sign of A.
4773 Note that all these transformations are correct if A is
4774 NaN, since the two alternatives (A and -A) are also NaNs. */
4775 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4776 ? real_zerop (arg01)
4777 : integer_zerop (arg01))
4778 && ((TREE_CODE (arg2) == NEGATE_EXPR
4779 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4780 /* In the case that A is of the form X-Y, '-A' (arg2) may
4781 have already been folded to Y-X, check for that. */
4782 || (TREE_CODE (arg1) == MINUS_EXPR
4783 && TREE_CODE (arg2) == MINUS_EXPR
4784 && operand_equal_p (TREE_OPERAND (arg1, 0),
4785 TREE_OPERAND (arg2, 1), 0)
4786 && operand_equal_p (TREE_OPERAND (arg1, 1),
4787 TREE_OPERAND (arg2, 0), 0))))
/* NOTE(review): "switch (comp_code)" header and the EQ_EXPR/UNEQ_EXPR
   case labels were dropped — the first return is the A == 0 ? A : -A
   => -A transformation.  */
4792 tem = fold_convert (arg1_type, arg1);
4793 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
/* NOTE(review): NE_EXPR/LTGT_EXPR case — A != 0 ? A : -A => A.  */
4796 return pedantic_non_lvalue (fold_convert (type, arg1));
/* NOTE(review): UNGE_EXPR/UNGT_EXPR case — only valid when trapping math
   is off; then falls through to the abs transformation below.  */
4799 if (flag_trapping_math)
4804 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4805 arg1 = fold_convert (signed_type_for
4806 (TREE_TYPE (arg1)), arg1);
4807 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4808 return pedantic_non_lvalue (fold_convert (type, tem));
/* NOTE(review): UNLE_EXPR/UNLT_EXPR case — negated-abs transformation.  */
4811 if (flag_trapping_math)
4815 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4816 arg1 = fold_convert (signed_type_for
4817 (TREE_TYPE (arg1)), arg1);
4818 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4819 return negate_expr (fold_convert (type, tem));
4821 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4825 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4826 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4827 both transformations are correct when A is NaN: A != 0
4828 is then true, and A == 0 is false. */
4830 if (integer_zerop (arg01) && integer_zerop (arg2))
4832 if (comp_code == NE_EXPR)
4833 return pedantic_non_lvalue (fold_convert (type, arg1));
4834 else if (comp_code == EQ_EXPR)
4835 return build_int_cst (type, 0);
4838 /* Try some transformations of A op B ? A : B.
4840 A == B? A : B same as B
4841 A != B? A : B same as A
4842 A >= B? A : B same as max (A, B)
4843 A > B? A : B same as max (B, A)
4844 A <= B? A : B same as min (A, B)
4845 A < B? A : B same as min (B, A)
4847 As above, these transformations don't work in the presence
4848 of signed zeros. For example, if A and B are zeros of
4849 opposite sign, the first two transformations will change
4850 the sign of the result. In the last four, the original
4851 expressions give different results for (A=+0, B=-0) and
4852 (A=-0, B=+0), but the transformed expressions do not.
4854 The first two transformations are correct if either A or B
4855 is a NaN. In the first transformation, the condition will
4856 be false, and B will indeed be chosen. In the case of the
4857 second transformation, the condition A != B will be true,
4858 and A will be chosen.
4860 The conversions to max() and min() are not correct if B is
4861 a number and A is not. The conditions in the original
4862 expressions will be false, so all four give B. The min()
4863 and max() versions would give a NaN instead. */
4864 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4865 /* Avoid these transformations if the COND_EXPR may be used
4866 as an lvalue in the C++ front-end. PR c++/19199. */
4868 || (strcmp (lang_hooks.name, "GNU C++") != 0
4869 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4870 || ! maybe_lvalue_p (arg1)
4871 || ! maybe_lvalue_p (arg2)))
4873 tree comp_op0 = arg00;
4874 tree comp_op1 = arg01;
4875 tree comp_type = TREE_TYPE (comp_op0);
4877 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4878 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* NOTE(review): inner "switch (comp_code)" header dropped; EQ case
   returns the else-arm, NE case the then-arm.  */
4888 return pedantic_non_lvalue (fold_convert (type, arg2));
4890 return pedantic_non_lvalue (fold_convert (type, arg1));
/* NOTE(review): LE/LT/UNLE/UNLT case — MIN_EXPR transformation.  */
4895 /* In C++ a ?: expression can be an lvalue, so put the
4896 operand which will be used if they are equal first
4897 so that we can convert this back to the
4898 corresponding COND_EXPR. */
4899 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4901 comp_op0 = fold_convert (comp_type, comp_op0);
4902 comp_op1 = fold_convert (comp_type, comp_op1);
4903 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4904 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4905 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4906 return pedantic_non_lvalue (fold_convert (type, tem));
/* NOTE(review): GE/GT/UNGE/UNGT case — MAX_EXPR transformation.  */
4913 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4915 comp_op0 = fold_convert (comp_type, comp_op0);
4916 comp_op1 = fold_convert (comp_type, comp_op1);
4917 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4918 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4919 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4920 return pedantic_non_lvalue (fold_convert (type, tem));
/* NOTE(review): UNEQ case — else-arm; LTGT case — then-arm.  */
4924 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4925 return pedantic_non_lvalue (fold_convert (type, arg2));
4928 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4929 return pedantic_non_lvalue (fold_convert (type, arg1));
4932 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4937 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4938 we might still be able to simplify this. For example,
4939 if C1 is one less or one more than C2, this might have started
4940 out as a MIN or MAX and been transformed by this function.
4941 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4943 if (INTEGRAL_TYPE_P (type)
4944 && TREE_CODE (arg01) == INTEGER_CST
4945 && TREE_CODE (arg2) == INTEGER_CST)
/* NOTE(review): "switch (comp_code)" header and EQ_EXPR label dropped —
   the next two lines are the EQ_EXPR case.  */
4949 /* We can replace A with C1 in this case. */
4950 arg1 = fold_convert (type, arg01);
4951 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
/* NOTE(review): LT_EXPR case.  */
4954 /* If C1 is C2 + 1, this is min(A, C2). */
4955 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4957 && operand_equal_p (arg01,
4958 const_binop (PLUS_EXPR, arg2,
4959 build_int_cst (type, 1), 0),
4961 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
/* NOTE(review): LE_EXPR case.  */
4966 /* If C1 is C2 - 1, this is min(A, C2). */
4967 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4969 && operand_equal_p (arg01,
4970 const_binop (MINUS_EXPR, arg2,
4971 build_int_cst (type, 1), 0),
4973 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
/* NOTE(review): GT_EXPR case.  */
4978 /* If C1 is C2 - 1, this is max(A, C2). */
4979 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4981 && operand_equal_p (arg01,
4982 const_binop (MINUS_EXPR, arg2,
4983 build_int_cst (type, 1), 0),
4985 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
/* NOTE(review): GE_EXPR case.  */
4990 /* If C1 is C2 + 1, this is max(A, C2). */
4991 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4993 && operand_equal_p (arg01,
4994 const_binop (PLUS_EXPR, arg2,
4995 build_int_cst (type, 1), 0),
4997 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
/* Controls whether a short-circuit &&/|| may be folded into a
   non-short-circuit &/| form.  Defaults to doing so whenever branches
   are considered expensive (BRANCH_COST >= 2); targets may predefine it.
   NOTE(review): the matching #endif is elided from this listing.  */
5011 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5012 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5015 /* EXP is some logical combination of boolean tests. See if we can
5016 merge it into some range test. Return the new tree if so. */
/* NOTE(review): numbered listing with elided lines — the function's
   result-type line, `tem' declaration, several braces and trailing
   arguments are not visible here.  */
5019 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
/* Nonzero for the OR forms; both range tests are inverted below and
   the merged test is inverted back before returning.  */
5021 int or_op = (code == TRUTH_ORIF_EXPR
5022 || code == TRUTH_OR_EXPR);
5023 int in0_p, in1_p, in_p;
5024 tree low0, low1, low, high0, high1, high;
5025 bool strict_overflow_p = false;
/* Decompose each operand into "LHS in [low, high]" (or not-in, per
   in0_p/in1_p); make_range also records whether it relied on signed
   overflow being undefined via strict_overflow_p.  */
5026 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5027 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5029 const char * const warnmsg = G_("assuming signed overflow does not occur "
5030 "when simplifying range test");
5032 /* If this is an OR operation, invert both sides; we will invert
5033 again at the end. */
5035 in0_p = ! in0_p, in1_p = ! in1_p;
5037 /* If both expressions are the same, if we can merge the ranges, and we
5038 can build the range test, return it or it inverted. If one of the
5039 ranges is always true or always false, consider it to be the same
5040 expression as the other. */
5041 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5042 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5044 && 0 != (tem = (build_range_check (type,
5046 : rhs != 0 ? rhs : integer_zero_node,
5049 if (strict_overflow_p)
5050 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5051 return or_op ? invert_truthvalue (tem) : tem;
5054 /* On machines where the branch cost is expensive, if this is a
5055 short-circuited branch and the underlying object on both sides
5056 is the same, make a non-short-circuit operation. */
5057 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5058 && lhs != 0 && rhs != 0
5059 && (code == TRUTH_ANDIF_EXPR
5060 || code == TRUTH_ORIF_EXPR)
5061 && operand_equal_p (lhs, rhs, 0))
5063 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5064 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5065 which cases we can't do this. */
5066 if (simple_operand_p (lhs))
5067 return build2 (code == TRUTH_ANDIF_EXPR
5068 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
/* Not at file scope and no PLACEHOLDER_EXPR: safe to wrap the common
   operand in a SAVE_EXPR so it is evaluated only once.  */
5071 else if (lang_hooks.decls.global_bindings_p () == 0
5072 && ! CONTAINS_PLACEHOLDER_P (lhs))
5074 tree common = save_expr (lhs);
5076 if (0 != (lhs = build_range_check (type, common,
5077 or_op ? ! in0_p : in0_p,
5079 && (0 != (rhs = build_range_check (type, common,
5080 or_op ? ! in1_p : in1_p,
5083 if (strict_overflow_p)
5084 fold_overflow_warning (warnmsg,
5085 WARN_STRICT_OVERFLOW_COMPARISON);
5086 return build2 (code == TRUTH_ANDIF_EXPR
5087 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5096 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5097 bit value. Arrange things so the extra bits will be set to zero if and
5098 only if C is sign-extended to its full width. If MASK is nonzero,
5099 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): elided listing — the `static tree' line, the `temp'
   declaration, the early `return c;' body and some braces are not
   visible here.  */
5102 unextend (tree c, int p, int unsignedp, tree mask)
5104 tree type = TREE_TYPE (c);
5105 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the full mode or is unsigned
   (no sign bit to replicate).  */
5108 if (p == modesize || unsignedp)
5111 /* We work by getting just the sign bit into the low-order bit, then
5112 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) into bit 0.  */
5114 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5115 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5117 /* We must use a signed type in order to get an arithmetic right shift.
5118 However, we must also avoid introducing accidental overflows, so that
5119 a subsequent call to integer_zerop will work. Hence we must
5120 do the type conversion here. At this point, the constant is either
5121 zero or one, and the conversion to a signed type can never overflow.
5122 We could get an overflow if this conversion is done anywhere else. */
5123 if (TYPE_UNSIGNED (type))
5124 temp = fold_convert (signed_type_for (type), temp);
/* Move the bit to the top, then arithmetic-shift it back down so the
   bits above position P-1 are all copies of the sign bit.  */
5126 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5127 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the extension bits to those requested by MASK (the branch
   guarding this for a nonzero MASK is elided from this listing).  */
5129 temp = const_binop (BIT_AND_EXPR, temp,
5130 fold_convert (TREE_TYPE (c), mask), 0);
5131 /* If necessary, convert the type back to match the type of C. */
5132 if (TYPE_UNSIGNED (type))
5133 temp = fold_convert (type, temp);
5135 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5138 /* Find ways of folding logical expressions of LHS and RHS:
5139 Try to merge two comparisons to the same innermost item.
5140 Look for range tests like "ch >= '0' && ch <= '9'".
5141 Look for combinations of simple terms on machines with expensive branches
5142 and evaluate the RHS unconditionally.
5144 For example, if we have p->a == 2 && p->b == 4 and we can make an
5145 object large enough to span both A and B, we can do this with a comparison
5146 against the object ANDed with a mask.
5148 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5149 operations to do this with one comparison.
5151 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5152 function and the one above.
5154 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5155 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5157 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5160 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): elided listing — the `static tree' line, many braces,
   `return 0;' failure paths and some argument lines are not visible.  */
5163 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5165 /* If this is the "or" of two comparisons, we can do something if
5166 the comparisons are NE_EXPR. If this is the "and", we can do something
5167 if the comparisons are EQ_EXPR. I.e.,
5168 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5170 WANTED_CODE is this operation code. For single bit fields, we can
5171 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5172 comparison for one-bit fields. */
5174 enum tree_code wanted_code;
5175 enum tree_code lcode, rcode;
/* Naming scheme: the first letter is the side of the logical operator
   (l = LHS comparison, r = RHS comparison), the second is the side of
   that comparison (l = left operand, r = right operand).  So rl_arg is
   the left operand of the right-hand comparison, etc.  */
5176 tree ll_arg, lr_arg, rl_arg, rr_arg;
5177 tree ll_inner, lr_inner, rl_inner, rr_inner;
5178 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5179 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
/* x*_bitpos are the bit positions re-expressed relative to the wider
   field chosen by get_best_mode below.  */
5180 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5181 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5182 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5183 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5184 enum machine_mode lnmode, rnmode;
5185 tree ll_mask, lr_mask, rl_mask, rr_mask;
5186 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5187 tree l_const, r_const;
5188 tree lntype, rntype, result;
5189 int first_bit, end_bit;
/* Saved so we can tell at the end whether anything actually changed.  */
5191 tree orig_lhs = lhs, orig_rhs = rhs;
5192 enum tree_code orig_code = code;
5194 /* Start by getting the comparison codes. Fail if anything is volatile.
5195 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5196 it were surrounded with a NE_EXPR. */
5198 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5201 lcode = TREE_CODE (lhs);
5202 rcode = TREE_CODE (rhs);
/* Canonicalize (x & 1) to (x & 1) != 0 on each side.  */
5204 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5206 lhs = build2 (NE_EXPR, truth_type, lhs,
5207 build_int_cst (TREE_TYPE (lhs), 0));
5211 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5213 rhs = build2 (NE_EXPR, truth_type, rhs,
5214 build_int_cst (TREE_TYPE (rhs), 0));
/* Both sides must now be comparisons to proceed.  */
5218 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5219 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5222 ll_arg = TREE_OPERAND (lhs, 0);
5223 lr_arg = TREE_OPERAND (lhs, 1);
5224 rl_arg = TREE_OPERAND (rhs, 0);
5225 rr_arg = TREE_OPERAND (rhs, 1);
5227 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5228 if (simple_operand_p (ll_arg)
5229 && simple_operand_p (lr_arg))
/* Same operands in the same order: combine the comparison codes.  */
5232 if (operand_equal_p (ll_arg, rl_arg, 0)
5233 && operand_equal_p (lr_arg, rr_arg, 0))
5235 result = combine_comparisons (code, lcode, rcode,
5236 truth_type, ll_arg, lr_arg);
/* Same operands but swapped on the RHS comparison: swap its code
   first, then combine.  */
5240 else if (operand_equal_p (ll_arg, rr_arg, 0)
5241 && operand_equal_p (lr_arg, rl_arg, 0))
5243 result = combine_comparisons (code, lcode,
5244 swap_tree_comparison (rcode),
5245 truth_type, ll_arg, lr_arg);
/* From here on, treat the short-circuit forms like their plain
   counterparts.  */
5251 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5252 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5254 /* If the RHS can be evaluated unconditionally and its operands are
5255 simple, it wins to evaluate the RHS unconditionally on machines
5256 with expensive branches. In this case, this isn't a comparison
5257 that can be merged. Avoid doing this if the RHS is a floating-point
5258 comparison since those can trap. */
5260 if (BRANCH_COST >= 2
5261 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5262 && simple_operand_p (rl_arg)
5263 && simple_operand_p (rr_arg))
5265 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5266 if (code == TRUTH_OR_EXPR
5267 && lcode == NE_EXPR && integer_zerop (lr_arg)
5268 && rcode == NE_EXPR && integer_zerop (rr_arg)
5269 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5270 return build2 (NE_EXPR, truth_type,
5271 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5273 build_int_cst (TREE_TYPE (ll_arg), 0));
5275 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5276 if (code == TRUTH_AND_EXPR
5277 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5278 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5279 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5280 return build2 (EQ_EXPR, truth_type,
5281 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5283 build_int_cst (TREE_TYPE (ll_arg), 0));
5285 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild when something changed; otherwise fall through and
   avoid infinite re-folding of the same tree.  */
5287 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5288 return build2 (code, truth_type, lhs, rhs);
5293 /* See if the comparisons can be merged. Then get all the parameters for
5296 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5297 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each comparison operand into an inner object plus
   bit-field position/size/mode/signedness and masks.  */
5301 ll_inner = decode_field_reference (ll_arg,
5302 &ll_bitsize, &ll_bitpos, &ll_mode,
5303 &ll_unsignedp, &volatilep, &ll_mask,
5305 lr_inner = decode_field_reference (lr_arg,
5306 &lr_bitsize, &lr_bitpos, &lr_mode,
5307 &lr_unsignedp, &volatilep, &lr_mask,
5309 rl_inner = decode_field_reference (rl_arg,
5310 &rl_bitsize, &rl_bitpos, &rl_mode,
5311 &rl_unsignedp, &volatilep, &rl_mask,
5313 rr_inner = decode_field_reference (rr_arg,
5314 &rr_bitsize, &rr_bitpos, &rr_mode,
5315 &rr_unsignedp, &volatilep, &rr_mask,
5318 /* It must be true that the inner operation on the lhs of each
5319 comparison must be the same if we are to be able to do anything.
5320 Then see if we have constants. If not, the same must be true for
5322 if (volatilep || ll_inner == 0 || rl_inner == 0
5323 || ! operand_equal_p (ll_inner, rl_inner, 0))
5326 if (TREE_CODE (lr_arg) == INTEGER_CST
5327 && TREE_CODE (rr_arg) == INTEGER_CST)
5328 l_const = lr_arg, r_const = rr_arg;
5329 else if (lr_inner == 0 || rr_inner == 0
5330 || ! operand_equal_p (lr_inner, rr_inner, 0))
5333 l_const = r_const = 0;
5335 /* If either comparison code is not correct for our logical operation,
5336 fail. However, we can convert a one-bit comparison against zero into
5337 the opposite comparison against that bit being set in the field. */
5339 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5340 if (lcode != wanted_code)
5342 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5344 /* Make the left operand unsigned, since we are only interested
5345 in the value of one bit. Otherwise we are doing the wrong
5354 /* This is analogous to the code for l_const above. */
5355 if (rcode != wanted_code)
5357 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5366 /* See if we can find a mode that contains both fields being compared on
5367 the left. If we can't, fail. Otherwise, update all constants and masks
5368 to be relative to a field of that size. */
5369 first_bit = MIN (ll_bitpos, rl_bitpos);
5370 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5371 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5372 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5374 if (lnmode == VOIDmode)
5377 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the start of the combined field on a field-size boundary.  */
5378 lnbitpos = first_bit & ~ (lnbitsize - 1);
5379 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5380 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
/* On big-endian targets bit numbering within the field is reversed.  */
5382 if (BYTES_BIG_ENDIAN)
5384 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5385 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5388 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5389 size_int (xll_bitpos), 0);
5390 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5391 size_int (xrl_bitpos), 0);
/* Shift the left-hand constant into position; if any of its bits fall
   outside the field's mask the comparison result is known.  */
5395 l_const = fold_convert (lntype, l_const);
5396 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5397 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5398 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5399 fold_build1 (BIT_NOT_EXPR,
5403 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5405 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
/* Same treatment for the right-hand constant.  */
5410 r_const = fold_convert (lntype, r_const);
5411 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5412 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5413 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5414 fold_build1 (BIT_NOT_EXPR,
5418 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5420 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5424 /* If the right sides are not constant, do the same for it. Also,
5425 disallow this optimization if a size or signedness mismatch occurs
5426 between the left and right sides. */
5429 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5430 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5431 /* Make sure the two fields on the right
5432 correspond to the left without being swapped. */
5433 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5436 first_bit = MIN (lr_bitpos, rr_bitpos);
5437 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5438 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5439 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5441 if (rnmode == VOIDmode)
5444 rnbitsize = GET_MODE_BITSIZE (rnmode);
5445 rnbitpos = first_bit & ~ (rnbitsize - 1);
5446 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5447 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5449 if (BYTES_BIG_ENDIAN)
5451 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5452 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5455 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5456 size_int (xlr_bitpos), 0);
5457 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5458 size_int (xrr_bitpos), 0);
5460 /* Make a mask that corresponds to both fields being compared.
5461 Do this for both items being compared. If the operands are the
5462 same size and the bits being compared are in the same position
5463 then we can do this by masking both and comparing the masked
5465 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5466 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5467 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5469 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5470 ll_unsignedp || rl_unsignedp);
/* Skip the AND when the mask covers the whole field.  */
5471 if (! all_ones_mask_p (ll_mask, lnbitsize))
5472 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5474 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5475 lr_unsignedp || rr_unsignedp);
5476 if (! all_ones_mask_p (lr_mask, rnbitsize))
5477 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5479 return build2 (wanted_code, truth_type, lhs, rhs);
5482 /* There is still another way we can do something: If both pairs of
5483 fields being compared are adjacent, we may be able to make a wider
5484 field containing them both.
5486 Note that we still must mask the lhs/rhs expressions. Furthermore,
5487 the mask must be shifted to account for the shift done by
5488 make_bit_field_ref. */
5489 if ((ll_bitsize + ll_bitpos == rl_bitpos
5490 && lr_bitsize + lr_bitpos == rr_bitpos)
5491 || (ll_bitpos == rl_bitpos + rl_bitsize
5492 && lr_bitpos == rr_bitpos + rr_bitsize))
5496 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5497 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5498 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5499 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5501 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5502 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5503 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5504 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5506 /* Convert to the smaller type before masking out unwanted bits. */
5508 if (lntype != rntype)
5510 if (lnbitsize > rnbitsize)
5512 lhs = fold_convert (rntype, lhs);
5513 ll_mask = fold_convert (rntype, ll_mask);
5516 else if (lnbitsize < rnbitsize)
5518 rhs = fold_convert (lntype, rhs);
5519 lr_mask = fold_convert (lntype, lr_mask);
/* NOTE(review): `type' here presumably refers to the common type
   chosen above (assignment elided from this listing) — confirm
   against the full source.  */
5524 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5525 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5527 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5528 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5530 return build2 (wanted_code, truth_type, lhs, rhs);
5536 /* Handle the case of comparisons with constants. If there is something in
5537 common between the masks, those bits of the constants must be the same.
5538 If not, the condition is always false. Test for this to avoid generating
5539 incorrect code below. */
5540 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5541 if (! integer_zerop (result)
5542 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5543 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5545 if (wanted_code == NE_EXPR)
5547 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5548 return constant_boolean_node (true, truth_type);
5552 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5553 return constant_boolean_node (false, truth_type);
5557 /* Construct the expression we will return. First get the component
5558 reference we will make. Unless the mask is all ones the width of
5559 that field, perform the mask operation. Then compare with the
5561 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5562 ll_unsignedp || rl_unsignedp);
5564 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5565 if (! all_ones_mask_p (ll_mask, lnbitsize))
5566 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
/* The merged constant is the OR of both shifted constants.  */
5568 return build2 (wanted_code, truth_type, result,
5569 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5572 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  NOTE(review): the remainder of this comment is elided from
   this listing; presumably OP0 is the MIN/MAX expression, OP1 the
   constant being compared against, TYPE the result type and CODE the
   comparison code — confirm against the full source.  */
5576 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5579 enum tree_code op_code;
5580 tree comp_const = op1;
/* consts_equal / consts_lt compare the MIN/MAX constant against the
   comparison constant.  */
5582 int consts_equal, consts_lt;
5585 STRIP_SIGN_NOPS (arg0);
5587 op_code = TREE_CODE (arg0);
5588 minmax_const = TREE_OPERAND (arg0, 1);
5589 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5590 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5591 inner = TREE_OPERAND (arg0, 0);
5593 /* If something does not permit us to optimize, return the original tree. */
5594 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5595 || TREE_CODE (comp_const) != INTEGER_CST
5596 || TREE_OVERFLOW (comp_const)
5597 || TREE_CODE (minmax_const) != INTEGER_CST
5598 || TREE_OVERFLOW (minmax_const))
5601 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5602 and GT_EXPR, doing the rest with recursive calls using logical
/* <, <=, != are handled by inverting the comparison and the result.  */
5606 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5608 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5611 return invert_truthvalue (tem);
/* >= is handled as (== || >).  */
5617 fold_build2 (TRUTH_ORIF_EXPR, type,
5618 optimize_minmax_comparison
5619 (EQ_EXPR, type, arg0, comp_const),
5620 optimize_minmax_comparison
5621 (GT_EXPR, type, arg0, comp_const));
5624 if (op_code == MAX_EXPR && consts_equal)
5625 /* MAX (X, 0) == 0 -> X <= 0 */
5626 return fold_build2 (LE_EXPR, type, inner, comp_const);
5628 else if (op_code == MAX_EXPR && consts_lt)
5629 /* MAX (X, 0) == 5 -> X == 5 */
5630 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5632 else if (op_code == MAX_EXPR)
5633 /* MAX (X, 0) == -1 -> false */
5634 return omit_one_operand (type, integer_zero_node, inner);
5636 else if (consts_equal)
5637 /* MIN (X, 0) == 0 -> X >= 0 */
5638 return fold_build2 (GE_EXPR, type, inner, comp_const);
5641 /* MIN (X, 0) == 5 -> false */
5642 return omit_one_operand (type, integer_zero_node, inner);
5645 /* MIN (X, 0) == -1 -> X == -1 */
5646 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5649 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5650 /* MAX (X, 0) > 0 -> X > 0
5651 MAX (X, 0) > 5 -> X > 5 */
5652 return fold_build2 (GT_EXPR, type, inner, comp_const);
5654 else if (op_code == MAX_EXPR)
5655 /* MAX (X, 0) > -1 -> true */
5656 return omit_one_operand (type, integer_one_node, inner);
5658 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5659 /* MIN (X, 0) > 0 -> false
5660 MIN (X, 0) > 5 -> false */
5661 return omit_one_operand (type, integer_zero_node, inner);
5664 /* MIN (X, 0) > -1 -> X > -1 */
5665 return fold_build2 (GT_EXPR, type, inner, comp_const);
5672 /* T is an integer expression that is being multiplied, divided, or taken a
5673 modulus (CODE says which and what kind of divide or modulus) by a
5674 constant C. See if we can eliminate that operation by folding it with
5675 other operations already in T. WIDE_TYPE, if non-null, is a type that
5676 should be used for the computation if wider than our type.
5678 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5679 (X * 2) + (Y * 4). We must, however, be assured that either the original
5680 expression would not overflow or that overflow is undefined for the type
5681 in the language in question.
5683 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5684 the machine has a multiply-accumulate insn or that this is part of an
5685 addressing calculation.
5687 If we return a non-null expression, it is an equivalent form of the
5688 original computation, but need not be in the original type.
5690 We set *STRICT_OVERFLOW_P to true if the return value depends on
5691 signed overflow being undefined. Otherwise we do not change
5692 *STRICT_OVERFLOW_P. */
/* Thin wrapper: bounds the recursion depth and delegates the real work
   to extract_muldiv_1.  NOTE(review): the depth counter, its
   increment/decrement and the return statement are elided from this
   listing.  */
5695 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5696 bool *strict_overflow_p)
5698 /* To avoid exponential search depth, refuse to allow recursion past
5699 three levels. Beyond that (1) it's highly unlikely that we'll find
5700 something interesting and (2) we've probably processed it before
5701 when we built the inner expression. */
5710 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5717 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5718 bool *strict_overflow_p)
5720 tree type = TREE_TYPE (t);
5721 enum tree_code tcode = TREE_CODE (t);
5722 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5723 > GET_MODE_SIZE (TYPE_MODE (type)))
5724 ? wide_type : type);
5726 int same_p = tcode == code;
5727 tree op0 = NULL_TREE, op1 = NULL_TREE;
5728 bool sub_strict_overflow_p;
5730 /* Don't deal with constants of zero here; they confuse the code below. */
5731 if (integer_zerop (c))
5734 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5735 op0 = TREE_OPERAND (t, 0);
5737 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5738 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5740 /* Note that we need not handle conditional operations here since fold
5741 already handles those cases. So just do arithmetic here. */
5745 /* For a constant, we can always simplify if we are a multiply
5746 or (for divide and modulus) if it is a multiple of our constant. */
5747 if (code == MULT_EXPR
5748 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5749 return const_binop (code, fold_convert (ctype, t),
5750 fold_convert (ctype, c), 0);
5753 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5754 /* If op0 is an expression ... */
5755 if ((COMPARISON_CLASS_P (op0)
5756 || UNARY_CLASS_P (op0)
5757 || BINARY_CLASS_P (op0)
5758 || VL_EXP_CLASS_P (op0)
5759 || EXPRESSION_CLASS_P (op0))
5760 /* ... and is unsigned, and its type is smaller than ctype,
5761 then we cannot pass through as widening. */
5762 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5763 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5764 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5765 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5766 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5767 /* ... or this is a truncation (t is narrower than op0),
5768 then we cannot pass through this narrowing. */
5769 || (GET_MODE_SIZE (TYPE_MODE (type))
5770 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5771 /* ... or signedness changes for division or modulus,
5772 then we cannot pass through this conversion. */
5773 || (code != MULT_EXPR
5774 && (TYPE_UNSIGNED (ctype)
5775 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5778 /* Pass the constant down and see if we can make a simplification. If
5779 we can, replace this expression with the inner simplification for
5780 possible later conversion to our or some other type. */
5781 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5782 && TREE_CODE (t2) == INTEGER_CST
5783 && !TREE_OVERFLOW (t2)
5784 && (0 != (t1 = extract_muldiv (op0, t2, code,
5786 ? ctype : NULL_TREE,
5787 strict_overflow_p))))
5792 /* If widening the type changes it from signed to unsigned, then we
5793 must avoid building ABS_EXPR itself as unsigned. */
5794 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5796 tree cstype = (*signed_type_for) (ctype);
5797 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5800 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5801 return fold_convert (ctype, t1);
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5809 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5812 case MIN_EXPR: case MAX_EXPR:
5813 /* If widening the type changes the signedness, then we can't perform
5814 this optimization as that changes the result. */
5815 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5818 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5819 sub_strict_overflow_p = false;
5820 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5821 &sub_strict_overflow_p)) != 0
5822 && (t2 = extract_muldiv (op1, c, code, wide_type,
5823 &sub_strict_overflow_p)) != 0)
5825 if (tree_int_cst_sgn (c) < 0)
5826 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5827 if (sub_strict_overflow_p)
5828 *strict_overflow_p = true;
5829 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5830 fold_convert (ctype, t2));
5834 case LSHIFT_EXPR: case RSHIFT_EXPR:
5835 /* If the second operand is constant, this is a multiplication
5836 or floor division, by a power of two, so we can treat it that
5837 way unless the multiplier or divisor overflows. Signed
5838 left-shift overflow is implementation-defined rather than
5839 undefined in C90, so do not convert signed left shift into
5841 if (TREE_CODE (op1) == INTEGER_CST
5842 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5843 /* const_binop may not detect overflow correctly,
5844 so check for it explicitly here. */
5845 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5846 && TREE_INT_CST_HIGH (op1) == 0
5847 && 0 != (t1 = fold_convert (ctype,
5848 const_binop (LSHIFT_EXPR,
5851 && !TREE_OVERFLOW (t1))
5852 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5853 ? MULT_EXPR : FLOOR_DIV_EXPR,
5854 ctype, fold_convert (ctype, op0), t1),
5855 c, code, wide_type, strict_overflow_p);
5858 case PLUS_EXPR: case MINUS_EXPR:
5859 /* See if we can eliminate the operation on both sides. If we can, we
5860 can return a new PLUS or MINUS. If we can't, the only remaining
5861 cases where we can do anything are if the second operand is a
5863 sub_strict_overflow_p = false;
5864 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5865 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5866 if (t1 != 0 && t2 != 0
5867 && (code == MULT_EXPR
5868 /* If not multiplication, we can only do this if both operands
5869 are divisible by c. */
5870 || (multiple_of_p (ctype, op0, c)
5871 && multiple_of_p (ctype, op1, c))))
5873 if (sub_strict_overflow_p)
5874 *strict_overflow_p = true;
5875 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5876 fold_convert (ctype, t2));
5879 /* If this was a subtraction, negate OP1 and set it to be an addition.
5880 This simplifies the logic below. */
5881 if (tcode == MINUS_EXPR)
5882 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5884 if (TREE_CODE (op1) != INTEGER_CST)
5887 /* If either OP1 or C are negative, this optimization is not safe for
5888 some of the division and remainder types while for others we need
5889 to change the code. */
5890 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5892 if (code == CEIL_DIV_EXPR)
5893 code = FLOOR_DIV_EXPR;
5894 else if (code == FLOOR_DIV_EXPR)
5895 code = CEIL_DIV_EXPR;
5896 else if (code != MULT_EXPR
5897 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5901 /* If it's a multiply or a division/modulus operation of a multiple
5902 of our constant, do the operation and verify it doesn't overflow. */
5903 if (code == MULT_EXPR
5904 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5906 op1 = const_binop (code, fold_convert (ctype, op1),
5907 fold_convert (ctype, c), 0);
5908 /* We allow the constant to overflow with wrapping semantics. */
5910 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5916 /* If we have an unsigned type is not a sizetype, we cannot widen
5917 the operation since it will change the result if the original
5918 computation overflowed. */
5919 if (TYPE_UNSIGNED (ctype)
5920 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5924 /* If we were able to eliminate our operation from the first side,
5925 apply our operation to the second side and reform the PLUS. */
5926 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5927 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5929 /* The last case is if we are a multiply. In that case, we can
5930 apply the distributive law to commute the multiply and addition
5931 if the multiplication of the constants doesn't overflow. */
5932 if (code == MULT_EXPR)
5933 return fold_build2 (tcode, ctype,
5934 fold_build2 (code, ctype,
5935 fold_convert (ctype, op0),
5936 fold_convert (ctype, c)),
5942 /* We have a special case here if we are doing something like
5943 (C * 8) % 4 since we know that's zero. */
5944 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5945 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5946 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5947 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5948 return omit_one_operand (type, integer_zero_node, op0);
5950 /* ... fall through ... */
5952 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5953 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5954 /* If we can extract our operation from the LHS, do so and return a
5955 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5956 do something only if the second operand is a constant. */
5958 && (t1 = extract_muldiv (op0, c, code, wide_type,
5959 strict_overflow_p)) != 0)
5960 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5961 fold_convert (ctype, op1));
5962 else if (tcode == MULT_EXPR && code == MULT_EXPR
5963 && (t1 = extract_muldiv (op1, c, code, wide_type,
5964 strict_overflow_p)) != 0)
5965 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5966 fold_convert (ctype, t1));
5967 else if (TREE_CODE (op1) != INTEGER_CST)
5970 /* If these are the same operation types, we can associate them
5971 assuming no overflow. */
5973 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5974 fold_convert (ctype, c), 0))
5975 && !TREE_OVERFLOW (t1))
5976 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5978 /* If these operations "cancel" each other, we have the main
5979 optimizations of this pass, which occur when either constant is a
5980 multiple of the other, in which case we replace this with either an
5981 operation or CODE or TCODE.
5983 If we have an unsigned type that is not a sizetype, we cannot do
5984 this since it will change the result if the original computation
5986 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5987 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5988 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5989 || (tcode == MULT_EXPR
5990 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5991 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5993 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5995 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5996 *strict_overflow_p = true;
5997 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5998 fold_convert (ctype,
5999 const_binop (TRUNC_DIV_EXPR,
6002 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6004 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6005 *strict_overflow_p = true;
6006 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6007 fold_convert (ctype,
6008 const_binop (TRUNC_DIV_EXPR,
6021 /* Return a node which has the indicated constant VALUE (either 0 or
6022 1), and is of the indicated TYPE. */
6025 constant_boolean_node (int value, tree type)
6027 if (type == integer_type_node)
6028 return value ? integer_one_node : integer_zero_node;
6029 else if (type == boolean_type_node)
6030 return value ? boolean_true_node : boolean_false_node;
6032 return build_int_cst (type, value);
6036 /* Return true if expr looks like an ARRAY_REF and set base and
6037 offset to the appropriate trees. If there is no offset,
6038 offset is set to NULL_TREE. Base will be canonicalized to
6039 something you can get the element type from using
6040 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
6041 in bytes to the base in sizetype. */
/* NOTE(review): this listing drops many numbered source lines (the
   embedded numbering jumps, e.g. 6041->6044, 6056->6059); missing braces,
   returns and else-arms are not visible here.  Comments only added.  */
6044 extract_array_ref (tree expr, tree *base, tree *offset)
6046 /* One canonical form is a PLUS_EXPR with the first
6047 argument being an ADDR_EXPR with a possible NOP_EXPR
6049 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
6051 tree op0 = TREE_OPERAND (expr, 0);
6052 tree inner_base, dummy1;
6053 /* Strip NOP_EXPRs here because the C frontends and/or
6054 folders present us (int *)&x.a p+ 4 possibly. */
/* Recurse on the pointer operand; on success accumulate the p+ offset
   (converted to sizetype) onto any offset the inner reference had.  */
6056 if (extract_array_ref (op0, &inner_base, &dummy1))
6059 *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
6060 if (dummy1 != NULL_TREE)
6061 *offset = fold_build2 (PLUS_EXPR, sizetype,
6066 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
6067 which we transform into an ADDR_EXPR with appropriate
6068 offset. For other arguments to the ADDR_EXPR we assume
6069 zero offset and as such do not care about the ADDR_EXPR
6070 type and strip possible nops from it. */
6071 else if (TREE_CODE (expr) == ADDR_EXPR)
6073 tree op0 = TREE_OPERAND (expr, 0);
6074 if (TREE_CODE (op0) == ARRAY_REF)
/* &a[i]: offset is i * element-size, expressed in sizetype.  */
6076 tree idx = TREE_OPERAND (op0, 1);
6077 *base = TREE_OPERAND (op0, 0);
6078 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6079 array_ref_element_size (op0));
6080 *offset = fold_convert (sizetype, *offset);
6084 /* Handle array-to-pointer decay as &a. */
6085 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6086 *base = TREE_OPERAND (expr, 0);
6089 *offset = NULL_TREE;
6093 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6094 else if (SSA_VAR_P (expr)
6095 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
/* Plain pointer variable: it is its own base, no offset.  */
6098 *offset = NULL_TREE;
6106 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6107 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6108 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6109 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6110 COND is the first argument to CODE; otherwise (as in the example
6111 given here), it is the second argument. TYPE is the type of the
6112 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): several numbered lines are absent from this listing
   (numbering jumps, e.g. 6129->6132, 6142->6147); early returns, braces
   and else-arms live in those gaps.  Comments only added here.  */
6116 fold_binary_op_with_conditional_arg (enum tree_code code,
6117 tree type, tree op0, tree op1,
6118 tree cond, tree arg, int cond_first_p)
/* The declared types of the conditional side and the plain side depend
   on which operand position COND occupies.  */
6120 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6121 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6122 tree test, true_value, false_value;
6123 tree lhs = NULL_TREE;
6124 tree rhs = NULL_TREE;
6126 /* This transformation is only worthwhile if we don't have to wrap
6127 arg in a SAVE_EXPR, and the operation can be simplified on at least
6128 one of the branches once its pushed inside the COND_EXPR. */
6129 if (!TREE_CONSTANT (arg))
6132 if (TREE_CODE (cond) == COND_EXPR)
/* Genuine ?: expression — pull its three operands apart.  */
6134 test = TREE_OPERAND (cond, 0);
6135 true_value = TREE_OPERAND (cond, 1);
6136 false_value = TREE_OPERAND (cond, 2);
6137 /* If this operand throws an expression, then it does not make
6138 sense to try to perform a logical or arithmetic operation
6140 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6142 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* Otherwise treat COND as a boolean-valued expression: model it as
   cond ? true : false with constants of COND's own type.  */
6147 tree testtype = TREE_TYPE (cond);
6149 true_value = constant_boolean_node (true, testtype);
6150 false_value = constant_boolean_node (false, testtype);
6153 arg = fold_convert (arg_type, arg);
/* Push CODE into each arm, preserving operand order per COND_FIRST_P.  */
6156 true_value = fold_convert (cond_type, true_value);
6158 lhs = fold_build2 (code, type, true_value, arg);
6160 lhs = fold_build2 (code, type, arg, true_value);
6164 false_value = fold_convert (cond_type, false_value);
6166 rhs = fold_build2 (code, type, false_value, arg);
6168 rhs = fold_build2 (code, type, arg, false_value);
/* Rebuild the conditional around the two folded arms.  */
6171 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6172 return fold_convert (type, test);
6176 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6178 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6179 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6180 ADDEND is the same as X.
6182 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6183 and finite. The problematic cases are when X is zero, and its mode
6184 has signed zeros. In the case of rounding towards -infinity,
6185 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6186 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6189 fold_real_zero_addition_p (tree type, tree addend, int negate)
6191 if (!real_zerop (addend))
6194 /* Don't allow the fold with -fsignaling-nans. */
6195 if (HONOR_SNANS (TYPE_MODE (type)))
6198 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6199 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6202 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6203 if (TREE_CODE (addend) == REAL_CST
6204 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6207 /* The mode has signed zeros, and we have to honor their sign.
6208 In this situation, there is only one case we can return true for.
6209 X - 0 is the same as X unless rounding towards -infinity is
6211 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6214 /* Subroutine of fold() that checks comparisons of built-in math
6215 functions against real constants.
6217 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6218 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6219 is the type of the result and ARG0 and ARG1 are the operands of the
6220 comparison. ARG1 must be a TREE_REAL_CST.
6222 The function returns the constant folded tree if a simplification
6223 can be made, and NULL_TREE otherwise. */
/* NOTE(review): numbered lines are missing throughout this listing
   (e.g. 6228-6230, 6253-6255); declarations of the REAL_VALUE_TYPE
   locals, braces and the final return are in those gaps.  */
6226 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6227 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled in the visible portion.  */
6231 if (BUILTIN_SQRT_P (fcode))
6233 tree arg = CALL_EXPR_ARG (arg0, 0);
6234 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6236 c = TREE_REAL_CST (arg1);
/* Case 1: comparing sqrt(x) against a negative constant.  */
6237 if (REAL_VALUE_NEGATIVE (c))
6239 /* sqrt(x) < y is always false, if y is negative. */
6240 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6241 return omit_one_operand (type, integer_zero_node, arg);
6243 /* sqrt(x) > y is always true, if y is negative and we
6244 don't care about NaNs, i.e. negative values of x. */
6245 if (code == NE_EXPR || !HONOR_NANS (mode))
6246 return omit_one_operand (type, integer_one_node, arg);
6248 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6249 return fold_build2 (GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg), dconst0));
/* Case 2: sqrt(x) >/>= c with c >= 0 — square c and compare x.  */
6252 else if (code == GT_EXPR || code == GE_EXPR)
6256 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6257 real_convert (&c2, mode, &c2);
/* c*c overflowed to infinity: the comparison degenerates.  */
6259 if (REAL_VALUE_ISINF (c2))
6261 /* sqrt(x) > y is x == +Inf, when y is very large. */
6262 if (HONOR_INFINITIES (mode))
6263 return fold_build2 (EQ_EXPR, type, arg,
6264 build_real (TREE_TYPE (arg), c2));
6266 /* sqrt(x) > y is always false, when y is very large
6267 and we don't care about infinities. */
6268 return omit_one_operand (type, integer_zero_node, arg);
6271 /* sqrt(x) > c is the same as x > c*c. */
6272 return fold_build2 (code, type, arg,
6273 build_real (TREE_TYPE (arg), c2));
/* Case 3: sqrt(x) </<= c with c >= 0.  */
6275 else if (code == LT_EXPR || code == LE_EXPR)
6279 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6280 real_convert (&c2, mode, &c2);
6282 if (REAL_VALUE_ISINF (c2))
6284 /* sqrt(x) < y is always true, when y is a very large
6285 value and we don't care about NaNs or Infinities. */
6286 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6287 return omit_one_operand (type, integer_one_node, arg);
6289 /* sqrt(x) < y is x != +Inf when y is very large and we
6290 don't care about NaNs. */
6291 if (! HONOR_NANS (mode))
6292 return fold_build2 (NE_EXPR, type, arg,
6293 build_real (TREE_TYPE (arg), c2));
6295 /* sqrt(x) < y is x >= 0 when y is very large and we
6296 don't care about Infinities. */
6297 if (! HONOR_INFINITIES (mode))
6298 return fold_build2 (GE_EXPR, type, arg,
6299 build_real (TREE_TYPE (arg), dconst0));
6301 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* save_expr needs an expression context; bail at global scope or
   inside a PLACEHOLDER_EXPR.  */
6302 if (lang_hooks.decls.global_bindings_p () != 0
6303 || CONTAINS_PLACEHOLDER_P (arg))
6306 arg = save_expr (arg);
6307 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6308 fold_build2 (GE_EXPR, type, arg,
6309 build_real (TREE_TYPE (arg),
6311 fold_build2 (NE_EXPR, type, arg,
6312 build_real (TREE_TYPE (arg),
6316 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6317 if (! HONOR_NANS (mode))
6318 return fold_build2 (code, type, arg,
6319 build_real (TREE_TYPE (arg), c2));
6321 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6322 if (lang_hooks.decls.global_bindings_p () == 0
6323 && ! CONTAINS_PLACEHOLDER_P (arg))
6325 arg = save_expr (arg);
6326 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6327 fold_build2 (GE_EXPR, type, arg,
6328 build_real (TREE_TYPE (arg),
6330 fold_build2 (code, type, arg,
6331 build_real (TREE_TYPE (arg),
6340 /* Subroutine of fold() that optimizes comparisons against Infinities,
6341 either +Inf or -Inf.
6343 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6344 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6345 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6347 The function returns the constant folded tree if a simplification
6348 can be made, and NULL_TREE otherwise. */
/* NOTE(review): this listing has dropped lines (numbering jumps such as
   6363->6368, 6383->6389); in particular the `switch (code)` statement
   and its `case` labels are missing, so each commented group below
   belongs to a case arm that is not visible here.  */
6351 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6353 enum machine_mode mode;
6354 REAL_VALUE_TYPE max;
6358 mode = TYPE_MODE (TREE_TYPE (arg0));
6360 /* For negative infinity swap the sense of the comparison. */
6361 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6363 code = swap_tree_comparison (code);
/* (case GT_EXPR, per surviving comments)  */
6368 /* x > +Inf is always false, if with ignore sNANs. */
6369 if (HONOR_SNANS (mode))
6371 return omit_one_operand (type, integer_zero_node, arg0);
/* (case LE_EXPR)  */
6374 /* x <= +Inf is always true, if we don't case about NaNs. */
6375 if (! HONOR_NANS (mode))
6376 return omit_one_operand (type, integer_one_node, arg0);
6378 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6379 if (lang_hooks.decls.global_bindings_p () == 0
6380 && ! CONTAINS_PLACEHOLDER_P (arg0))
6382 arg0 = save_expr (arg0);
6383 return fold_build2 (EQ_EXPR, type, arg0, arg0);
/* (case EQ_EXPR / GE_EXPR)  */
6389 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6390 real_maxval (&max, neg, mode);
6391 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6392 arg0, build_real (TREE_TYPE (arg0), max));
/* (case LT_EXPR)  */
6395 /* x < +Inf is always equal to x <= DBL_MAX. */
6396 real_maxval (&max, neg, mode);
6397 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6398 arg0, build_real (TREE_TYPE (arg0), max));
/* (case NE_EXPR)  */
6401 /* x != +Inf is always equal to !(x > DBL_MAX). */
6402 real_maxval (&max, neg, mode);
6403 if (! HONOR_NANS (mode))
6404 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6405 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, build the logical negation of x > DBL_MAX instead.  */
6407 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6408 arg0, build_real (TREE_TYPE (arg0), max));
6409 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6418 /* Subroutine of fold() that optimizes comparisons of a division by
6419 a nonzero integer constant against an integer constant, i.e.
6422 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6423 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6424 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6426 The function returns the constant folded tree if a simplification
6427 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the listing drops many numbered lines (switch/case
   labels, some lo/hi assignments, braces) — see the jumps 6437->6441,
   6471->6474, 6485->6495, 6514->6526.  The strategy, from what is
   visible: compute [lo, hi] = the range of values of arg00 for which
   arg00 / arg01 equals arg1, then rewrite the comparison as a range
   check on arg00.  Comments only added here.  */
6430 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6432 tree prod, tmp, hi, lo;
6433 tree arg00 = TREE_OPERAND (arg0, 0);
6434 tree arg01 = TREE_OPERAND (arg0, 1);
6435 unsigned HOST_WIDE_INT lpart;
6436 HOST_WIDE_INT hpart;
6437 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6441 /* We have to do this the hard way to detect unsigned overflow.
6442 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6443 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6444 TREE_INT_CST_HIGH (arg01),
6445 TREE_INT_CST_LOW (arg1),
6446 TREE_INT_CST_HIGH (arg1),
6447 &lpart, &hpart, unsigned_p);
6448 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6450 neg_overflow = false;
/* Unsigned divisor branch (presumably; the guard line is missing):
   the matching range is [prod, prod + (arg01 - 1)].  */
6454 tmp = int_const_binop (MINUS_EXPR, arg01,
6455 build_int_cst (TREE_TYPE (arg01), 1), 0);
6458 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6459 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6460 TREE_INT_CST_HIGH (prod),
6461 TREE_INT_CST_LOW (tmp),
6462 TREE_INT_CST_HIGH (tmp),
6463 &lpart, &hpart, unsigned_p);
6464 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6465 -1, overflow | TREE_OVERFLOW (prod));
/* Signed, nonnegative divisor: range endpoints depend on sign of arg1.  */
6467 else if (tree_int_cst_sgn (arg01) >= 0)
6469 tmp = int_const_binop (MINUS_EXPR, arg01,
6470 build_int_cst (TREE_TYPE (arg01), 1), 0);
6471 switch (tree_int_cst_sgn (arg1))
6474 neg_overflow = true;
6475 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6480 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6485 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Negative divisor branch.  */
6495 /* A negative divisor reverses the relational operators. */
6496 code = swap_tree_comparison (code);
6498 tmp = int_const_binop (PLUS_EXPR, arg01,
6499 build_int_cst (TREE_TYPE (arg01), 1), 0);
6500 switch (tree_int_cst_sgn (arg1))
6503 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6508 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6513 neg_overflow = true;
6514 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Emit the rewritten comparison.  The case labels (EQ/NE/LT/LE/GT/GE)
   are among the dropped lines; the groups below follow that order.  */
6526 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6527 return omit_one_operand (type, integer_zero_node, arg00);
6528 if (TREE_OVERFLOW (hi))
6529 return fold_build2 (GE_EXPR, type, arg00, lo);
6530 if (TREE_OVERFLOW (lo))
6531 return fold_build2 (LE_EXPR, type, arg00, hi);
6532 return build_range_check (type, arg00, 1, lo, hi);
6535 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6536 return omit_one_operand (type, integer_one_node, arg00);
6537 if (TREE_OVERFLOW (hi))
6538 return fold_build2 (LT_EXPR, type, arg00, lo);
6539 if (TREE_OVERFLOW (lo))
6540 return fold_build2 (GT_EXPR, type, arg00, hi);
6541 return build_range_check (type, arg00, 0, lo, hi);
6544 if (TREE_OVERFLOW (lo))
6546 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6547 return omit_one_operand (type, tmp, arg00);
6549 return fold_build2 (LT_EXPR, type, arg00, lo);
6552 if (TREE_OVERFLOW (hi))
6554 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6555 return omit_one_operand (type, tmp, arg00);
6557 return fold_build2 (LE_EXPR, type, arg00, hi);
6560 if (TREE_OVERFLOW (hi))
6562 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6563 return omit_one_operand (type, tmp, arg00);
6565 return fold_build2 (GT_EXPR, type, arg00, hi);
6568 if (TREE_OVERFLOW (lo))
6570 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6571 return omit_one_operand (type, tmp, arg00);
6573 return fold_build2 (GE_EXPR, type, arg00, lo);
6583 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6584 equality/inequality test, then return a simplified form of the test
6585 using a sign testing. Otherwise return NULL. TYPE is the desired
6589 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6592 /* If this is testing a single bit, we can optimize the test. */
6593 if ((code == NE_EXPR || code == EQ_EXPR)
6594 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6595 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6597 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6598 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6599 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6601 if (arg00 != NULL_TREE
6602 /* This is only a win if casting to a signed type is cheap,
6603 i.e. when arg00's type is not a partial mode. */
6604 && TYPE_PRECISION (TREE_TYPE (arg00))
6605 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6607 tree stype = signed_type_for (TREE_TYPE (arg00));
6608 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6609 result_type, fold_convert (stype, arg00),
6610 build_int_cst (stype, 0));
6617 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6618 equality/inequality test, then return a simplified form of
6619 the test using shifts and logical operations. Otherwise return
6620 NULL. TYPE is the desired result type. */
/* NOTE(review): numbered lines are missing from this listing (jumps
   6629->6631, 6668->6673, 6691->...); the dropped lines include local
   declarations, the early `return tem;`, the #else arm that sets
   ops_unsigned when LOAD_EXTEND_OP is undefined, and the final
   return/braces.  Comments only added.  */
6623 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6626 /* If this is testing a single bit, we can optimize the test. */
6627 if ((code == NE_EXPR || code == EQ_EXPR)
6628 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6629 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6631 tree inner = TREE_OPERAND (arg0, 0);
6632 tree type = TREE_TYPE (arg0);
6633 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6634 enum machine_mode operand_mode = TYPE_MODE (type);
6636 tree signed_type, unsigned_type, intermediate_type;
6639 /* First, see if we can fold the single bit test into a sign-bit
6641 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6646 /* Otherwise we have (A & C) != 0 where C is a single bit,
6647 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6648 Similarly for (A & C) == 0. */
6650 /* If INNER is a right shift of a constant and it plus BITNUM does
6651 not overflow, adjust BITNUM and INNER. */
6652 if (TREE_CODE (inner) == RSHIFT_EXPR
6653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6654 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6655 && bitnum < TYPE_PRECISION (type)
6656 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6657 bitnum - TYPE_PRECISION (type)))
/* Fold (X >> n) & (1 << b) into testing bit n+b of X directly.  */
6659 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6660 inner = TREE_OPERAND (inner, 0);
6663 /* If we are going to be able to omit the AND below, we must do our
6664 operations as unsigned. If we must use the AND, we have a choice.
6665 Normally unsigned is faster, but for some machines signed is. */
6666 #ifdef LOAD_EXTEND_OP
6667 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6668 && !flag_syntax_only) ? 0 : 1;
/* (#else arm choosing ops_unsigned unconditionally is among the
   dropped lines.)  */
6673 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6674 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6675 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6676 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0.  */
6679 inner = build2 (RSHIFT_EXPR, intermediate_type,
6680 inner, size_int (bitnum));
6682 one = build_int_cst (intermediate_type, 1);
/* For == 0, invert the bit so the final AND yields the truth value.  */
6684 if (code == EQ_EXPR)
6685 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6687 /* Put the AND last so it can combine with more things. */
6688 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6690 /* Make sure to return the proper type. */
6691 inner = fold_convert (result_type, inner);
6698 /* Check whether we are allowed to reorder operands arg0 and arg1,
6699 such that the evaluation of arg1 occurs before arg0. */
6702 reorder_operands_p (tree arg0, tree arg1)
6704 if (! flag_evaluation_order)
6706 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6708 return ! TREE_SIDE_EFFECTS (arg0)
6709 && ! TREE_SIDE_EFFECTS (arg1);
6712 /* Test whether it is preferable two swap two operands, ARG0 and
6713 ARG1, for example because ARG0 is an integer constant and ARG1
6714 isn't. If REORDER is true, only recommend swapping if we can
6715 evaluate the operands in reverse order. */
6718 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6720 STRIP_SIGN_NOPS (arg0);
6721 STRIP_SIGN_NOPS (arg1);
6723 if (TREE_CODE (arg1) == INTEGER_CST)
6725 if (TREE_CODE (arg0) == INTEGER_CST)
6728 if (TREE_CODE (arg1) == REAL_CST)
6730 if (TREE_CODE (arg0) == REAL_CST)
6733 if (TREE_CODE (arg1) == COMPLEX_CST)
6735 if (TREE_CODE (arg0) == COMPLEX_CST)
6738 if (TREE_CONSTANT (arg1))
6740 if (TREE_CONSTANT (arg0))
6746 if (reorder && flag_evaluation_order
6747 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6750 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6751 for commutative and comparison operators. Ensuring a canonical
6752 form allows the optimizers to find additional redundancies without
6753 having to explicitly check for both orderings. */
6754 if (TREE_CODE (arg0) == SSA_NAME
6755 && TREE_CODE (arg1) == SSA_NAME
6756 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6759 /* Put SSA_NAMEs last. */
6760 if (TREE_CODE (arg1) == SSA_NAME)
6762 if (TREE_CODE (arg0) == SSA_NAME)
6765 /* Put variables last. */
6774 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6775 ARG0 is extended to a wider type. */
/* NOTE(review): many numbered lines are dropped from this listing
   (jumps 6782->6786, 6828->6835, ...).  In particular the final
   switch on CODE and its case labels are missing — the trailing
   omit_one_operand calls below belong to those unseen cases, keyed
   off the computed `above`/`below` flags.  Comments only added.  */
6778 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6780 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6782 tree shorter_type, outer_type;
/* Nothing to do unless ARG0 really is a widening of something.  */
6786 if (arg0_unw == arg0)
6788 shorter_type = TREE_TYPE (arg0_unw);
6790 #ifdef HAVE_canonicalize_funcptr_for_compare
6791 /* Disable this optimization if we're casting a function pointer
6792 type on targets that require function pointer canonicalization. */
6793 if (HAVE_canonicalize_funcptr_for_compare
6794 && TREE_CODE (shorter_type) == POINTER_TYPE
6795 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6799 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6802 arg1_unw = get_unwidened (arg1, shorter_type);
6804 /* If possible, express the comparison in the shorter mode. */
6805 if ((code == EQ_EXPR || code == NE_EXPR
6806 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6807 && (TREE_TYPE (arg1_unw) == shorter_type
6808 || (TREE_CODE (arg1_unw) == INTEGER_CST
6809 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6810 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6811 && int_fits_type_p (arg1_unw, shorter_type))))
6812 return fold_build2 (code, type, arg0_unw,
6813 fold_convert (shorter_type, arg1_unw))
6864 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6865 ARG0 just the signedness is changed. */
6868 fold_sign_changed_comparison (enum tree_code code, tree type,
6869 tree arg0, tree arg1)
6872 tree inner_type, outer_type;
6874 if (TREE_CODE (arg0) != NOP_EXPR
6875 && TREE_CODE (arg0) != CONVERT_EXPR)
6878 outer_type = TREE_TYPE (arg0);
6879 arg0_inner = TREE_OPERAND (arg0, 0);
6880 inner_type = TREE_TYPE (arg0_inner);
6882 #ifdef HAVE_canonicalize_funcptr_for_compare
6883 /* Disable this optimization if we're casting a function pointer
6884 type on targets that require function pointer canonicalization. */
6885 if (HAVE_canonicalize_funcptr_for_compare
6886 && TREE_CODE (inner_type) == POINTER_TYPE
6887 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6891 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6894 if (TREE_CODE (arg1) != INTEGER_CST
6895 && !((TREE_CODE (arg1) == NOP_EXPR
6896 || TREE_CODE (arg1) == CONVERT_EXPR)
6897 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6900 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6905 if (TREE_CODE (arg1) == INTEGER_CST)
6906 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6907 TREE_INT_CST_HIGH (arg1), 0,
6908 TREE_OVERFLOW (arg1));
6910 arg1 = fold_convert (inner_type, arg1);
6912 return fold_build2 (code, type, arg0_inner, arg1);
6915 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6916 step of the array. Reconstructs s and delta in the case of s * delta
6917 being an integer constant (and thus already folded).
6918 ADDR is the address. MULT is the multiplicative expression.
6919 If the function succeeds, the new address expression is returned. Otherwise
6920 NULL_TREE is returned. */
/* NOTE(review): large numbered gaps here (6926->6931, 6938->6943,
   6948->6956, ...) hide the STRIP_NOPS call, the assignments that set
   s/delta in each canonicalization branch, several failure `return
   NULL_TREE;` paths, and the copy loop header near the end.  Comments
   only added; code bytes untouched.  */
6923 try_move_mult_to_index (tree addr, tree op1)
6925 tree s, delta, step;
6926 tree ref = TREE_OPERAND (addr, 0), pref;
6931 /* Strip the nops that might be added when converting op1 to sizetype. */
6934 /* Canonicalize op1 into a possibly non-constant delta
6935 and an INTEGER_CST s. */
6936 if (TREE_CODE (op1) == MULT_EXPR)
6938 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
/* Whichever MULT operand is constant becomes s; the other is delta
   (the actual assignments are among the dropped lines).  */
6943 if (TREE_CODE (arg0) == INTEGER_CST)
6948 else if (TREE_CODE (arg1) == INTEGER_CST)
6956 else if (TREE_CODE (op1) == INTEGER_CST)
6963 /* Simulate we are delta * 1. */
6965 s = integer_one_node;
/* Walk down the handled components of the address looking for an
   ARRAY_REF whose element size matches s (or divides delta).  */
6968 for (;; ref = TREE_OPERAND (ref, 0))
6970 if (TREE_CODE (ref) == ARRAY_REF)
6972 /* Remember if this was a multi-dimensional array. */
6973 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6976 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6980 step = array_ref_element_size (ref);
6981 if (TREE_CODE (step) != INTEGER_CST)
6986 if (! tree_int_cst_equal (step, s))
6991 /* Try if delta is a multiple of step. */
6992 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6998 /* Only fold here if we can verify we do not overflow one
6999 dimension of a multi-dimensional array. */
/* Overflow check needs a constant index and a known constant upper
   bound on the dimension's domain.  */
7004 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7005 || !INTEGRAL_TYPE_P (itype)
7006 || !TYPE_MAX_VALUE (itype)
7007 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7010 tmp = fold_binary (PLUS_EXPR, itype,
7011 fold_convert (itype,
7012 TREE_OPERAND (ref, 1)),
7013 fold_convert (itype, delta));
7015 || TREE_CODE (tmp) != INTEGER_CST
7016 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7025 if (!handled_component_p (ref))
7029 /* We found the suitable array reference. So copy everything up to it,
7030 and replace the index. */
7032 pref = TREE_OPERAND (addr, 0);
7033 ret = copy_node (pref);
/* Rebuild the chain of component refs down to the ARRAY_REF, then
   bump its index by delta.  */
7038 pref = TREE_OPERAND (pref, 0);
7039 TREE_OPERAND (pos, 0) = copy_node (pref);
7040 pos = TREE_OPERAND (pos, 0);
7043 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7044 fold_convert (itype,
7045 TREE_OPERAND (pos, 1)),
7046 fold_convert (itype, delta));
7048 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7052 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7053 means A >= Y && A != MAX, but in this case we know that
7054 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7057 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7059 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7061 if (TREE_CODE (bound) == LT_EXPR)
7062 a = TREE_OPERAND (bound, 0);
7063 else if (TREE_CODE (bound) == GT_EXPR)
7064 a = TREE_OPERAND (bound, 1);
7068 typea = TREE_TYPE (a);
7069 if (!INTEGRAL_TYPE_P (typea)
7070 && !POINTER_TYPE_P (typea))
7073 if (TREE_CODE (ineq) == LT_EXPR)
7075 a1 = TREE_OPERAND (ineq, 1);
7076 y = TREE_OPERAND (ineq, 0);
7078 else if (TREE_CODE (ineq) == GT_EXPR)
7080 a1 = TREE_OPERAND (ineq, 0);
7081 y = TREE_OPERAND (ineq, 1);
7086 if (TREE_TYPE (a1) != typea)
7089 if (POINTER_TYPE_P (typea))
7091 /* Convert the pointer types into integer before taking the difference. */
7092 tree ta = fold_convert (ssizetype, a);
7093 tree ta1 = fold_convert (ssizetype, a1);
7094 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7097 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7099 if (!diff || !integer_onep (diff))
7102 return fold_build2 (GE_EXPR, type, a, y);
7105 /* Fold a sum or difference of at least one multiplication.
7106 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): this listing drops numbered lines throughout (jumps
   7123->7128, 7168->7175, 7183->7188); the missing lines include the
   else-arms that split a non-MULT operand into operand * 1, the
   power-of-two factor computation, the `same` re-check after factor
   extraction, and the final NULL return.  Comments only added.  */
7109 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7111 tree arg00, arg01, arg10, arg11;
7112 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7114 /* (A * C) +- (B * C) -> (A+-B) * C.
7115 (A * C) +- A -> A * (C+-1).
7116 We are most concerned about the case where C is a constant,
7117 but other combinations show up during loop reduction. Since
7118 it is not difficult, try all four possibilities. */
/* Split each operand into factor pairs; a non-MULT operand is treated
   as itself times one (else-arm lines are missing here).  */
7120 if (TREE_CODE (arg0) == MULT_EXPR)
7122 arg00 = TREE_OPERAND (arg0, 0);
7123 arg01 = TREE_OPERAND (arg0, 1);
7128 arg01 = build_one_cst (type);
7130 if (TREE_CODE (arg1) == MULT_EXPR)
7132 arg10 = TREE_OPERAND (arg1, 0);
7133 arg11 = TREE_OPERAND (arg1, 1);
7138 arg11 = build_one_cst (type);
/* Find a common factor `same` among the four combinations.  */
7142 if (operand_equal_p (arg01, arg11, 0))
7143 same = arg01, alt0 = arg00, alt1 = arg10;
7144 else if (operand_equal_p (arg00, arg10, 0))
7145 same = arg00, alt0 = arg01, alt1 = arg11;
7146 else if (operand_equal_p (arg00, arg11, 0))
7147 same = arg00, alt0 = arg01, alt1 = arg10;
7148 else if (operand_equal_p (arg01, arg10, 0))
7149 same = arg01, alt0 = arg00, alt1 = arg11;
7151 /* No identical multiplicands; see if we can find a common
7152 power-of-two factor in non-power-of-two multiplies. This
7153 can help in multi-dimensional array access. */
7154 else if (host_integerp (arg01, 0)
7155 && host_integerp (arg11, 0))
7157 HOST_WIDE_INT int01, int11, tmp;
7160 int01 = TREE_INT_CST_LOW (arg01);
7161 int11 = TREE_INT_CST_LOW (arg11);
7163 /* Move min of absolute values to int11. */
7164 if ((int01 >= 0 ? int01 : -int01)
7165 < (int11 >= 0 ? int11 : -int11))
7167 tmp = int01, int01 = int11, int11 = tmp;
7168 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* Only a power-of-two factor that divides the other constant helps.  */
7175 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7177 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7178 build_int_cst (TREE_TYPE (arg00),
7183 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Distribute: (alt0 +- alt1) * same.  */
7188 return fold_build2 (MULT_EXPR, type,
7189 fold_build2 (code, type,
7190 fold_convert (type, alt0),
7191 fold_convert (type, alt1)),
7192 fold_convert (type, same));
7197 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7198 specified by EXPR into the buffer PTR of length LEN bytes.
7199 Return the number of bytes placed in the buffer, or zero upon failure.  */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Serialize the INTEGER_CST EXPR into PTR (LEN bytes) in the target's
   byte/word order; see the header comment above.  */
7203 native_encode_int (tree expr, unsigned char *ptr, int len)
7205 tree type = TREE_TYPE (expr);
7206 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7207 int byte, offset, word, words;
7208 unsigned char value;
/* Fail (return 0, per the contract above) if the buffer is too small.  */
7210 if (total_bytes > len)
7212 words = total_bytes / UNITS_PER_WORD;
7214 for (byte = 0; byte < total_bytes; byte++)
7216 int bitpos = byte * BITS_PER_UNIT;
/* The constant's bits live in the low/high HOST_WIDE_INT pair.  */
7217 if (bitpos < HOST_BITS_PER_WIDE_INT)
7218 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7220 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7221 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's word/byte endianness.  */
7223 if (total_bytes > UNITS_PER_WORD)
7225 word = byte / UNITS_PER_WORD;
7226 if (WORDS_BIG_ENDIAN)
7227 word = (words - 1) - word;
7228 offset = word * UNITS_PER_WORD;
7229 if (BYTES_BIG_ENDIAN)
7230 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7232 offset += byte % UNITS_PER_WORD;
7235 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7236 ptr[offset] = value;
7242 /* Subroutine of native_encode_expr. Encode the REAL_CST
7243 specified by EXPR into the buffer PTR of length LEN bytes.
7244 Return the number of bytes placed in the buffer, or zero upon failure.  */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Serialize the REAL_CST EXPR into PTR (LEN bytes) via the target's
   floating-point representation (real_to_target).  */
7248 native_encode_real (tree expr, unsigned char *ptr, int len)
7250 tree type = TREE_TYPE (expr);
7251 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7252 int byte, offset, word, words, bitpos;
7253 unsigned char value;
7255 /* There are always 32 bits in each long, no matter the size of
7256 the hosts long.  We handle floating point representations with
/* Fail if the buffer is too small for the mode's bytes.  */
7260 if (total_bytes > len)
7262 words = 32 / UNITS_PER_WORD;
/* TMP (declared on an elided line) receives the target image in
   32-bit chunks.  */
7264 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7266 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7267 bitpos += BITS_PER_UNIT)
/* BYTE is the position within the current 32-bit chunk.  */
7269 byte = (bitpos / BITS_PER_UNIT) & 3;
7270 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Place the byte according to target word/byte endianness.  */
7272 if (UNITS_PER_WORD < 4)
7274 word = byte / UNITS_PER_WORD;
7275 if (WORDS_BIG_ENDIAN)
7276 word = (words - 1) - word;
7277 offset = word * UNITS_PER_WORD;
7278 if (BYTES_BIG_ENDIAN)
7279 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7281 offset += byte % UNITS_PER_WORD;
7284 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7285 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7290 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7291 specified by EXPR into the buffer PTR of length LEN bytes.
7292 Return the number of bytes placed in the buffer, or zero upon failure.  */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Encode a COMPLEX_CST as its real part followed by its imaginary part,
   each via native_encode_expr.  */
7296 native_encode_complex (tree expr, unsigned char *ptr, int len)
7301 part = TREE_REALPART (expr);
7302 rsize = native_encode_expr (part, ptr, len);
/* The imaginary part goes immediately after the real part's bytes.  */
7305 part = TREE_IMAGPART (expr);
7306 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7309 return rsize + isize;
7313 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero upon failure.  */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Encode a VECTOR_CST element by element; trailing elements missing from
   the element list are zero-filled.  */
7319 native_encode_vector (tree expr, unsigned char *ptr, int len)
7321 int i, size, offset, count;
7322 tree itype, elem, elements;
7325 elements = TREE_VECTOR_CST_ELTS (expr);
7326 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7327 itype = TREE_TYPE (TREE_TYPE (expr));
7328 size = GET_MODE_SIZE (TYPE_MODE (itype));
7329 for (i = 0; i < count; i++)
7333 elem = TREE_VALUE (elements);
7334 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly SIZE bytes.  */
7341 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7346 if (offset + size > len)
/* Elements past the end of the chain are implicitly zero.  */
7348 memset (ptr+offset, 0, size);
7356 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7357 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7358 buffer PTR of length LEN bytes. Return the number of bytes
7359 placed in the buffer, or zero upon failure. */
/* NOTE(review): extracted fragment — the case labels for this dispatch
   switch are on elided lines.  Dispatches on TREE_CODE (EXPR) to the
   per-constant-kind encoders above.  */
7362 native_encode_expr (tree expr, unsigned char *ptr, int len)
7364 switch (TREE_CODE (expr))
7367 return native_encode_int (expr, ptr, len);
7370 return native_encode_real (expr, ptr, len);
7373 return native_encode_complex (expr, ptr, len);
7376 return native_encode_vector (expr, ptr, len);
7384 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Inverse of native_encode_int: read target-ordered bytes from PTR and
   rebuild an INTEGER_CST of TYPE.  */
7389 native_interpret_int (tree type, unsigned char *ptr, int len)
7391 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7392 int byte, offset, word, words;
7393 unsigned char value;
7394 unsigned int HOST_WIDE_INT lo = 0;
7395 HOST_WIDE_INT hi = 0;
/* Fail if the buffer is short or the type is wider than the
   two-HOST_WIDE_INT representation can hold.  */
7397 if (total_bytes > len)
7399 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7401 words = total_bytes / UNITS_PER_WORD;
7403 for (byte = 0; byte < total_bytes; byte++)
7405 int bitpos = byte * BITS_PER_UNIT;
/* Same endianness mapping as native_encode_int, in reverse.  */
7406 if (total_bytes > UNITS_PER_WORD)
7408 word = byte / UNITS_PER_WORD;
7409 if (WORDS_BIG_ENDIAN)
7410 word = (words - 1) - word;
7411 offset = word * UNITS_PER_WORD;
7412 if (BYTES_BIG_ENDIAN)
7413 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7415 offset += byte % UNITS_PER_WORD;
7418 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7419 value = ptr[offset];
/* Accumulate into the low/high HOST_WIDE_INT pair.  */
7421 if (bitpos < HOST_BITS_PER_WIDE_INT)
7422 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7424 hi |= (unsigned HOST_WIDE_INT) value
7425 << (bitpos - HOST_BITS_PER_WIDE_INT);
7428 return build_int_cst_wide_type (type, lo, hi);
7432 /* Subroutine of native_interpret_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7434 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Inverse of native_encode_real: reassemble 32-bit chunks from PTR and
   convert them back into a REAL_CST via real_from_target.  */
7437 native_interpret_real (tree type, unsigned char *ptr, int len)
7439 enum machine_mode mode = TYPE_MODE (type);
7440 int total_bytes = GET_MODE_SIZE (mode);
7441 int byte, offset, word, words, bitpos;
7442 unsigned char value;
7443 /* There are always 32 bits in each long, no matter the size of
7444 the hosts long.  We handle floating point representations with
7449 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes = 192 bits is the largest FP image handled here.  */
7450 if (total_bytes > len || total_bytes > 24)
7452 words = 32 / UNITS_PER_WORD;
/* TMP (declared on an elided line) holds the target image chunks.  */
7454 memset (tmp, 0, sizeof (tmp));
7455 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7456 bitpos += BITS_PER_UNIT)
7458 byte = (bitpos / BITS_PER_UNIT) & 3;
/* Same endianness mapping as native_encode_real, in reverse.  */
7459 if (UNITS_PER_WORD < 4)
7461 word = byte / UNITS_PER_WORD;
7462 if (WORDS_BIG_ENDIAN)
7463 word = (words - 1) - word;
7464 offset = word * UNITS_PER_WORD;
7465 if (BYTES_BIG_ENDIAN)
7466 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7468 offset += byte % UNITS_PER_WORD;
7471 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7472 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7474 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7477 real_from_target (&r, tmp, mode);
7478 return build_real (type, r);
7482 /* Subroutine of native_interpret_expr. Interpret the contents of
7483 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7484 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Decode real then imaginary element halves of the buffer and build a
   COMPLEX_CST of TYPE.  */
7487 native_interpret_complex (tree type, unsigned char *ptr, int len)
7489 tree etype, rpart, ipart;
7492 etype = TREE_TYPE (type);
7493 size = GET_MODE_SIZE (TYPE_MODE (etype));
7496 rpart = native_interpret_expr (etype, ptr, size);
/* The imaginary part follows the real part's SIZE bytes.  */
7499 ipart = native_interpret_expr (etype, ptr+size, size);
7502 return build_complex (type, rpart, ipart);
7506 /* Subroutine of native_interpret_expr. Interpret the contents of
7507 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7508 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Decode COUNT elements of SIZE bytes each and chain them (built in
   reverse so the list ends up in order) into a VECTOR_CST.  */
7511 native_interpret_vector (tree type, unsigned char *ptr, int len)
7513 tree etype, elem, elements;
7516 etype = TREE_TYPE (type);
7517 size = GET_MODE_SIZE (TYPE_MODE (etype));
7518 count = TYPE_VECTOR_SUBPARTS (type);
/* Fail if the buffer cannot hold all elements.  */
7519 if (size * count > len)
7522 elements = NULL_TREE;
/* Iterate backwards so tree_cons produces elements in forward order.  */
7523 for (i = count - 1; i >= 0; i--)
7525 elem = native_interpret_expr (etype, ptr+(i*size), size);
7528 elements = tree_cons (NULL_TREE, elem, elements);
7530 return build_vector (type, elements);
7534 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7535 the buffer PTR of length LEN as a constant of type TYPE. For
7536 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7537 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7538 return NULL_TREE. */
/* NOTE(review): extracted fragment — the case labels of this dispatch
   switch are on elided lines.  Dispatches on TREE_CODE (TYPE) to the
   per-kind interpreters above.  */
7541 native_interpret_expr (tree type, unsigned char *ptr, int len)
7543 switch (TREE_CODE (type))
7548 return native_interpret_int (type, ptr, len);
7551 return native_interpret_real (type, ptr, len);
7554 return native_interpret_complex (type, ptr, len);
7557 return native_interpret_vector (type, ptr, len);
7565 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7566 TYPE at compile-time. If we're unable to perform the conversion
7567 return NULL_TREE. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Compile-time VIEW_CONVERT_EXPR: encode EXPR into a byte buffer, then
   reinterpret those bytes as TYPE.  */
7570 fold_view_convert_expr (tree type, tree expr)
7572 /* We support up to 512-bit values (for V8DFmode).  */
7573 unsigned char buffer[64];
7576 /* Check that the host and target are sane.  */
7577 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7580 len = native_encode_expr (expr, buffer, sizeof (buffer));
7584 return native_interpret_expr (type, buffer, len);
7587 /* Build an expression for the address of T. Folds away INDIRECT_REF
7588 to avoid confusing the gimplify process. When IN_FOLD is true
7589 avoid modifications of T. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Builds &T of type PTRTYPE; folds away INDIRECT_REF and, when not
   IN_FOLD, marks the base object addressable.  */
7592 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7594 /* The size of the object is not relevant when talking about its address.  */
7595 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7596 t = TREE_OPERAND (t, 0);
7598 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7599 if (TREE_CODE (t) == INDIRECT_REF
7600 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p (with a NOP cast if the pointer type differs).  */
7602 t = TREE_OPERAND (t, 0);
7604 if (TREE_TYPE (t) != ptrtype)
7605 t = build1 (NOP_EXPR, ptrtype, t);
/* Walk down component references to the underlying base object.  */
7611 while (handled_component_p (base))
7612 base = TREE_OPERAND (base, 0);
7615 TREE_ADDRESSABLE (base) = 1;
7617 t = build1 (ADDR_EXPR, ptrtype, t);
7620 t = build1 (ADDR_EXPR, ptrtype, t);
7625 /* Build an expression for the address of T with type PTRTYPE. This
7626 function modifies the input parameter 'T' by sometimes setting the
7627 TREE_ADDRESSABLE flag. */
/* NOTE(review): fragment — signature/brace lines elided.  Thin wrapper:
   non-fold variant (in_fold == false), so it may set TREE_ADDRESSABLE.  */
7630 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7632 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7635 /* Build an expression for the address of T. This function modifies
7636 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7637 flag. When called from fold functions, use fold_addr_expr instead. */
/* NOTE(review): fragment — the final argument (false, per the elided line
   7644) and braces are elided.  Wrapper that supplies the pointer type
   built from T's own type.  */
7640 build_fold_addr_expr (tree t)
7642 return build_fold_addr_expr_with_type_1 (t,
7643 build_pointer_type (TREE_TYPE (t)),
7647 /* Same as build_fold_addr_expr, builds an expression for the address
7648 of T, but avoids touching the input node 't'. Fold functions
7649 should use this version. */
/* NOTE(review): fragment — braces elided.  Fold-safe variant: passes
   in_fold == true so T is not modified (no TREE_ADDRESSABLE marking).  */
7652 fold_addr_expr (tree t)
7654 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7656 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7659 /* Fold a unary expression of code CODE and type TYPE with operand
7660 OP0. Return the folded expression if folding is successful.
7661 Otherwise, return NULL_TREE. */
/* NOTE(review): extracted fragment of the large fold_unary dispatcher —
   many lines (case labels, braces, returns) are elided; comments only
   added.  Folds a unary expression CODE of TYPE applied to OP0, returning
   the simplified tree or NULL_TREE.  */
7664 fold_unary (enum tree_code code, tree type, tree op0)
7668 enum tree_code_class kind = TREE_CODE_CLASS (code);
7670 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7671 && TREE_CODE_LENGTH (code) == 1);
/* Conversion-like codes must preserve the signedness of the operand's
   type, hence STRIP_SIGN_NOPS rather than STRIP_NOPS.  */
7676 if (code == NOP_EXPR || code == CONVERT_EXPR
7677 || code == FLOAT_EXPR || code == ABS_EXPR)
7679 /* Don't use STRIP_NOPS, because signedness of argument type
7681 STRIP_SIGN_NOPS (arg0);
7685 /* Strip any conversions that don't change the mode.  This
7686 is safe for every expression, except for a comparison
7687 expression because its signedness is derived from its
7690 Note that this is done as an internal manipulation within
7691 the constant folder, in order to find the simplest
7692 representation of the arguments so that their form can be
7693 studied.  In any cases, the appropriate type conversions
7694 should be put back in the tree that will get out of the
/* Distribute the unary op into COMPOUND_EXPR and COND_EXPR operands.  */
7700 if (TREE_CODE_CLASS (code) == tcc_unary)
7702 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7703 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7704 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7705 else if (TREE_CODE (arg0) == COND_EXPR)
7707 tree arg01 = TREE_OPERAND (arg0, 1);
7708 tree arg02 = TREE_OPERAND (arg0, 2);
7709 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7710 arg01 = fold_build1 (code, type, arg01);
7711 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7712 arg02 = fold_build1 (code, type, arg02);
7713 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7716 /* If this was a conversion, and all we did was to move into
7717 inside the COND_EXPR, bring it back out.  But leave it if
7718 it is a conversion from integer to integer and the
7719 result precision is no wider than a word since such a
7720 conversion is cheap and may be optimized away by combine,
7721 while it couldn't if it were outside the COND_EXPR.  Then return
7722 so we don't get into an infinite recursion loop taking the
7723 conversion out and then back in.  */
7725 if ((code == NOP_EXPR || code == CONVERT_EXPR
7726 || code == NON_LVALUE_EXPR)
7727 && TREE_CODE (tem) == COND_EXPR
7728 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7729 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7730 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7731 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7732 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7733 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7734 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7736 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7737 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7738 || flag_syntax_only))
7739 tem = build1 (code, type,
7741 TREE_TYPE (TREE_OPERAND
7742 (TREE_OPERAND (tem, 1), 0)),
7743 TREE_OPERAND (tem, 0),
7744 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7745 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
/* A comparison operand: retype it for BOOLEAN_TYPE, otherwise expand
   it to a COND_EXPR unless the result is INTEGER_TYPE.  */
7748 else if (COMPARISON_CLASS_P (arg0))
7750 if (TREE_CODE (type) == BOOLEAN_TYPE)
7752 arg0 = copy_node (arg0);
7753 TREE_TYPE (arg0) = type;
7756 else if (TREE_CODE (type) != INTEGER_TYPE)
7757 return fold_build3 (COND_EXPR, type, arg0,
7758 fold_build1 (code, type,
7760 fold_build1 (code, type,
7761 integer_zero_node));
/* Conversion cases (NOP/CONVERT/FLOAT and FIX_TRUNC_EXPR).  */
7770 case FIX_TRUNC_EXPR:
7771 if (TREE_TYPE (op0) == type)
7774 /* If we have (type) (a CMP b) and type is an integral type, return
7775 new expression involving the new type.  */
7776 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7777 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7778 TREE_OPERAND (op0, 1));
7780 /* Handle cases of two conversions in a row.  */
7781 if (TREE_CODE (op0) == NOP_EXPR
7782 || TREE_CODE (op0) == CONVERT_EXPR)
7784 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7785 tree inter_type = TREE_TYPE (op0);
/* Classify the innermost, intermediate and final types by kind,
   precision and signedness for the elision tests below.  */
7786 int inside_int = INTEGRAL_TYPE_P (inside_type);
7787 int inside_ptr = POINTER_TYPE_P (inside_type);
7788 int inside_float = FLOAT_TYPE_P (inside_type);
7789 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7790 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7791 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7792 int inter_int = INTEGRAL_TYPE_P (inter_type);
7793 int inter_ptr = POINTER_TYPE_P (inter_type);
7794 int inter_float = FLOAT_TYPE_P (inter_type);
7795 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7796 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7797 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7798 int final_int = INTEGRAL_TYPE_P (type);
7799 int final_ptr = POINTER_TYPE_P (type);
7800 int final_float = FLOAT_TYPE_P (type);
7801 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7802 unsigned int final_prec = TYPE_PRECISION (type);
7803 int final_unsignedp = TYPE_UNSIGNED (type);
7805 /* In addition to the cases of two conversions in a row
7806 handled below, if we are converting something to its own
7807 type via an object of identical or wider precision, neither
7808 conversion is needed.  */
7809 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7810 && (((inter_int || inter_ptr) && final_int)
7811 || (inter_float && final_float))
7812 && inter_prec >= final_prec)
7813 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7815 /* Likewise, if the intermediate and final types are either both
7816 float or both integer, we don't need the middle conversion if
7817 it is wider than the final type and doesn't change the signedness
7818 (for integers).  Avoid this if the final type is a pointer
7819 since then we sometimes need the inner conversion.  Likewise if
7820 the outer has a precision not equal to the size of its mode.  */
7821 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7822 || (inter_float && inside_float)
7823 || (inter_vec && inside_vec))
7824 && inter_prec >= inside_prec
7825 && (inter_float || inter_vec
7826 || inter_unsignedp == inside_unsignedp)
7827 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7828 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7830 && (! final_vec || inter_prec == inside_prec))
7831 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7833 /* If we have a sign-extension of a zero-extended value, we can
7834 replace that by a single zero-extension.  */
7835 if (inside_int && inter_int && final_int
7836 && inside_prec < inter_prec && inter_prec < final_prec
7837 && inside_unsignedp && !inter_unsignedp)
7838 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7840 /* Two conversions in a row are not needed unless:
7841 - some conversion is floating-point (overstrict for now), or
7842 - some conversion is a vector (overstrict for now), or
7843 - the intermediate type is narrower than both initial and
7845 - the intermediate type and innermost type differ in signedness,
7846 and the outermost type is wider than the intermediate, or
7847 - the initial type is a pointer type and the precisions of the
7848 intermediate and final types differ, or
7849 - the final type is a pointer type and the precisions of the
7850 initial and intermediate types differ.
7851 - the final type is a pointer type and the initial type not
7852 - the initial type is a pointer to an array and the final type
7854 if (! inside_float && ! inter_float && ! final_float
7855 && ! inside_vec && ! inter_vec && ! final_vec
7856 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7857 && ! (inside_int && inter_int
7858 && inter_unsignedp != inside_unsignedp
7859 && inter_prec < final_prec)
7860 && ((inter_unsignedp && inter_prec > inside_prec)
7861 == (final_unsignedp && final_prec > inter_prec))
7862 && ! (inside_ptr && inter_prec != final_prec)
7863 && ! (final_ptr && inside_prec != inter_prec)
7864 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7865 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7866 && final_ptr == inside_ptr
7868 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7869 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7870 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7873 /* Handle (T *)&A.B.C for A being of type T and B and C
7874 living at offset zero.  This occurs frequently in
7875 C++ upcasting and then accessing the base.  */
7876 if (TREE_CODE (op0) == ADDR_EXPR
7877 && POINTER_TYPE_P (type)
7878 && handled_component_p (TREE_OPERAND (op0, 0)))
7880 HOST_WIDE_INT bitsize, bitpos;
7882 enum machine_mode mode;
7883 int unsignedp, volatilep;
7884 tree base = TREE_OPERAND (op0, 0);
7885 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7886 &mode, &unsignedp, &volatilep, false);
7887 /* If the reference was to a (constant) zero offset, we can use
7888 the address of the base if it has the same base type
7889 as the result type.  */
7890 if (! offset && bitpos == 0
7891 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7892 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7893 return fold_convert (type, fold_addr_expr (base));
/* Push a conversion of a constant assignment past the assignment,
   except when assigning to a bit-field.  */
7896 if ((TREE_CODE (op0) == MODIFY_EXPR
7897 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7898 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7899 /* Detect assigning a bitfield.  */
7900 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7902 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7904 /* Don't leave an assignment inside a conversion
7905 unless assigning a bitfield.  */
7906 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7907 /* First do the assignment, then return converted constant.  */
7908 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7909 TREE_NO_WARNING (tem) = 1;
7910 TREE_USED (tem) = 1;
7914 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7915 constants (if x has signed type, the sign bit cannot be set
7916 in c).  This folds extension into the BIT_AND_EXPR.  */
7917 if (INTEGRAL_TYPE_P (type)
7918 && TREE_CODE (type) != BOOLEAN_TYPE
7919 && TREE_CODE (op0) == BIT_AND_EXPR
7920 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7923 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7926 if (TYPE_UNSIGNED (TREE_TYPE (and))
7927 || (TYPE_PRECISION (type)
7928 <= TYPE_PRECISION (TREE_TYPE (and))))
7930 else if (TYPE_PRECISION (TREE_TYPE (and1))
7931 <= HOST_BITS_PER_WIDE_INT
7932 && host_integerp (and1, 1))
7934 unsigned HOST_WIDE_INT cst;
/* CHANGE (set below) is true only when the mask's sign bit is
   clear, so the extension cannot alter the masked value.  */
7936 cst = tree_low_cst (and1, 1);
7937 cst &= (HOST_WIDE_INT) -1
7938 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7939 change = (cst == 0);
7940 #ifdef LOAD_EXTEND_OP
7942 && !flag_syntax_only
7943 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7946 tree uns = unsigned_type_for (TREE_TYPE (and0));
7947 and0 = fold_convert (uns, and0);
7948 and1 = fold_convert (uns, and1);
7954 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7955 TREE_INT_CST_HIGH (and1), 0,
7956 TREE_OVERFLOW (and1));
7957 return fold_build2 (BIT_AND_EXPR, type,
7958 fold_convert (type, and0), tem);
7962 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7963 when one of the new casts will fold away.  Conservatively we assume
7964 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
7965 if (POINTER_TYPE_P (type)
7966 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7967 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7968 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7969 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7971 tree arg00 = TREE_OPERAND (arg0, 0);
7972 tree arg01 = TREE_OPERAND (arg0, 1);
7974 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7975 fold_convert (sizetype, arg01));
7978 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7979 of the same precision, and X is an integer type not narrower than
7980 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7981 if (INTEGRAL_TYPE_P (type)
7982 && TREE_CODE (op0) == BIT_NOT_EXPR
7983 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7984 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7985 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7986 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7988 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7989 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7990 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7991 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Fall back to constant conversion.  */
7994 tem = fold_convert_const (code, type, op0);
7995 return tem ? tem : NULL_TREE;
7997 case VIEW_CONVERT_EXPR:
7998 if (TREE_TYPE (op0) == type)
/* Nested VIEW_CONVERTs collapse; constants go through the byte
   encode/interpret path.  */
8000 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8001 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8002 return fold_view_convert_expr (type, op0);
/* NEGATE_EXPR (case label elided).  */
8005 tem = fold_negate_expr (arg0);
8007 return fold_convert (type, tem);
/* ABS_EXPR (case label elided).  */
8011 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8012 return fold_abs_const (arg0, type);
8013 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8014 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8015 /* Convert fabs((double)float) into (double)fabsf(float).  */
8016 else if (TREE_CODE (arg0) == NOP_EXPR
8017 && TREE_CODE (type) == REAL_TYPE)
8019 tree targ0 = strip_float_extensions (arg0);
8021 return fold_convert (type, fold_build1 (ABS_EXPR,
8025 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8026 else if (TREE_CODE (arg0) == ABS_EXPR)
8028 else if (tree_expr_nonnegative_p (arg0))
8031 /* Strip sign ops from argument.  */
8032 if (TREE_CODE (type) == REAL_TYPE)
8034 tem = fold_strip_sign_ops (arg0);
8036 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* CONJ_EXPR (case label elided): conjugate distributes over the
   parts; negate the imaginary part.  */
8041 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8042 return fold_convert (type, arg0);
8043 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8045 tree itype = TREE_TYPE (type);
8046 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8047 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8048 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8050 if (TREE_CODE (arg0) == COMPLEX_CST)
8052 tree itype = TREE_TYPE (type);
8053 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8054 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8055 return build_complex (type, rpart, negate_expr (ipart));
8057 if (TREE_CODE (arg0) == CONJ_EXPR)
8058 return fold_convert (type, TREE_OPERAND (arg0, 0));
/* BIT_NOT_EXPR (case label elided).  */
8062 if (TREE_CODE (arg0) == INTEGER_CST)
8063 return fold_not_const (arg0, type);
8064 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8065 return TREE_OPERAND (arg0, 0);
8066 /* Convert ~ (-A) to A - 1.  */
8067 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8068 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
8069 build_int_cst (type, 1));
8070 /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8071 else if (INTEGRAL_TYPE_P (type)
8072 && ((TREE_CODE (arg0) == MINUS_EXPR
8073 && integer_onep (TREE_OPERAND (arg0, 1)))
8074 || (TREE_CODE (arg0) == PLUS_EXPR
8075 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8076 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8077 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8078 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8079 && (tem = fold_unary (BIT_NOT_EXPR, type,
8081 TREE_OPERAND (arg0, 0)))))
8082 return fold_build2 (BIT_XOR_EXPR, type, tem,
8083 fold_convert (type, TREE_OPERAND (arg0, 1)));
8084 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8085 && (tem = fold_unary (BIT_NOT_EXPR, type,
8087 TREE_OPERAND (arg0, 1)))))
8088 return fold_build2 (BIT_XOR_EXPR, type,
8089 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8093 case TRUTH_NOT_EXPR:
8094 /* The argument to invert_truthvalue must have Boolean type.  */
8095 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8096 arg0 = fold_convert (boolean_type_node, arg0);
8098 /* Note that the operand of this must be an int
8099 and its values must be 0 or 1.
8100 ("true" is a fixed value perhaps depending on the language,
8101 but we don't handle values other than 1 correctly yet.)  */
8102 tem = fold_truth_not_expr (arg0);
8105 return fold_convert (type, tem);
/* REALPART_EXPR (case label elided).  */
8108 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8109 return fold_convert (type, arg0);
8110 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8111 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8112 TREE_OPERAND (arg0, 1));
8113 if (TREE_CODE (arg0) == COMPLEX_CST)
8114 return fold_convert (type, TREE_REALPART (arg0));
/* Real part distributes over complex addition/subtraction.  */
8115 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8117 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8118 tem = fold_build2 (TREE_CODE (arg0), itype,
8119 fold_build1 (REALPART_EXPR, itype,
8120 TREE_OPERAND (arg0, 0)),
8121 fold_build1 (REALPART_EXPR, itype,
8122 TREE_OPERAND (arg0, 1)));
8123 return fold_convert (type, tem);
8125 if (TREE_CODE (arg0) == CONJ_EXPR)
8127 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8128 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8129 return fold_convert (type, tem);
/* creal(cexpi(x)) -> cos(x).  */
8131 if (TREE_CODE (arg0) == CALL_EXPR)
8133 tree fn = get_callee_fndecl (arg0);
8134 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8135 switch (DECL_FUNCTION_CODE (fn))
8137 CASE_FLT_FN (BUILT_IN_CEXPI):
8138 fn = mathfn_built_in (type, BUILT_IN_COS);
8140 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
/* IMAGPART_EXPR (case label elided): mirror of REALPART_EXPR.  */
8150 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8151 return fold_convert (type, integer_zero_node);
8152 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8153 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8154 TREE_OPERAND (arg0, 0));
8155 if (TREE_CODE (arg0) == COMPLEX_CST)
8156 return fold_convert (type, TREE_IMAGPART (arg0));
8157 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8159 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8160 tem = fold_build2 (TREE_CODE (arg0), itype,
8161 fold_build1 (IMAGPART_EXPR, itype,
8162 TREE_OPERAND (arg0, 0)),
8163 fold_build1 (IMAGPART_EXPR, itype,
8164 TREE_OPERAND (arg0, 1)));
8165 return fold_convert (type, tem);
8167 if (TREE_CODE (arg0) == CONJ_EXPR)
8169 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8170 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8171 return fold_convert (type, negate_expr (tem));
/* cimag(cexpi(x)) -> sin(x).  */
8173 if (TREE_CODE (arg0) == CALL_EXPR)
8175 tree fn = get_callee_fndecl (arg0);
8176 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8177 switch (DECL_FUNCTION_CODE (fn))
8179 CASE_FLT_FN (BUILT_IN_CEXPI):
8180 fn = mathfn_built_in (type, BUILT_IN_SIN);
8182 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8193 } /* switch (code) */
8196 /* Fold a binary expression of code CODE and type TYPE with operands
8197 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8198 Return the folded expression if folding is successful. Otherwise,
8199 return NULL_TREE. */
/* NOTE(review): extracted fragment with elided lines — comments only added.
   Folds MIN/MAX combinations where one operand is the complementary
   MAX/MIN of the other: the result is one of the operands.  */
8202 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8204 enum tree_code compl_code;
/* COMPL_CODE is the opposite extremum operation.  */
8206 if (code == MIN_EXPR)
8207 compl_code = MAX_EXPR;
8208 else if (code == MAX_EXPR)
8209 compl_code = MIN_EXPR;
8213 /* MIN (MAX (a, b), b) == b.  */
8214 if (TREE_CODE (op0) == compl_code
8215 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8216 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8218 /* MIN (MAX (b, a), b) == b.  */
8219 if (TREE_CODE (op0) == compl_code
8220 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8221 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8222 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8224 /* MIN (a, MAX (a, b)) == a.  */
8225 if (TREE_CODE (op1) == compl_code
8226 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8227 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8228 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8230 /* MIN (a, MAX (b, a)) == a.  */
8231 if (TREE_CODE (op1) == compl_code
8232 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8233 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8234 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8239 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8240 by changing CODE to reduce the magnitude of constants involved in
8241 ARG0 of the comparison.
8242 Returns a canonicalized comparison tree if a simplification was
8243 possible, otherwise returns NULL_TREE.
8244 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8245 valid if signed overflow is undefined. */
/* NOTE(review): extraction dropped physical lines here: the `static
   tree` line, the declaration of the `sgn0` variable used at 8269, the
   `return NULL_TREE;` statements after each guard, and the statements
   inside the LE/LT/GT/GE branches that actually reassign CODE (only the
   conditions remain visible).  Restore from the original before
   compiling.  */
8248 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8249 tree arg0, tree arg1,
8250 bool *strict_overflow_p)
8252 enum tree_code code0 = TREE_CODE (arg0);
8253 tree t, cst0 = NULL_TREE;
/* Only two shapes are handled: ARG0 is `A +- CST` or ARG0 is itself an
   INTEGER_CST; anything else is rejected by this guard.  */
8257 /* Match A +- CST code arg1 and CST code arg1. */
8258 if (!(((code0 == MINUS_EXPR
8259 || code0 == PLUS_EXPR)
8260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8261 || code0 == INTEGER_CST))
8264 /* Identify the constant in arg0 and its sign. */
8265 if (code0 == INTEGER_CST)
8268 cst0 = TREE_OPERAND (arg0, 1);
8269 sgn0 = tree_int_cst_sgn (cst0);
8271 /* Overflowed constants and zero will cause problems. */
8272 if (integer_zerop (cst0)
8273 || TREE_OVERFLOW (cst0))
/* Case 1: ARG0 is a bare constant -- shrink its magnitude by one and
   relax/strengthen the comparison code accordingly.  */
8276 /* See if we can reduce the magnitude of the constant in
8277 arg0 by changing the comparison code. */
8278 if (code0 == INTEGER_CST)
8280 /* CST <= arg1 -> CST-1 < arg1. */
8281 if (code == LE_EXPR && sgn0 == 1)
8283 /* -CST < arg1 -> -CST-1 <= arg1. */
8284 else if (code == LT_EXPR && sgn0 == -1)
8286 /* CST > arg1 -> CST-1 >= arg1. */
8287 else if (code == GT_EXPR && sgn0 == 1)
8289 /* -CST >= arg1 -> -CST-1 > arg1. */
8290 else if (code == GE_EXPR && sgn0 == -1)
8294 /* arg1 code' CST' might be more canonical. */
/* Case 2: ARG0 is `A +- CST`; these rewrites are only valid when
   signed overflow is undefined, hence *STRICT_OVERFLOW_P below.  */
8299 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8301 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8303 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8304 else if (code == GT_EXPR
8305 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8307 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8308 else if (code == LE_EXPR
8309 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8311 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8312 else if (code == GE_EXPR
8313 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8317 *strict_overflow_p = true;
/* CST0 is nonzero here (checked above), so subtracting one from its
   magnitude cannot itself produce zero from +-1.  */
8320 /* Now build the constant reduced in magnitude. */
8321 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8322 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8323 if (code0 != INTEGER_CST)
8324 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8326 /* If swapping might yield to a more canonical form, do so. */
8328 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8330 return fold_build2 (code, type, t, arg1);
8333 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8334 overflow further. Try to decrease the magnitude of constants involved
8335 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8336 and put sole constants at the second argument position.
8337 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): extraction dropped lines here: the `static tree` line,
   the declaration of the result variable `t`, the `return NULL_TREE;`
   after the pointer/overflow guard, the `if (t)` check and `return t;`
   after the first canonicalization attempt, and the final `return t;`.
   Restore from the original before compiling.  */
8340 maybe_canonicalize_comparison (enum tree_code code, tree type,
8341 tree arg0, tree arg1)
8344 bool strict_overflow_p;
8345 const char * const warnmsg = G_("assuming signed overflow does not occur "
8346 "when reducing constant in comparison");
/* Bail out unless signed overflow on ARG0's type is undefined; the
   rewrite in maybe_canonicalize_comparison_1 relies on that.  */
8348 /* In principle pointers also have undefined overflow behavior,
8349 but that causes problems elsewhere. */
8350 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8351 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8354 /* Try canonicalization by simplifying arg0. */
8355 strict_overflow_p = false;
8356 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8357 &strict_overflow_p);
/* Only warn when the helper actually relied on undefined signed
   overflow for its rewrite.  */
8360 if (strict_overflow_p)
8361 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
/* Second attempt: swap the comparison and simplify what was ARG1.  */
8365 /* Try canonicalization by simplifying arg1 using the swapped
8367 code = swap_tree_comparison (code);
8368 strict_overflow_p = false;
8369 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8370 &strict_overflow_p);
8371 if (t && strict_overflow_p)
8372 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8376 /* Subroutine of fold_binary. This routine performs all of the
8377 transformations that are common to the equality/inequality
8378 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8379 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8380 fold_binary should call fold_binary. Fold a comparison with
8381 tree code CODE and type TYPE with operands OP0 and OP1. Return
8382 the folded comparison or NULL_TREE. */
/* NOTE(review): this extract is missing many physical lines of the
   original function: local declarations (`tree const2`, `int lhs_add`,
   `tree lhs`, `int save_p`, `tree high_result`/`equal_result`/
   `low_result`, `tree cst`), several `switch` statements and their
   `case` labels, various early `return`s and closing braces, and the
   final `return NULL_TREE;`.  The comments added below only describe
   the visible lines; restore the original text before compiling.  */
8385 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8387 tree arg0, arg1, tem;
/* Strip sign-preserving no-op conversions so the pattern matches below
   see through value-preserving casts.  */
8392 STRIP_SIGN_NOPS (arg0);
8393 STRIP_SIGN_NOPS (arg1);
/* Try full constant folding of the relational first.  */
8395 tem = fold_relational_const (code, type, arg0, arg1);
8396 if (tem != NULL_TREE)
8399 /* If one arg is a real or integer constant, put it last. */
8400 if (tree_swap_operands_p (arg0, arg1, true))
8401 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
/* Move a constant addend across the comparison; valid only when signed
   overflow on ARG1's type is undefined and neither constant already
   overflowed.  */
8403 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8404 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8405 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8406 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8407 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8408 && (TREE_CODE (arg1) == INTEGER_CST
8409 && !TREE_OVERFLOW (arg1)))
8411 tree const1 = TREE_OPERAND (arg0, 1);
8413 tree variable = TREE_OPERAND (arg0, 0);
8416 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8418 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8419 TREE_TYPE (arg1), const2, const1);
8421 /* If the constant operation overflowed this can be
8422 simplified as a comparison against INT_MAX/INT_MIN. */
8423 if (TREE_CODE (lhs) == INTEGER_CST
8424 && TREE_OVERFLOW (lhs))
8426 int const1_sgn = tree_int_cst_sgn (const1);
8427 enum tree_code code2 = code;
8429 /* Get the sign of the constant on the lhs if the
8430 operation were VARIABLE + CONST1. */
8431 if (TREE_CODE (arg0) == MINUS_EXPR)
8432 const1_sgn = -const1_sgn;
8434 /* The sign of the constant determines if we overflowed
8435 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8436 Canonicalize to the INT_MIN overflow by swapping the comparison
8438 if (const1_sgn == -1)
8439 code2 = swap_tree_comparison (code);
8441 /* We now can look at the canonicalized case
8442 VARIABLE + 1 CODE2 INT_MIN
8443 and decide on the result. */
8444 if (code2 == LT_EXPR
8446 || code2 == EQ_EXPR)
8447 return omit_one_operand (type, boolean_false_node, variable);
8448 else if (code2 == NE_EXPR
8450 || code2 == GT_EXPR)
8451 return omit_one_operand (type, boolean_true_node, variable);
/* Otherwise commit the rewrite only when the combined constant did not
   overflow, and emit the strict-overflow warning.  */
8454 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8455 && (TREE_CODE (lhs) != INTEGER_CST
8456 || !TREE_OVERFLOW (lhs)))
8458 fold_overflow_warning (("assuming signed overflow does not occur "
8459 "when changing X +- C1 cmp C2 to "
8461 WARN_STRICT_OVERFLOW_COMPARISON);
8462 return fold_build2 (code, type, variable, lhs);
/* Pointer comparisons: decompose &obj accesses into base + bit
   position + variable offset and compare piecewise.  */
8466 /* For comparisons of pointers we can decompose it to a compile time
8467 comparison of the base objects and the offsets into the object.
8468 This requires at least one operand being an ADDR_EXPR to do more
8469 than the operand_equal_p test below. */
8470 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8471 && (TREE_CODE (arg0) == ADDR_EXPR
8472 || TREE_CODE (arg1) == ADDR_EXPR))
8474 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8475 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8476 enum machine_mode mode;
8477 int volatilep, unsignedp;
8478 bool indirect_base0 = false;
8480 /* Get base and offset for the access. Strip ADDR_EXPR for
8481 get_inner_reference, but put it back by stripping INDIRECT_REF
8482 off the base object if possible. */
8484 if (TREE_CODE (arg0) == ADDR_EXPR)
8486 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8487 &bitsize, &bitpos0, &offset0, &mode,
8488 &unsignedp, &volatilep, false);
8489 if (TREE_CODE (base0) == INDIRECT_REF)
8490 base0 = TREE_OPERAND (base0, 0);
8492 indirect_base0 = true;
8496 if (TREE_CODE (arg1) == ADDR_EXPR)
8498 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8499 &bitsize, &bitpos1, &offset1, &mode,
8500 &unsignedp, &volatilep, false);
8501 /* We have to make sure to have an indirect/non-indirect base1
8502 just the same as we did for base0. */
8503 if (TREE_CODE (base1) == INDIRECT_REF
8505 base1 = TREE_OPERAND (base1, 0);
8506 else if (!indirect_base0)
8509 else if (indirect_base0)
8512 /* If we have equivalent bases we might be able to simplify. */
8514 && operand_equal_p (base0, base1, 0))
8516 /* We can fold this expression to a constant if the non-constant
8517 offset parts are equal. */
8518 if (offset0 == offset1
8519 || (offset0 && offset1
8520 && operand_equal_p (offset0, offset1, 0)))
/* NOTE(review): the `switch (code)` and its case labels are missing
   from this extract; only the per-case return statements remain.  */
8525 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8527 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8529 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8531 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8533 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8535 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8539 /* We can simplify the comparison to a comparison of the variable
8540 offset parts if the constant offset parts are equal.
8541 Be careful to use signed size type here because otherwise we
8542 mess with array offsets in the wrong way. This is possible
8543 because pointer arithmetic is restricted to retain within an
8544 object and overflow on pointer differences is undefined as of
8545 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8546 else if (bitpos0 == bitpos1)
8548 tree signed_size_type_node;
8549 signed_size_type_node = signed_type_for (size_type_node);
8551 /* By converting to signed size type we cover middle-end pointer
8552 arithmetic which operates on unsigned pointer types of size
8553 type size and ARRAY_REF offsets which are properly sign or
8554 zero extended from their type in case it is narrower than
8556 if (offset0 == NULL_TREE)
8557 offset0 = build_int_cst (signed_size_type_node, 0);
8559 offset0 = fold_convert (signed_size_type_node, offset0);
8560 if (offset1 == NULL_TREE)
8561 offset1 = build_int_cst (signed_size_type_node, 0);
8563 offset1 = fold_convert (signed_size_type_node, offset1);
8565 return fold_build2 (code, type, offset0, offset1);
/* Same idea via extract_array_ref: compare offsets of two ARRAY_REF-like
   accesses into the same object.  */
8570 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8571 same object, then we can fold this to a comparison of the two offsets in
8572 signed size type. This is possible because pointer arithmetic is
8573 restricted to retain within an object and overflow on pointer differences
8574 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8576 We check flag_wrapv directly because pointers types are unsigned,
8577 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8578 normally what we want to avoid certain odd overflow cases, but
8580 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8582 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8584 tree base0, offset0, base1, offset1;
8586 if (extract_array_ref (arg0, &base0, &offset0)
8587 && extract_array_ref (arg1, &base1, &offset1)
8588 && operand_equal_p (base0, base1, 0))
8590 tree signed_size_type_node;
8591 signed_size_type_node = signed_type_for (size_type_node);
8593 /* By converting to signed size type we cover middle-end pointer
8594 arithmetic which operates on unsigned pointer types of size
8595 type size and ARRAY_REF offsets which are properly sign or
8596 zero extended from their type in case it is narrower than
8598 if (offset0 == NULL_TREE)
8599 offset0 = build_int_cst (signed_size_type_node, 0);
8601 offset0 = fold_convert (signed_size_type_node, offset0);
8602 if (offset1 == NULL_TREE)
8603 offset1 = build_int_cst (signed_size_type_node, 0);
8605 offset1 = fold_convert (signed_size_type_node, offset1);
8607 return fold_build2 (code, type, offset0, offset1);
/* Combine constants from both sides when the combined constant is
   strictly smaller in magnitude; relies on undefined signed overflow.  */
8611 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8612 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8613 the resulting offset is smaller in absolute value than the
8615 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8616 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8617 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8618 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8619 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8620 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8621 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8623 tree const1 = TREE_OPERAND (arg0, 1);
8624 tree const2 = TREE_OPERAND (arg1, 1);
8625 tree variable1 = TREE_OPERAND (arg0, 0);
8626 tree variable2 = TREE_OPERAND (arg1, 0);
8628 const char * const warnmsg = G_("assuming signed overflow does not "
8629 "occur when combining constants around "
8632 /* Put the constant on the side where it doesn't overflow and is
8633 of lower absolute value than before. */
8634 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8635 ? MINUS_EXPR : PLUS_EXPR,
8637 if (!TREE_OVERFLOW (cst)
8638 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8640 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8641 return fold_build2 (code, type,
8643 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* Second direction: try moving the constant to the ARG0 side instead.  */
8647 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8648 ? MINUS_EXPR : PLUS_EXPR,
8650 if (!TREE_OVERFLOW (cst)
8651 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8653 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8654 return fold_build2 (code, type,
8655 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8661 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8662 signed arithmetic case. That form is created by the compiler
8663 often enough for folding it to be of value. One example is in
8664 computing loop trip counts after Operator Strength Reduction. */
8665 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8666 && TREE_CODE (arg0) == MULT_EXPR
8667 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8668 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8669 && integer_zerop (arg1))
8671 tree const1 = TREE_OPERAND (arg0, 1);
8672 tree const2 = arg1; /* zero */
8673 tree variable1 = TREE_OPERAND (arg0, 0);
8674 enum tree_code cmp_code = code;
/* A zero multiplier would have been folded away before reaching here.  */
8676 gcc_assert (!integer_zerop (const1));
8678 fold_overflow_warning (("assuming signed overflow does not occur when "
8679 "eliminating multiplication in comparison "
8681 WARN_STRICT_OVERFLOW_COMPARISON);
8683 /* If const1 is negative we swap the sense of the comparison. */
8684 if (tree_int_cst_sgn (const1) < 0)
8685 cmp_code = swap_tree_comparison (cmp_code);
8687 return fold_build2 (cmp_code, type, variable1, const2);
8690 tem = maybe_canonicalize_comparison (code, type, op0, op1);
/* Floating-point specific simplifications.  */
8694 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8696 tree targ0 = strip_float_extensions (arg0);
8697 tree targ1 = strip_float_extensions (arg1);
8698 tree newtype = TREE_TYPE (targ0);
8700 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8701 newtype = TREE_TYPE (targ1);
8703 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8704 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8705 return fold_build2 (code, type, fold_convert (newtype, targ0),
8706 fold_convert (newtype, targ1));
8708 /* (-a) CMP (-b) -> b CMP a */
8709 if (TREE_CODE (arg0) == NEGATE_EXPR
8710 && TREE_CODE (arg1) == NEGATE_EXPR)
8711 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8712 TREE_OPERAND (arg0, 0));
8714 if (TREE_CODE (arg1) == REAL_CST)
8716 REAL_VALUE_TYPE cst;
8717 cst = TREE_REAL_CST (arg1);
8719 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8720 if (TREE_CODE (arg0) == NEGATE_EXPR)
8721 return fold_build2 (swap_tree_comparison (code), type,
8722 TREE_OPERAND (arg0, 0),
8723 build_real (TREE_TYPE (arg1),
8724 REAL_VALUE_NEGATE (cst)));
8726 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8727 /* a CMP (-0) -> a CMP 0 */
8728 if (REAL_VALUE_MINUS_ZERO (cst))
8729 return fold_build2 (code, type, arg0,
8730 build_real (TREE_TYPE (arg1), dconst0));
8732 /* x != NaN is always true, other ops are always false. */
8733 if (REAL_VALUE_ISNAN (cst)
8734 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8736 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8737 return omit_one_operand (type, tem, arg0);
8740 /* Fold comparisons against infinity. */
8741 if (REAL_VALUE_ISINF (cst))
8743 tem = fold_inf_compare (code, type, arg0, arg1);
8744 if (tem != NULL_TREE)
8749 /* If this is a comparison of a real constant with a PLUS_EXPR
8750 or a MINUS_EXPR of a real constant, we can convert it into a
8751 comparison with a revised real constant as long as no overflow
8752 occurs when unsafe_math_optimizations are enabled. */
8753 if (flag_unsafe_math_optimizations
8754 && TREE_CODE (arg1) == REAL_CST
8755 && (TREE_CODE (arg0) == PLUS_EXPR
8756 || TREE_CODE (arg0) == MINUS_EXPR)
8757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8758 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8759 ? MINUS_EXPR : PLUS_EXPR,
8760 arg1, TREE_OPERAND (arg0, 1), 0))
8761 && !TREE_OVERFLOW (tem))
8762 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8764 /* Likewise, we can simplify a comparison of a real constant with
8765 a MINUS_EXPR whose first operand is also a real constant, i.e.
8766 (c1 - x) < c2 becomes x > c1-c2. */
8767 if (flag_unsafe_math_optimizations
8768 && TREE_CODE (arg1) == REAL_CST
8769 && TREE_CODE (arg0) == MINUS_EXPR
8770 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8771 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8773 && !TREE_OVERFLOW (tem))
8774 return fold_build2 (swap_tree_comparison (code), type,
8775 TREE_OPERAND (arg0, 1), tem);
8777 /* Fold comparisons against built-in math functions. */
8778 if (TREE_CODE (arg1) == REAL_CST
8779 && flag_unsafe_math_optimizations
8780 && ! flag_errno_math)
8782 enum built_in_function fcode = builtin_mathfn_code (arg0);
8784 if (fcode != END_BUILTINS)
8786 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8787 if (tem != NULL_TREE)
/* Integer conversions: try narrowing or sign-change removal.  */
8793 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8794 && (TREE_CODE (arg0) == NOP_EXPR
8795 || TREE_CODE (arg0) == CONVERT_EXPR))
8797 /* If we are widening one operand of an integer comparison,
8798 see if the other operand is similarly being widened. Perhaps we
8799 can do the comparison in the narrower type. */
8800 tem = fold_widened_comparison (code, type, arg0, arg1);
8804 /* Or if we are changing signedness. */
8805 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8810 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8811 constant, we can simplify it. */
8812 if (TREE_CODE (arg1) == INTEGER_CST
8813 && (TREE_CODE (arg0) == MIN_EXPR
8814 || TREE_CODE (arg0) == MAX_EXPR)
8815 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8817 tem = optimize_minmax_comparison (code, type, op0, op1);
8822 /* Simplify comparison of something with itself. (For IEEE
8823 floating-point, we can only do some of these simplifications.) */
/* NOTE(review): the `switch (code)` and its case labels around the
   branches below are missing from this extract.  */
8824 if (operand_equal_p (arg0, arg1, 0))
8829 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8830 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8831 return constant_boolean_node (1, type);
8836 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8837 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8838 return constant_boolean_node (1, type);
8839 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8842 /* For NE, we can only do this simplification if integer
8843 or we don't honor IEEE floating point NaNs. */
8844 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8845 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8847 /* ... fall through ... */
8850 return constant_boolean_node (0, type);
8856 /* If we are comparing an expression that just has comparisons
8857 of two integer values, arithmetic expressions of those comparisons,
8858 and constants, we can simplify it. There are only three cases
8859 to check: the two values can either be equal, the first can be
8860 greater, or the second can be greater. Fold the expression for
8861 those three values. Since each value must be 0 or 1, we have
8862 eight possibilities, each of which corresponds to the constant 0
8863 or 1 or one of the six possible comparisons.
8865 This handles common cases like (a > b) == 0 but also handles
8866 expressions like ((x > y) - (y > x)) > 0, which supposedly
8867 occur in macroized code. */
8869 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8871 tree cval1 = 0, cval2 = 0;
8874 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8875 /* Don't handle degenerate cases here; they should already
8876 have been handled anyway. */
8877 && cval1 != 0 && cval2 != 0
8878 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8879 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8880 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8881 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8882 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8883 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8884 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8886 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8887 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8889 /* We can't just pass T to eval_subst in case cval1 or cval2
8890 was the same as ARG1. */
/* NOTE(review): the `high_result`/`equal_result`/`low_result`
   assignments below are truncated in this extract (second eval_subst
   arguments and trailing parentheses are missing).  */
8893 = fold_build2 (code, type,
8894 eval_subst (arg0, cval1, maxval,
8898 = fold_build2 (code, type,
8899 eval_subst (arg0, cval1, maxval,
8903 = fold_build2 (code, type,
8904 eval_subst (arg0, cval1, minval,
8908 /* All three of these results should be 0 or 1. Confirm they are.
8909 Then use those values to select the proper code to use. */
8911 if (TREE_CODE (high_result) == INTEGER_CST
8912 && TREE_CODE (equal_result) == INTEGER_CST
8913 && TREE_CODE (low_result) == INTEGER_CST)
8915 /* Make a 3-bit mask with the high-order bit being the
8916 value for `>', the next for '=', and the low for '<'. */
8917 switch ((integer_onep (high_result) * 4)
8918 + (integer_onep (equal_result) * 2)
8919 + integer_onep (low_result))
/* NOTE(review): the case labels 0..7 and the intermediate `code = ...`
   assignments of this switch are missing from this extract.  */
8923 return omit_one_operand (type, integer_zero_node, arg0);
8944 return omit_one_operand (type, integer_one_node, arg0);
8948 return save_expr (build2 (code, type, cval1, cval2));
8949 return fold_build2 (code, type, cval1, cval2);
8954 /* Fold a comparison of the address of COMPONENT_REFs with the same
8955 type and component to a comparison of the address of the base
8956 object. In short, &x->a OP &y->a to x OP y and
8957 &x->a OP &y.a to x OP &y */
8958 if (TREE_CODE (arg0) == ADDR_EXPR
8959 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8960 && TREE_CODE (arg1) == ADDR_EXPR
8961 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8963 tree cref0 = TREE_OPERAND (arg0, 0);
8964 tree cref1 = TREE_OPERAND (arg1, 0);
/* Same FIELD_DECL on both sides means the component offsets are equal,
   so comparing the containing objects' addresses is equivalent.  */
8965 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8967 tree op0 = TREE_OPERAND (cref0, 0);
8968 tree op1 = TREE_OPERAND (cref1, 0);
8969 return fold_build2 (code, type,
8970 fold_addr_expr (op0),
8971 fold_addr_expr (op1));
8975 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8976 into a single range test. */
8977 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8978 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8979 && TREE_CODE (arg1) == INTEGER_CST
8980 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8981 && !integer_zerop (TREE_OPERAND (arg0, 1))
8982 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8983 && !TREE_OVERFLOW (arg1))
8985 tem = fold_div_compare (code, type, arg0, arg1);
8986 if (tem != NULL_TREE)
8990 /* Fold ~X op ~Y as Y op X. */
8991 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8992 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8994 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8995 return fold_build2 (code, type,
8996 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8997 TREE_OPERAND (arg0, 0));
9000 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9001 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9002 && TREE_CODE (arg1) == INTEGER_CST)
9004 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9005 return fold_build2 (swap_tree_comparison (code), type,
9006 TREE_OPERAND (arg0, 0),
9007 fold_build1 (BIT_NOT_EXPR, cmp_type,
9008 fold_convert (cmp_type, arg1)));
9015 /* Subroutine of fold_binary. Optimize complex multiplications of the
9016 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9017 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): extraction dropped the `static tree` return-type line,
   the braces around the three rpart/ipart branches and the function's
   closing brace -- restore from the original before compiling.  */
9020 fold_mult_zconjz (tree type, tree expr)
/* ITYPE is the scalar element type of the complex TYPE; all the
   intermediate arithmetic below is done in it.  */
9022 tree itype = TREE_TYPE (type);
9023 tree rpart, ipart, tem;
/* Extract the real and imaginary parts: directly for COMPLEX_EXPR and
   COMPLEX_CST, otherwise through REALPART_EXPR/IMAGPART_EXPR of a
   save_expr so EXPR itself is evaluated only once.  */
9025 if (TREE_CODE (expr) == COMPLEX_EXPR)
9027 rpart = TREE_OPERAND (expr, 0);
9028 ipart = TREE_OPERAND (expr, 1);
9030 else if (TREE_CODE (expr) == COMPLEX_CST)
9032 rpart = TREE_REALPART (expr);
9033 ipart = TREE_IMAGPART (expr);
9037 expr = save_expr (expr);
9038 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9039 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is multiplied by itself below, so wrap them in save_expr
   to share a single evaluation.  */
9042 rpart = save_expr (rpart);
9043 ipart = save_expr (ipart);
/* Build (r*r + i*i) + 0i as the folded result of z * conj(z).  */
9044 tem = fold_build2 (PLUS_EXPR, itype,
9045 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9046 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9047 return fold_build2 (COMPLEX_EXPR, type, tem,
9048 fold_convert (itype, integer_zero_node));
9052 /* Fold a binary expression of code CODE and type TYPE with operands
9053 OP0 and OP1. Return the folded expression if folding is
9054 successful. Otherwise, return NULL_TREE. */
9057 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9059 enum tree_code_class kind = TREE_CODE_CLASS (code);
9060 tree arg0, arg1, tem;
9061 tree t1 = NULL_TREE;
9062 bool strict_overflow_p;
9064 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9065 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9066 && TREE_CODE_LENGTH (code) == 2
9068 && op1 != NULL_TREE);
9073 /* Strip any conversions that don't change the mode. This is
9074 safe for every expression, except for a comparison expression
9075 because its signedness is derived from its operands. So, in
9076 the latter case, only strip conversions that don't change the
9079 Note that this is done as an internal manipulation within the
9080 constant folder, in order to find the simplest representation
9081 of the arguments so that their form can be studied. In any
9082 cases, the appropriate type conversions should be put back in
9083 the tree that will get out of the constant folder. */
9085 if (kind == tcc_comparison)
9087 STRIP_SIGN_NOPS (arg0);
9088 STRIP_SIGN_NOPS (arg1);
9096 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9097 constant but we can't do arithmetic on them. */
9098 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9099 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9100 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9101 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9103 if (kind == tcc_binary)
9104 tem = const_binop (code, arg0, arg1, 0);
9105 else if (kind == tcc_comparison)
9106 tem = fold_relational_const (code, type, arg0, arg1);
9110 if (tem != NULL_TREE)
9112 if (TREE_TYPE (tem) != type)
9113 tem = fold_convert (type, tem);
9118 /* If this is a commutative operation, and ARG0 is a constant, move it
9119 to ARG1 to reduce the number of tests below. */
9120 if (commutative_tree_code (code)
9121 && tree_swap_operands_p (arg0, arg1, true))
9122 return fold_build2 (code, type, op1, op0);
9124 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9126 First check for cases where an arithmetic operation is applied to a
9127 compound, conditional, or comparison operation. Push the arithmetic
9128 operation inside the compound or conditional to see if any folding
9129 can then be done. Convert comparison to conditional for this purpose.
9130 The also optimizes non-constant cases that used to be done in
9133 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9134 one of the operands is a comparison and the other is a comparison, a
9135 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9136 code below would make the expression more complex. Change it to a
9137 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9138 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9140 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9141 || code == EQ_EXPR || code == NE_EXPR)
9142 && ((truth_value_p (TREE_CODE (arg0))
9143 && (truth_value_p (TREE_CODE (arg1))
9144 || (TREE_CODE (arg1) == BIT_AND_EXPR
9145 && integer_onep (TREE_OPERAND (arg1, 1)))))
9146 || (truth_value_p (TREE_CODE (arg1))
9147 && (truth_value_p (TREE_CODE (arg0))
9148 || (TREE_CODE (arg0) == BIT_AND_EXPR
9149 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9151 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9152 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9155 fold_convert (boolean_type_node, arg0),
9156 fold_convert (boolean_type_node, arg1));
9158 if (code == EQ_EXPR)
9159 tem = invert_truthvalue (tem);
9161 return fold_convert (type, tem);
9164 if (TREE_CODE_CLASS (code) == tcc_binary
9165 || TREE_CODE_CLASS (code) == tcc_comparison)
9167 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9168 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9169 fold_build2 (code, type,
9170 TREE_OPERAND (arg0, 1), op1));
9171 if (TREE_CODE (arg1) == COMPOUND_EXPR
9172 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9173 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9174 fold_build2 (code, type,
9175 op0, TREE_OPERAND (arg1, 1)));
9177 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9179 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9181 /*cond_first_p=*/1);
9182 if (tem != NULL_TREE)
9186 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9188 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9190 /*cond_first_p=*/0);
9191 if (tem != NULL_TREE)
9198 case POINTER_PLUS_EXPR:
9199 /* 0 +p index -> (type)index */
9200 if (integer_zerop (arg0))
9201 return non_lvalue (fold_convert (type, arg1));
9203 /* PTR +p 0 -> PTR */
9204 if (integer_zerop (arg1))
9205 return non_lvalue (fold_convert (type, arg0));
9207 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9208 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9209 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9210 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9211 fold_convert (sizetype, arg1),
9212 fold_convert (sizetype, arg0)));
9214 /* index +p PTR -> PTR +p index */
9215 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9216 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9217 return fold_build2 (POINTER_PLUS_EXPR, type,
9218 fold_convert (type, arg1), fold_convert (sizetype, arg0));
9220 /* (PTR +p B) +p A -> PTR +p (B + A) */
9221 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9224 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9225 tree arg00 = TREE_OPERAND (arg0, 0);
9226 inner = fold_build2 (PLUS_EXPR, sizetype, arg01, fold_convert (sizetype, arg1));
9227 return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
9230 /* PTR_CST +p CST -> CST1 */
9231 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9232 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9234 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9235 of the array. Loop optimizer sometimes produce this type of
9237 if (TREE_CODE (arg0) == ADDR_EXPR)
9239 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9241 return fold_convert (type, tem);
9246 /* PTR + INT -> (INT)(PTR p+ INT) */
9247 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9248 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9249 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9252 fold_convert (sizetype, arg1)));
9253 /* INT + PTR -> (INT)(PTR p+ INT) */
9254 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9255 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9256 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9259 fold_convert (sizetype, arg0)));
9260 /* A + (-B) -> A - B */
9261 if (TREE_CODE (arg1) == NEGATE_EXPR)
9262 return fold_build2 (MINUS_EXPR, type,
9263 fold_convert (type, arg0),
9264 fold_convert (type, TREE_OPERAND (arg1, 0)));
9265 /* (-A) + B -> B - A */
9266 if (TREE_CODE (arg0) == NEGATE_EXPR
9267 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9268 return fold_build2 (MINUS_EXPR, type,
9269 fold_convert (type, arg1),
9270 fold_convert (type, TREE_OPERAND (arg0, 0)));
9272 if (INTEGRAL_TYPE_P (type))
9274 /* Convert ~A + 1 to -A. */
9275 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9276 && integer_onep (arg1))
9277 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9280 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9281 && !TYPE_OVERFLOW_TRAPS (type))
9283 tree tem = TREE_OPERAND (arg0, 0);
9286 if (operand_equal_p (tem, arg1, 0))
9288 t1 = build_int_cst_type (type, -1);
9289 return omit_one_operand (type, t1, arg1);
9294 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9295 && !TYPE_OVERFLOW_TRAPS (type))
9297 tree tem = TREE_OPERAND (arg1, 0);
9300 if (operand_equal_p (arg0, tem, 0))
9302 t1 = build_int_cst_type (type, -1);
9303 return omit_one_operand (type, t1, arg0);
9308 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9310 if ((TREE_CODE (arg0) == MULT_EXPR
9311 || TREE_CODE (arg1) == MULT_EXPR)
9312 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9314 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9319 if (! FLOAT_TYPE_P (type))
9321 if (integer_zerop (arg1))
9322 return non_lvalue (fold_convert (type, arg0));
9324 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9325 with a constant, and the two constants have no bits in common,
9326 we should treat this as a BIT_IOR_EXPR since this may produce more
9328 if (TREE_CODE (arg0) == BIT_AND_EXPR
9329 && TREE_CODE (arg1) == BIT_AND_EXPR
9330 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9332 && integer_zerop (const_binop (BIT_AND_EXPR,
9333 TREE_OPERAND (arg0, 1),
9334 TREE_OPERAND (arg1, 1), 0)))
9336 code = BIT_IOR_EXPR;
9340 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9341 (plus (plus (mult) (mult)) (foo)) so that we can
9342 take advantage of the factoring cases below. */
9343 if (((TREE_CODE (arg0) == PLUS_EXPR
9344 || TREE_CODE (arg0) == MINUS_EXPR)
9345 && TREE_CODE (arg1) == MULT_EXPR)
9346 || ((TREE_CODE (arg1) == PLUS_EXPR
9347 || TREE_CODE (arg1) == MINUS_EXPR)
9348 && TREE_CODE (arg0) == MULT_EXPR))
9350 tree parg0, parg1, parg, marg;
9351 enum tree_code pcode;
9353 if (TREE_CODE (arg1) == MULT_EXPR)
9354 parg = arg0, marg = arg1;
9356 parg = arg1, marg = arg0;
9357 pcode = TREE_CODE (parg);
9358 parg0 = TREE_OPERAND (parg, 0);
9359 parg1 = TREE_OPERAND (parg, 1);
9363 if (TREE_CODE (parg0) == MULT_EXPR
9364 && TREE_CODE (parg1) != MULT_EXPR)
9365 return fold_build2 (pcode, type,
9366 fold_build2 (PLUS_EXPR, type,
9367 fold_convert (type, parg0),
9368 fold_convert (type, marg)),
9369 fold_convert (type, parg1));
9370 if (TREE_CODE (parg0) != MULT_EXPR
9371 && TREE_CODE (parg1) == MULT_EXPR)
9372 return fold_build2 (PLUS_EXPR, type,
9373 fold_convert (type, parg0),
9374 fold_build2 (pcode, type,
9375 fold_convert (type, marg),
9382 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9383 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9384 return non_lvalue (fold_convert (type, arg0));
9386 /* Likewise if the operands are reversed. */
9387 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9388 return non_lvalue (fold_convert (type, arg1));
9390 /* Convert X + -C into X - C. */
9391 if (TREE_CODE (arg1) == REAL_CST
9392 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9394 tem = fold_negate_const (arg1, type);
9395 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9396 return fold_build2 (MINUS_EXPR, type,
9397 fold_convert (type, arg0),
9398 fold_convert (type, tem));
9401 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9402 to __complex__ ( x, y ). This is not the same for SNaNs or
9403 if signed zeros are involved. */
9404 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9405 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9406 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9408 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9409 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9410 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9411 bool arg0rz = false, arg0iz = false;
9412 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9413 || (arg0i && (arg0iz = real_zerop (arg0i))))
9415 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9416 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9417 if (arg0rz && arg1i && real_zerop (arg1i))
9419 tree rp = arg1r ? arg1r
9420 : build1 (REALPART_EXPR, rtype, arg1);
9421 tree ip = arg0i ? arg0i
9422 : build1 (IMAGPART_EXPR, rtype, arg0);
9423 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9425 else if (arg0iz && arg1r && real_zerop (arg1r))
9427 tree rp = arg0r ? arg0r
9428 : build1 (REALPART_EXPR, rtype, arg0);
9429 tree ip = arg1i ? arg1i
9430 : build1 (IMAGPART_EXPR, rtype, arg1);
9431 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9436 if (flag_unsafe_math_optimizations
9437 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9438 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9439 && (tem = distribute_real_division (code, type, arg0, arg1)))
9442 /* Convert x+x into x*2.0. */
9443 if (operand_equal_p (arg0, arg1, 0)
9444 && SCALAR_FLOAT_TYPE_P (type))
9445 return fold_build2 (MULT_EXPR, type, arg0,
9446 build_real (type, dconst2));
9448 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9449 if (flag_unsafe_math_optimizations
9450 && TREE_CODE (arg1) == PLUS_EXPR
9451 && TREE_CODE (arg0) != MULT_EXPR)
9453 tree tree10 = TREE_OPERAND (arg1, 0);
9454 tree tree11 = TREE_OPERAND (arg1, 1);
9455 if (TREE_CODE (tree11) == MULT_EXPR
9456 && TREE_CODE (tree10) == MULT_EXPR)
9459 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9460 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9463 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9464 if (flag_unsafe_math_optimizations
9465 && TREE_CODE (arg0) == PLUS_EXPR
9466 && TREE_CODE (arg1) != MULT_EXPR)
9468 tree tree00 = TREE_OPERAND (arg0, 0);
9469 tree tree01 = TREE_OPERAND (arg0, 1);
9470 if (TREE_CODE (tree01) == MULT_EXPR
9471 && TREE_CODE (tree00) == MULT_EXPR)
9474 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9475 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9481 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9482 is a rotate of A by C1 bits. */
9483 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9484 is a rotate of A by B bits. */
9486 enum tree_code code0, code1;
9487 code0 = TREE_CODE (arg0);
9488 code1 = TREE_CODE (arg1);
9489 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9490 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9491 && operand_equal_p (TREE_OPERAND (arg0, 0),
9492 TREE_OPERAND (arg1, 0), 0)
9493 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9495 tree tree01, tree11;
9496 enum tree_code code01, code11;
9498 tree01 = TREE_OPERAND (arg0, 1);
9499 tree11 = TREE_OPERAND (arg1, 1);
9500 STRIP_NOPS (tree01);
9501 STRIP_NOPS (tree11);
9502 code01 = TREE_CODE (tree01);
9503 code11 = TREE_CODE (tree11);
9504 if (code01 == INTEGER_CST
9505 && code11 == INTEGER_CST
9506 && TREE_INT_CST_HIGH (tree01) == 0
9507 && TREE_INT_CST_HIGH (tree11) == 0
9508 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9509 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9510 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9511 code0 == LSHIFT_EXPR ? tree01 : tree11);
9512 else if (code11 == MINUS_EXPR)
9514 tree tree110, tree111;
9515 tree110 = TREE_OPERAND (tree11, 0);
9516 tree111 = TREE_OPERAND (tree11, 1);
9517 STRIP_NOPS (tree110);
9518 STRIP_NOPS (tree111);
9519 if (TREE_CODE (tree110) == INTEGER_CST
9520 && 0 == compare_tree_int (tree110,
9522 (TREE_TYPE (TREE_OPERAND
9524 && operand_equal_p (tree01, tree111, 0))
9525 return build2 ((code0 == LSHIFT_EXPR
9528 type, TREE_OPERAND (arg0, 0), tree01);
9530 else if (code01 == MINUS_EXPR)
9532 tree tree010, tree011;
9533 tree010 = TREE_OPERAND (tree01, 0);
9534 tree011 = TREE_OPERAND (tree01, 1);
9535 STRIP_NOPS (tree010);
9536 STRIP_NOPS (tree011);
9537 if (TREE_CODE (tree010) == INTEGER_CST
9538 && 0 == compare_tree_int (tree010,
9540 (TREE_TYPE (TREE_OPERAND
9542 && operand_equal_p (tree11, tree011, 0))
9543 return build2 ((code0 != LSHIFT_EXPR
9546 type, TREE_OPERAND (arg0, 0), tree11);
9552 /* In most languages, can't associate operations on floats through
9553 parentheses. Rather than remember where the parentheses were, we
9554 don't associate floats at all, unless the user has specified
9555 -funsafe-math-optimizations. */
9557 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9559 tree var0, con0, lit0, minus_lit0;
9560 tree var1, con1, lit1, minus_lit1;
9563 /* Split both trees into variables, constants, and literals. Then
9564 associate each group together, the constants with literals,
9565 then the result with variables. This increases the chances of
9566 literals being recombined later and of generating relocatable
9567 expressions for the sum of a constant and literal. */
9568 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9569 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9570 code == MINUS_EXPR);
9572 /* With undefined overflow we can only associate constants
9573 with one variable. */
9574 if ((POINTER_TYPE_P (type)
9575 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9581 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9582 tmp0 = TREE_OPERAND (tmp0, 0);
9583 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9584 tmp1 = TREE_OPERAND (tmp1, 0);
9585 /* The only case we can still associate with two variables
9586 is if they are the same, modulo negation. */
9587 if (!operand_equal_p (tmp0, tmp1, 0))
9591 /* Only do something if we found more than two objects. Otherwise,
9592 nothing has changed and we risk infinite recursion. */
9594 && (2 < ((var0 != 0) + (var1 != 0)
9595 + (con0 != 0) + (con1 != 0)
9596 + (lit0 != 0) + (lit1 != 0)
9597 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9599 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9600 if (code == MINUS_EXPR)
9603 var0 = associate_trees (var0, var1, code, type);
9604 con0 = associate_trees (con0, con1, code, type);
9605 lit0 = associate_trees (lit0, lit1, code, type);
9606 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9608 /* Preserve the MINUS_EXPR if the negative part of the literal is
9609 greater than the positive part. Otherwise, the multiplicative
9610 folding code (i.e extract_muldiv) may be fooled in case
9611 unsigned constants are subtracted, like in the following
9612 example: ((X*2 + 4) - 8U)/2. */
9613 if (minus_lit0 && lit0)
9615 if (TREE_CODE (lit0) == INTEGER_CST
9616 && TREE_CODE (minus_lit0) == INTEGER_CST
9617 && tree_int_cst_lt (lit0, minus_lit0))
9619 minus_lit0 = associate_trees (minus_lit0, lit0,
9625 lit0 = associate_trees (lit0, minus_lit0,
9633 return fold_convert (type,
9634 associate_trees (var0, minus_lit0,
9638 con0 = associate_trees (con0, minus_lit0,
9640 return fold_convert (type,
9641 associate_trees (var0, con0,
9646 con0 = associate_trees (con0, lit0, code, type);
9647 return fold_convert (type, associate_trees (var0, con0,
9655 /* Pointer simplifications for subtraction, simple reassociations. */
9656 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9658 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9659 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9660 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9662 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9663 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9664 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9665 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9666 return fold_build2 (PLUS_EXPR, type,
9667 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9668 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9670 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9671 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9673 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9674 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9675 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9677 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9680 /* A - (-B) -> A + B */
9681 if (TREE_CODE (arg1) == NEGATE_EXPR)
9682 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9683 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9684 if (TREE_CODE (arg0) == NEGATE_EXPR
9685 && (FLOAT_TYPE_P (type)
9686 || INTEGRAL_TYPE_P (type))
9687 && negate_expr_p (arg1)
9688 && reorder_operands_p (arg0, arg1))
9689 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9690 TREE_OPERAND (arg0, 0));
9691 /* Convert -A - 1 to ~A. */
9692 if (INTEGRAL_TYPE_P (type)
9693 && TREE_CODE (arg0) == NEGATE_EXPR
9694 && integer_onep (arg1)
9695 && !TYPE_OVERFLOW_TRAPS (type))
9696 return fold_build1 (BIT_NOT_EXPR, type,
9697 fold_convert (type, TREE_OPERAND (arg0, 0)));
9699 /* Convert -1 - A to ~A. */
9700 if (INTEGRAL_TYPE_P (type)
9701 && integer_all_onesp (arg0))
9702 return fold_build1 (BIT_NOT_EXPR, type, op1);
9704 if (! FLOAT_TYPE_P (type))
9706 if (integer_zerop (arg0))
9707 return negate_expr (fold_convert (type, arg1));
9708 if (integer_zerop (arg1))
9709 return non_lvalue (fold_convert (type, arg0));
9711 /* Fold A - (A & B) into ~B & A. */
9712 if (!TREE_SIDE_EFFECTS (arg0)
9713 && TREE_CODE (arg1) == BIT_AND_EXPR)
9715 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9716 return fold_build2 (BIT_AND_EXPR, type,
9717 fold_build1 (BIT_NOT_EXPR, type,
9718 TREE_OPERAND (arg1, 0)),
9720 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9721 return fold_build2 (BIT_AND_EXPR, type,
9722 fold_build1 (BIT_NOT_EXPR, type,
9723 TREE_OPERAND (arg1, 1)),
9727 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9728 any power of 2 minus 1. */
9729 if (TREE_CODE (arg0) == BIT_AND_EXPR
9730 && TREE_CODE (arg1) == BIT_AND_EXPR
9731 && operand_equal_p (TREE_OPERAND (arg0, 0),
9732 TREE_OPERAND (arg1, 0), 0))
9734 tree mask0 = TREE_OPERAND (arg0, 1);
9735 tree mask1 = TREE_OPERAND (arg1, 1);
9736 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9738 if (operand_equal_p (tem, mask1, 0))
9740 tem = fold_build2 (BIT_XOR_EXPR, type,
9741 TREE_OPERAND (arg0, 0), mask1);
9742 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9747 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9748 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9749 return non_lvalue (fold_convert (type, arg0));
9751 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9752 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9753 (-ARG1 + ARG0) reduces to -ARG1. */
9754 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9755 return negate_expr (fold_convert (type, arg1));
9757 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9758 __complex__ ( x, -y ). This is not the same for SNaNs or if
9759 signed zeros are involved. */
9760 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9761 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9762 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9764 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9765 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9766 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9767 bool arg0rz = false, arg0iz = false;
9768 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9769 || (arg0i && (arg0iz = real_zerop (arg0i))))
9771 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9772 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9773 if (arg0rz && arg1i && real_zerop (arg1i))
9775 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9777 : build1 (REALPART_EXPR, rtype, arg1));
9778 tree ip = arg0i ? arg0i
9779 : build1 (IMAGPART_EXPR, rtype, arg0);
9780 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9782 else if (arg0iz && arg1r && real_zerop (arg1r))
9784 tree rp = arg0r ? arg0r
9785 : build1 (REALPART_EXPR, rtype, arg0);
9786 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9788 : build1 (IMAGPART_EXPR, rtype, arg1));
9789 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9794 /* Fold &x - &x. This can happen from &x.foo - &x.
9795 This is unsafe for certain floats even in non-IEEE formats.
9796 In IEEE, it is unsafe because it does wrong for NaNs.
9797 Also note that operand_equal_p is always false if an operand
9800 if ((! FLOAT_TYPE_P (type)
9801 || (flag_unsafe_math_optimizations
9802 && !HONOR_NANS (TYPE_MODE (type))
9803 && !HONOR_INFINITIES (TYPE_MODE (type))))
9804 && operand_equal_p (arg0, arg1, 0))
9805 return fold_convert (type, integer_zero_node);
9807 /* A - B -> A + (-B) if B is easily negatable. */
9808 if (negate_expr_p (arg1)
9809 && ((FLOAT_TYPE_P (type)
9810 /* Avoid this transformation if B is a positive REAL_CST. */
9811 && (TREE_CODE (arg1) != REAL_CST
9812 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9813 || INTEGRAL_TYPE_P (type)))
9814 return fold_build2 (PLUS_EXPR, type,
9815 fold_convert (type, arg0),
9816 fold_convert (type, negate_expr (arg1)));
9818 /* Try folding difference of addresses. */
9822 if ((TREE_CODE (arg0) == ADDR_EXPR
9823 || TREE_CODE (arg1) == ADDR_EXPR)
9824 && ptr_difference_const (arg0, arg1, &diff))
9825 return build_int_cst_type (type, diff);
9828 /* Fold &a[i] - &a[j] to i-j. */
9829 if (TREE_CODE (arg0) == ADDR_EXPR
9830 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9831 && TREE_CODE (arg1) == ADDR_EXPR
9832 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9834 tree aref0 = TREE_OPERAND (arg0, 0);
9835 tree aref1 = TREE_OPERAND (arg1, 0);
9836 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9837 TREE_OPERAND (aref1, 0), 0))
9839 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9840 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9841 tree esz = array_ref_element_size (aref0);
9842 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9843 return fold_build2 (MULT_EXPR, type, diff,
9844 fold_convert (type, esz));
9849 if (flag_unsafe_math_optimizations
9850 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9851 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9852 && (tem = distribute_real_division (code, type, arg0, arg1)))
9855 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9857 if ((TREE_CODE (arg0) == MULT_EXPR
9858 || TREE_CODE (arg1) == MULT_EXPR)
9859 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9861 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9869 /* (-A) * (-B) -> A * B */
9870 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9871 return fold_build2 (MULT_EXPR, type,
9872 fold_convert (type, TREE_OPERAND (arg0, 0)),
9873 fold_convert (type, negate_expr (arg1)));
9874 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9875 return fold_build2 (MULT_EXPR, type,
9876 fold_convert (type, negate_expr (arg0)),
9877 fold_convert (type, TREE_OPERAND (arg1, 0)));
9879 if (! FLOAT_TYPE_P (type))
9881 if (integer_zerop (arg1))
9882 return omit_one_operand (type, arg1, arg0);
9883 if (integer_onep (arg1))
9884 return non_lvalue (fold_convert (type, arg0));
9885 /* Transform x * -1 into -x. */
9886 if (integer_all_onesp (arg1))
9887 return fold_convert (type, negate_expr (arg0));
9888 /* Transform x * -C into -x * C if x is easily negatable. */
9889 if (TREE_CODE (arg1) == INTEGER_CST
9890 && tree_int_cst_sgn (arg1) == -1
9891 && negate_expr_p (arg0)
9892 && (tem = negate_expr (arg1)) != arg1
9893 && !TREE_OVERFLOW (tem))
9894 return fold_build2 (MULT_EXPR, type,
9895 negate_expr (arg0), tem);
9897 /* (a * (1 << b)) is (a << b) */
9898 if (TREE_CODE (arg1) == LSHIFT_EXPR
9899 && integer_onep (TREE_OPERAND (arg1, 0)))
9900 return fold_build2 (LSHIFT_EXPR, type, arg0,
9901 TREE_OPERAND (arg1, 1));
9902 if (TREE_CODE (arg0) == LSHIFT_EXPR
9903 && integer_onep (TREE_OPERAND (arg0, 0)))
9904 return fold_build2 (LSHIFT_EXPR, type, arg1,
9905 TREE_OPERAND (arg0, 1));
9907 strict_overflow_p = false;
9908 if (TREE_CODE (arg1) == INTEGER_CST
9909 && 0 != (tem = extract_muldiv (op0,
9910 fold_convert (type, arg1),
9912 &strict_overflow_p)))
9914 if (strict_overflow_p)
9915 fold_overflow_warning (("assuming signed overflow does not "
9916 "occur when simplifying "
9918 WARN_STRICT_OVERFLOW_MISC);
9919 return fold_convert (type, tem);
9922 /* Optimize z * conj(z) for integer complex numbers. */
9923 if (TREE_CODE (arg0) == CONJ_EXPR
9924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9925 return fold_mult_zconjz (type, arg1);
9926 if (TREE_CODE (arg1) == CONJ_EXPR
9927 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9928 return fold_mult_zconjz (type, arg0);
9932 /* Maybe fold x * 0 to 0. The expressions aren't the same
9933 when x is NaN, since x * 0 is also NaN. Nor are they the
9934 same in modes with signed zeros, since multiplying a
9935 negative value by 0 gives -0, not +0. */
9936 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9937 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9938 && real_zerop (arg1))
9939 return omit_one_operand (type, arg1, arg0);
9940 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9941 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9942 && real_onep (arg1))
9943 return non_lvalue (fold_convert (type, arg0));
9945 /* Transform x * -1.0 into -x. */
9946 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9947 && real_minus_onep (arg1))
9948 return fold_convert (type, negate_expr (arg0));
9950 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9951 if (flag_unsafe_math_optimizations
9952 && TREE_CODE (arg0) == RDIV_EXPR
9953 && TREE_CODE (arg1) == REAL_CST
9954 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9956 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9959 return fold_build2 (RDIV_EXPR, type, tem,
9960 TREE_OPERAND (arg0, 1));
9963 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9964 if (operand_equal_p (arg0, arg1, 0))
9966 tree tem = fold_strip_sign_ops (arg0);
9967 if (tem != NULL_TREE)
9969 tem = fold_convert (type, tem);
9970 return fold_build2 (MULT_EXPR, type, tem, tem);
9974 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9975 This is not the same for NaNs or if signed zeros are
9977 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9978 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9979 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9980 && TREE_CODE (arg1) == COMPLEX_CST
9981 && real_zerop (TREE_REALPART (arg1)))
9983 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9984 if (real_onep (TREE_IMAGPART (arg1)))
9985 return fold_build2 (COMPLEX_EXPR, type,
9986 negate_expr (fold_build1 (IMAGPART_EXPR,
9988 fold_build1 (REALPART_EXPR, rtype, arg0));
9989 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9990 return fold_build2 (COMPLEX_EXPR, type,
9991 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9992 negate_expr (fold_build1 (REALPART_EXPR,
9996 /* Optimize z * conj(z) for floating point complex numbers.
9997 Guarded by flag_unsafe_math_optimizations as non-finite
9998 imaginary components don't produce scalar results. */
9999 if (flag_unsafe_math_optimizations
10000 && TREE_CODE (arg0) == CONJ_EXPR
10001 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10002 return fold_mult_zconjz (type, arg1);
10003 if (flag_unsafe_math_optimizations
10004 && TREE_CODE (arg1) == CONJ_EXPR
10005 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10006 return fold_mult_zconjz (type, arg0);
10008 if (flag_unsafe_math_optimizations)
10010 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10011 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10013 /* Optimizations of root(...)*root(...). */
10014 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10017 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10018 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10020 /* Optimize sqrt(x)*sqrt(x) as x. */
10021 if (BUILTIN_SQRT_P (fcode0)
10022 && operand_equal_p (arg00, arg10, 0)
10023 && ! HONOR_SNANS (TYPE_MODE (type)))
10026 /* Optimize root(x)*root(y) as root(x*y). */
10027 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10028 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10029 return build_call_expr (rootfn, 1, arg);
10032 /* Optimize expN(x)*expN(y) as expN(x+y). */
10033 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10035 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10036 tree arg = fold_build2 (PLUS_EXPR, type,
10037 CALL_EXPR_ARG (arg0, 0),
10038 CALL_EXPR_ARG (arg1, 0));
10039 return build_call_expr (expfn, 1, arg);
10042 /* Optimizations of pow(...)*pow(...). */
10043 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10044 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10045 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10047 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10048 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10049 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10050 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10052 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10053 if (operand_equal_p (arg01, arg11, 0))
10055 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10056 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10057 return build_call_expr (powfn, 2, arg, arg01);
10060 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10061 if (operand_equal_p (arg00, arg10, 0))
10063 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10064 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10065 return build_call_expr (powfn, 2, arg00, arg);
10069 /* Optimize tan(x)*cos(x) as sin(x). */
10070 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10071 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10072 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10073 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10074 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10075 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10076 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10077 CALL_EXPR_ARG (arg1, 0), 0))
10079 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10081 if (sinfn != NULL_TREE)
10082 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10085 /* Optimize x*pow(x,c) as pow(x,c+1). */
10086 if (fcode1 == BUILT_IN_POW
10087 || fcode1 == BUILT_IN_POWF
10088 || fcode1 == BUILT_IN_POWL)
10090 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10091 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10092 if (TREE_CODE (arg11) == REAL_CST
10093 && !TREE_OVERFLOW (arg11)
10094 && operand_equal_p (arg0, arg10, 0))
10096 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10100 c = TREE_REAL_CST (arg11);
10101 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10102 arg = build_real (type, c);
10103 return build_call_expr (powfn, 2, arg0, arg);
10107 /* Optimize pow(x,c)*x as pow(x,c+1). */
10108 if (fcode0 == BUILT_IN_POW
10109 || fcode0 == BUILT_IN_POWF
10110 || fcode0 == BUILT_IN_POWL)
10112 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10113 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10114 if (TREE_CODE (arg01) == REAL_CST
10115 && !TREE_OVERFLOW (arg01)
10116 && operand_equal_p (arg1, arg00, 0))
10118 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10122 c = TREE_REAL_CST (arg01);
10123 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10124 arg = build_real (type, c);
10125 return build_call_expr (powfn, 2, arg1, arg);
10129 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10130 if (! optimize_size
10131 && operand_equal_p (arg0, arg1, 0))
10133 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10137 tree arg = build_real (type, dconst2);
10138 return build_call_expr (powfn, 2, arg0, arg);
10147 if (integer_all_onesp (arg1))
10148 return omit_one_operand (type, arg1, arg0);
10149 if (integer_zerop (arg1))
10150 return non_lvalue (fold_convert (type, arg0));
10151 if (operand_equal_p (arg0, arg1, 0))
10152 return non_lvalue (fold_convert (type, arg0));
10154 /* ~X | X is -1. */
10155 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10158 t1 = build_int_cst_type (type, -1);
10159 return omit_one_operand (type, t1, arg1);
10162 /* X | ~X is -1. */
10163 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10166 t1 = build_int_cst_type (type, -1);
10167 return omit_one_operand (type, t1, arg0);
10170 /* Canonicalize (X & C1) | C2. */
10171 if (TREE_CODE (arg0) == BIT_AND_EXPR
10172 && TREE_CODE (arg1) == INTEGER_CST
10173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10175 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10176 int width = TYPE_PRECISION (type);
10177 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10178 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10179 hi2 = TREE_INT_CST_HIGH (arg1);
10180 lo2 = TREE_INT_CST_LOW (arg1);
10182 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10183 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10184 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10186 if (width > HOST_BITS_PER_WIDE_INT)
10188 mhi = (unsigned HOST_WIDE_INT) -1
10189 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10195 mlo = (unsigned HOST_WIDE_INT) -1
10196 >> (HOST_BITS_PER_WIDE_INT - width);
10199 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10200 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10201 return fold_build2 (BIT_IOR_EXPR, type,
10202 TREE_OPERAND (arg0, 0), arg1);
10204 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10207 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10208 return fold_build2 (BIT_IOR_EXPR, type,
10209 fold_build2 (BIT_AND_EXPR, type,
10210 TREE_OPERAND (arg0, 0),
10211 build_int_cst_wide (type,
10217 /* (X & Y) | Y is (X, Y). */
10218 if (TREE_CODE (arg0) == BIT_AND_EXPR
10219 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10220 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10221 /* (X & Y) | X is (Y, X). */
10222 if (TREE_CODE (arg0) == BIT_AND_EXPR
10223 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10224 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10225 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10226 /* X | (X & Y) is (Y, X). */
10227 if (TREE_CODE (arg1) == BIT_AND_EXPR
10228 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10229 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10230 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10231 /* X | (Y & X) is (Y, X). */
10232 if (TREE_CODE (arg1) == BIT_AND_EXPR
10233 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10234 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10235 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10237 t1 = distribute_bit_expr (code, type, arg0, arg1);
10238 if (t1 != NULL_TREE)
10241 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10243 This results in more efficient code for machines without a NAND
10244 instruction. Combine will canonicalize to the first form
10245 which will allow use of NAND instructions provided by the
10246 backend if they exist. */
10247 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10248 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10250 return fold_build1 (BIT_NOT_EXPR, type,
10251 build2 (BIT_AND_EXPR, type,
10252 TREE_OPERAND (arg0, 0),
10253 TREE_OPERAND (arg1, 0)));
10256 /* See if this can be simplified into a rotate first. If that
10257 is unsuccessful continue in the association code. */
10261 if (integer_zerop (arg1))
10262 return non_lvalue (fold_convert (type, arg0));
10263 if (integer_all_onesp (arg1))
10264 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10265 if (operand_equal_p (arg0, arg1, 0))
10266 return omit_one_operand (type, integer_zero_node, arg0);
10268 /* ~X ^ X is -1. */
10269 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10272 t1 = build_int_cst_type (type, -1);
10273 return omit_one_operand (type, t1, arg1);
10276 /* X ^ ~X is -1. */
10277 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10278 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10280 t1 = build_int_cst_type (type, -1);
10281 return omit_one_operand (type, t1, arg0);
10284 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10285 with a constant, and the two constants have no bits in common,
10286 we should treat this as a BIT_IOR_EXPR since this may produce more
10287 simplifications. */
10288 if (TREE_CODE (arg0) == BIT_AND_EXPR
10289 && TREE_CODE (arg1) == BIT_AND_EXPR
10290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10291 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10292 && integer_zerop (const_binop (BIT_AND_EXPR,
10293 TREE_OPERAND (arg0, 1),
10294 TREE_OPERAND (arg1, 1), 0)))
10296 code = BIT_IOR_EXPR;
10300 /* (X | Y) ^ X -> Y & ~ X*/
10301 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10302 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10304 tree t2 = TREE_OPERAND (arg0, 1);
10305 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10307 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10308 fold_convert (type, t1));
10312 /* (Y | X) ^ X -> Y & ~ X*/
10313 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10314 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10316 tree t2 = TREE_OPERAND (arg0, 0);
10317 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10319 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10320 fold_convert (type, t1));
10324 /* X ^ (X | Y) -> Y & ~ X*/
10325 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10326 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10328 tree t2 = TREE_OPERAND (arg1, 1);
10329 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10331 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10332 fold_convert (type, t1));
10336 /* X ^ (Y | X) -> Y & ~ X*/
10337 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10338 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10340 tree t2 = TREE_OPERAND (arg1, 0);
10341 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10343 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10344 fold_convert (type, t1));
10348 /* Convert ~X ^ ~Y to X ^ Y. */
10349 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10350 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10351 return fold_build2 (code, type,
10352 fold_convert (type, TREE_OPERAND (arg0, 0)),
10353 fold_convert (type, TREE_OPERAND (arg1, 0)));
10355 /* Convert ~X ^ C to X ^ ~C. */
10356 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10357 && TREE_CODE (arg1) == INTEGER_CST)
10358 return fold_build2 (code, type,
10359 fold_convert (type, TREE_OPERAND (arg0, 0)),
10360 fold_build1 (BIT_NOT_EXPR, type, arg1));
10362 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10363 if (TREE_CODE (arg0) == BIT_AND_EXPR
10364 && integer_onep (TREE_OPERAND (arg0, 1))
10365 && integer_onep (arg1))
10366 return fold_build2 (EQ_EXPR, type, arg0,
10367 build_int_cst (TREE_TYPE (arg0), 0));
10369 /* Fold (X & Y) ^ Y as ~X & Y. */
10370 if (TREE_CODE (arg0) == BIT_AND_EXPR
10371 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10373 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10374 return fold_build2 (BIT_AND_EXPR, type,
10375 fold_build1 (BIT_NOT_EXPR, type, tem),
10376 fold_convert (type, arg1));
10378 /* Fold (X & Y) ^ X as ~Y & X. */
10379 if (TREE_CODE (arg0) == BIT_AND_EXPR
10380 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10381 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10383 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10384 return fold_build2 (BIT_AND_EXPR, type,
10385 fold_build1 (BIT_NOT_EXPR, type, tem),
10386 fold_convert (type, arg1));
10388 /* Fold X ^ (X & Y) as X & ~Y. */
10389 if (TREE_CODE (arg1) == BIT_AND_EXPR
10390 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10392 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10393 return fold_build2 (BIT_AND_EXPR, type,
10394 fold_convert (type, arg0),
10395 fold_build1 (BIT_NOT_EXPR, type, tem));
10397 /* Fold X ^ (Y & X) as ~Y & X. */
10398 if (TREE_CODE (arg1) == BIT_AND_EXPR
10399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10400 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10402 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10403 return fold_build2 (BIT_AND_EXPR, type,
10404 fold_build1 (BIT_NOT_EXPR, type, tem),
10405 fold_convert (type, arg0));
10408 /* See if this can be simplified into a rotate first. If that
10409 is unsuccessful continue in the association code. */
10413 if (integer_all_onesp (arg1))
10414 return non_lvalue (fold_convert (type, arg0));
10415 if (integer_zerop (arg1))
10416 return omit_one_operand (type, arg1, arg0);
10417 if (operand_equal_p (arg0, arg1, 0))
10418 return non_lvalue (fold_convert (type, arg0));
10420 /* ~X & X is always zero. */
10421 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10422 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10423 return omit_one_operand (type, integer_zero_node, arg1);
10425 /* X & ~X is always zero. */
10426 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10427 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10428 return omit_one_operand (type, integer_zero_node, arg0);
10430 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10431 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10432 && TREE_CODE (arg1) == INTEGER_CST
10433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10434 return fold_build2 (BIT_IOR_EXPR, type,
10435 fold_build2 (BIT_AND_EXPR, type,
10436 TREE_OPERAND (arg0, 0), arg1),
10437 fold_build2 (BIT_AND_EXPR, type,
10438 TREE_OPERAND (arg0, 1), arg1));
10440 /* (X | Y) & Y is (X, Y). */
10441 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10442 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10443 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10444 /* (X | Y) & X is (Y, X). */
10445 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10446 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10447 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10448 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10449 /* X & (X | Y) is (Y, X). */
10450 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10451 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10452 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10453 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10454 /* X & (Y | X) is (Y, X). */
10455 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10456 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10457 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10458 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10460 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10461 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10462 && integer_onep (TREE_OPERAND (arg0, 1))
10463 && integer_onep (arg1))
10465 tem = TREE_OPERAND (arg0, 0);
10466 return fold_build2 (EQ_EXPR, type,
10467 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10468 build_int_cst (TREE_TYPE (tem), 1)),
10469 build_int_cst (TREE_TYPE (tem), 0));
10471 /* Fold ~X & 1 as (X & 1) == 0. */
10472 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10473 && integer_onep (arg1))
10475 tem = TREE_OPERAND (arg0, 0);
10476 return fold_build2 (EQ_EXPR, type,
10477 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10478 build_int_cst (TREE_TYPE (tem), 1)),
10479 build_int_cst (TREE_TYPE (tem), 0));
10482 /* Fold (X ^ Y) & Y as ~X & Y. */
10483 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10484 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10486 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10487 return fold_build2 (BIT_AND_EXPR, type,
10488 fold_build1 (BIT_NOT_EXPR, type, tem),
10489 fold_convert (type, arg1));
10491 /* Fold (X ^ Y) & X as ~Y & X. */
10492 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10493 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10494 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10496 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10497 return fold_build2 (BIT_AND_EXPR, type,
10498 fold_build1 (BIT_NOT_EXPR, type, tem),
10499 fold_convert (type, arg1));
10501 /* Fold X & (X ^ Y) as X & ~Y. */
10502 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10503 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10505 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10506 return fold_build2 (BIT_AND_EXPR, type,
10507 fold_convert (type, arg0),
10508 fold_build1 (BIT_NOT_EXPR, type, tem));
10510 /* Fold X & (Y ^ X) as ~Y & X. */
10511 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10512 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10513 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10515 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10516 return fold_build2 (BIT_AND_EXPR, type,
10517 fold_build1 (BIT_NOT_EXPR, type, tem),
10518 fold_convert (type, arg0));
10521 t1 = distribute_bit_expr (code, type, arg0, arg1);
10522 if (t1 != NULL_TREE)
10524 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10525 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10526 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10529 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10531 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10532 && (~TREE_INT_CST_LOW (arg1)
10533 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10534 return fold_convert (type, TREE_OPERAND (arg0, 0));
10537 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10539 This results in more efficient code for machines without a NOR
10540 instruction. Combine will canonicalize to the first form
10541 which will allow use of NOR instructions provided by the
10542 backend if they exist. */
10543 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10544 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10546 return fold_build1 (BIT_NOT_EXPR, type,
10547 build2 (BIT_IOR_EXPR, type,
10548 TREE_OPERAND (arg0, 0),
10549 TREE_OPERAND (arg1, 0)));
10555 /* Don't touch a floating-point divide by zero unless the mode
10556 of the constant can represent infinity. */
10557 if (TREE_CODE (arg1) == REAL_CST
10558 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10559 && real_zerop (arg1))
10562 /* Optimize A / A to 1.0 if we don't care about
10563 NaNs or Infinities. Skip the transformation
10564 for non-real operands. */
10565 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10566 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10567 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10568 && operand_equal_p (arg0, arg1, 0))
10570 tree r = build_real (TREE_TYPE (arg0), dconst1);
10572 return omit_two_operands (type, r, arg0, arg1);
10575 /* The complex version of the above A / A optimization. */
10576 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10577 && operand_equal_p (arg0, arg1, 0))
10579 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10580 if (! HONOR_NANS (TYPE_MODE (elem_type))
10581 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10583 tree r = build_real (elem_type, dconst1);
10584 /* omit_two_operands will call fold_convert for us. */
10585 return omit_two_operands (type, r, arg0, arg1);
10589 /* (-A) / (-B) -> A / B */
10590 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10591 return fold_build2 (RDIV_EXPR, type,
10592 TREE_OPERAND (arg0, 0),
10593 negate_expr (arg1));
10594 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10595 return fold_build2 (RDIV_EXPR, type,
10596 negate_expr (arg0),
10597 TREE_OPERAND (arg1, 0));
10599 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10600 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10601 && real_onep (arg1))
10602 return non_lvalue (fold_convert (type, arg0));
10604 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10605 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10606 && real_minus_onep (arg1))
10607 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10609 /* If ARG1 is a constant, we can convert this to a multiply by the
10610 reciprocal. This does not have the same rounding properties,
10611 so only do this if -funsafe-math-optimizations. We can actually
10612 always safely do it if ARG1 is a power of two, but it's hard to
10613 tell if it is or not in a portable manner. */
10614 if (TREE_CODE (arg1) == REAL_CST)
10616 if (flag_unsafe_math_optimizations
10617 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10619 return fold_build2 (MULT_EXPR, type, arg0, tem);
10620 /* Find the reciprocal if optimizing and the result is exact. */
10624 r = TREE_REAL_CST (arg1);
10625 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10627 tem = build_real (type, r);
10628 return fold_build2 (MULT_EXPR, type,
10629 fold_convert (type, arg0), tem);
10633 /* Convert A/B/C to A/(B*C). */
10634 if (flag_unsafe_math_optimizations
10635 && TREE_CODE (arg0) == RDIV_EXPR)
10636 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10637 fold_build2 (MULT_EXPR, type,
10638 TREE_OPERAND (arg0, 1), arg1));
10640 /* Convert A/(B/C) to (A/B)*C. */
10641 if (flag_unsafe_math_optimizations
10642 && TREE_CODE (arg1) == RDIV_EXPR)
10643 return fold_build2 (MULT_EXPR, type,
10644 fold_build2 (RDIV_EXPR, type, arg0,
10645 TREE_OPERAND (arg1, 0)),
10646 TREE_OPERAND (arg1, 1));
10648 /* Convert C1/(X*C2) into (C1/C2)/X. */
10649 if (flag_unsafe_math_optimizations
10650 && TREE_CODE (arg1) == MULT_EXPR
10651 && TREE_CODE (arg0) == REAL_CST
10652 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10654 tree tem = const_binop (RDIV_EXPR, arg0,
10655 TREE_OPERAND (arg1, 1), 0);
10657 return fold_build2 (RDIV_EXPR, type, tem,
10658 TREE_OPERAND (arg1, 0));
10661 if (flag_unsafe_math_optimizations)
10663 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10664 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10666 /* Optimize sin(x)/cos(x) as tan(x). */
10667 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10668 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10669 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10670 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10671 CALL_EXPR_ARG (arg1, 0), 0))
10673 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10675 if (tanfn != NULL_TREE)
10676 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10679 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10680 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10681 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10682 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10683 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10684 CALL_EXPR_ARG (arg1, 0), 0))
10686 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10688 if (tanfn != NULL_TREE)
10690 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10691 return fold_build2 (RDIV_EXPR, type,
10692 build_real (type, dconst1), tmp);
10696 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10697 NaNs or Infinities. */
10698 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10699 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10700 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10702 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10703 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10705 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10706 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10707 && operand_equal_p (arg00, arg01, 0))
10709 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10711 if (cosfn != NULL_TREE)
10712 return build_call_expr (cosfn, 1, arg00);
10716 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10717 NaNs or Infinities. */
10718 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10719 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10720 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10722 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10723 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10725 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10726 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10727 && operand_equal_p (arg00, arg01, 0))
10729 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10731 if (cosfn != NULL_TREE)
10733 tree tmp = build_call_expr (cosfn, 1, arg00);
10734 return fold_build2 (RDIV_EXPR, type,
10735 build_real (type, dconst1),
10741 /* Optimize pow(x,c)/x as pow(x,c-1). */
10742 if (fcode0 == BUILT_IN_POW
10743 || fcode0 == BUILT_IN_POWF
10744 || fcode0 == BUILT_IN_POWL)
10746 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10747 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10748 if (TREE_CODE (arg01) == REAL_CST
10749 && !TREE_OVERFLOW (arg01)
10750 && operand_equal_p (arg1, arg00, 0))
10752 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10756 c = TREE_REAL_CST (arg01);
10757 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10758 arg = build_real (type, c);
10759 return build_call_expr (powfn, 2, arg1, arg);
10763 /* Optimize a/root(b/c) into a*root(c/b). */
10764 if (BUILTIN_ROOT_P (fcode1))
10766 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10768 if (TREE_CODE (rootarg) == RDIV_EXPR)
10770 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10771 tree b = TREE_OPERAND (rootarg, 0);
10772 tree c = TREE_OPERAND (rootarg, 1);
10774 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
10776 tmp = build_call_expr (rootfn, 1, tmp);
10777 return fold_build2 (MULT_EXPR, type, arg0, tmp);
10781 /* Optimize x/expN(y) into x*expN(-y). */
10782 if (BUILTIN_EXPONENT_P (fcode1))
10784 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10785 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10786 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10787 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10790 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10791 if (fcode1 == BUILT_IN_POW
10792 || fcode1 == BUILT_IN_POWF
10793 || fcode1 == BUILT_IN_POWL)
10795 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10796 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10797 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10798 tree neg11 = fold_convert (type, negate_expr (arg11));
10799 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10800 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10805 case TRUNC_DIV_EXPR:
10806 case FLOOR_DIV_EXPR:
10807 /* Simplify A / (B << N) where A and B are positive and B is
10808 a power of 2, to A >> (N + log2(B)). */
10809 strict_overflow_p = false;
10810 if (TREE_CODE (arg1) == LSHIFT_EXPR
10811 && (TYPE_UNSIGNED (type)
10812 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10814 tree sval = TREE_OPERAND (arg1, 0);
10815 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10817 tree sh_cnt = TREE_OPERAND (arg1, 1);
10818 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10820 if (strict_overflow_p)
10821 fold_overflow_warning (("assuming signed overflow does not "
10822 "occur when simplifying A / (B << N)"),
10823 WARN_STRICT_OVERFLOW_MISC);
10825 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10826 sh_cnt, build_int_cst (NULL_TREE, pow2));
10827 return fold_build2 (RSHIFT_EXPR, type,
10828 fold_convert (type, arg0), sh_cnt);
10833 case ROUND_DIV_EXPR:
10834 case CEIL_DIV_EXPR:
10835 case EXACT_DIV_EXPR:
10836 if (integer_onep (arg1))
10837 return non_lvalue (fold_convert (type, arg0));
10838 if (integer_zerop (arg1))
10840 /* X / -1 is -X. */
10841 if (!TYPE_UNSIGNED (type)
10842 && TREE_CODE (arg1) == INTEGER_CST
10843 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10844 && TREE_INT_CST_HIGH (arg1) == -1)
10845 return fold_convert (type, negate_expr (arg0));
10847 /* Convert -A / -B to A / B when the type is signed and overflow is
10849 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10850 && TREE_CODE (arg0) == NEGATE_EXPR
10851 && negate_expr_p (arg1))
10853 if (INTEGRAL_TYPE_P (type))
10854 fold_overflow_warning (("assuming signed overflow does not occur "
10855 "when distributing negation across "
10857 WARN_STRICT_OVERFLOW_MISC);
10858 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10859 negate_expr (arg1));
10861 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10862 && TREE_CODE (arg1) == NEGATE_EXPR
10863 && negate_expr_p (arg0))
10865 if (INTEGRAL_TYPE_P (type))
10866 fold_overflow_warning (("assuming signed overflow does not occur "
10867 "when distributing negation across "
10869 WARN_STRICT_OVERFLOW_MISC);
10870 return fold_build2 (code, type, negate_expr (arg0),
10871 TREE_OPERAND (arg1, 0));
10874 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10875 operation, EXACT_DIV_EXPR.
10877 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10878 At one time others generated faster code, it's not clear if they do
10879 after the last round to changes to the DIV code in expmed.c. */
10880 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10881 && multiple_of_p (type, arg0, arg1))
10882 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10884 strict_overflow_p = false;
10885 if (TREE_CODE (arg1) == INTEGER_CST
10886 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10887 &strict_overflow_p)))
10889 if (strict_overflow_p)
10890 fold_overflow_warning (("assuming signed overflow does not occur "
10891 "when simplifying division"),
10892 WARN_STRICT_OVERFLOW_MISC);
10893 return fold_convert (type, tem);
10898 case CEIL_MOD_EXPR:
10899 case FLOOR_MOD_EXPR:
10900 case ROUND_MOD_EXPR:
10901 case TRUNC_MOD_EXPR:
10902 /* X % 1 is always zero, but be sure to preserve any side
10904 if (integer_onep (arg1))
10905 return omit_one_operand (type, integer_zero_node, arg0);
10907 /* X % 0, return X % 0 unchanged so that we can get the
10908 proper warnings and errors. */
10909 if (integer_zerop (arg1))
10912 /* 0 % X is always zero, but be sure to preserve any side
10913 effects in X. Place this after checking for X == 0. */
10914 if (integer_zerop (arg0))
10915 return omit_one_operand (type, integer_zero_node, arg1);
10917 /* X % -1 is zero. */
10918 if (!TYPE_UNSIGNED (type)
10919 && TREE_CODE (arg1) == INTEGER_CST
10920 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10921 && TREE_INT_CST_HIGH (arg1) == -1)
10922 return omit_one_operand (type, integer_zero_node, arg0);
10924 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10925 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10926 strict_overflow_p = false;
10927 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10928 && (TYPE_UNSIGNED (type)
10929 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10932 /* Also optimize A % (C << N) where C is a power of 2,
10933 to A & ((C << N) - 1). */
10934 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10935 c = TREE_OPERAND (arg1, 0);
10937 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10939 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10940 build_int_cst (TREE_TYPE (arg1), 1));
10941 if (strict_overflow_p)
10942 fold_overflow_warning (("assuming signed overflow does not "
10943 "occur when simplifying "
10944 "X % (power of two)"),
10945 WARN_STRICT_OVERFLOW_MISC);
10946 return fold_build2 (BIT_AND_EXPR, type,
10947 fold_convert (type, arg0),
10948 fold_convert (type, mask));
10952 /* X % -C is the same as X % C. */
10953 if (code == TRUNC_MOD_EXPR
10954 && !TYPE_UNSIGNED (type)
10955 && TREE_CODE (arg1) == INTEGER_CST
10956 && !TREE_OVERFLOW (arg1)
10957 && TREE_INT_CST_HIGH (arg1) < 0
10958 && !TYPE_OVERFLOW_TRAPS (type)
10959 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10960 && !sign_bit_p (arg1, arg1))
10961 return fold_build2 (code, type, fold_convert (type, arg0),
10962 fold_convert (type, negate_expr (arg1)));
10964 /* X % -Y is the same as X % Y. */
10965 if (code == TRUNC_MOD_EXPR
10966 && !TYPE_UNSIGNED (type)
10967 && TREE_CODE (arg1) == NEGATE_EXPR
10968 && !TYPE_OVERFLOW_TRAPS (type))
10969 return fold_build2 (code, type, fold_convert (type, arg0),
10970 fold_convert (type, TREE_OPERAND (arg1, 0)));
10972 if (TREE_CODE (arg1) == INTEGER_CST
10973 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10974 &strict_overflow_p)))
10976 if (strict_overflow_p)
10977 fold_overflow_warning (("assuming signed overflow does not occur "
10978 "when simplifying modulos"),
10979 WARN_STRICT_OVERFLOW_MISC);
10980 return fold_convert (type, tem);
10987 if (integer_all_onesp (arg0))
10988 return omit_one_operand (type, arg0, arg1);
10992 /* Optimize -1 >> x for arithmetic right shifts. */
10993 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10994 return omit_one_operand (type, arg0, arg1);
10995 /* ... fall through ... */
10999 if (integer_zerop (arg1))
11000 return non_lvalue (fold_convert (type, arg0));
11001 if (integer_zerop (arg0))
11002 return omit_one_operand (type, arg0, arg1);
11004 /* Since negative shift count is not well-defined,
11005 don't try to compute it in the compiler. */
11006 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11009 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11010 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11011 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11012 && host_integerp (TREE_OPERAND (arg0, 1), false)
11013 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11015 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11016 + TREE_INT_CST_LOW (arg1));
11018 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11019 being well defined. */
11020 if (low >= TYPE_PRECISION (type))
11022 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11023 low = low % TYPE_PRECISION (type);
11024 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11025 return build_int_cst (type, 0);
11027 low = TYPE_PRECISION (type) - 1;
11030 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11031 build_int_cst (type, low));
11034 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11035 into x & ((unsigned)-1 >> c) for unsigned types. */
11036 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11037 || (TYPE_UNSIGNED (type)
11038 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11039 && host_integerp (arg1, false)
11040 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11041 && host_integerp (TREE_OPERAND (arg0, 1), false)
11042 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11044 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11045 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11051 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11053 lshift = build_int_cst (type, -1);
11054 lshift = int_const_binop (code, lshift, arg1, 0);
11056 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11060 /* Rewrite an LROTATE_EXPR by a constant into an
11061 RROTATE_EXPR by a new constant. */
11062 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11064 tree tem = build_int_cst (TREE_TYPE (arg1),
11065 GET_MODE_BITSIZE (TYPE_MODE (type)));
11066 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11067 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
11070 /* If we have a rotate of a bit operation with the rotate count and
11071 the second operand of the bit operation both constant,
11072 permute the two operations. */
11073 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11074 && (TREE_CODE (arg0) == BIT_AND_EXPR
11075 || TREE_CODE (arg0) == BIT_IOR_EXPR
11076 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11077 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11078 return fold_build2 (TREE_CODE (arg0), type,
11079 fold_build2 (code, type,
11080 TREE_OPERAND (arg0, 0), arg1),
11081 fold_build2 (code, type,
11082 TREE_OPERAND (arg0, 1), arg1));
11084 /* Two consecutive rotates adding up to the width of the mode can
11086 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11087 && TREE_CODE (arg0) == RROTATE_EXPR
11088 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11089 && TREE_INT_CST_HIGH (arg1) == 0
11090 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11091 && ((TREE_INT_CST_LOW (arg1)
11092 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11093 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11094 return TREE_OPERAND (arg0, 0);
11099 if (operand_equal_p (arg0, arg1, 0))
11100 return omit_one_operand (type, arg0, arg1);
11101 if (INTEGRAL_TYPE_P (type)
11102 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11103 return omit_one_operand (type, arg1, arg0);
11104 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11110 if (operand_equal_p (arg0, arg1, 0))
11111 return omit_one_operand (type, arg0, arg1);
11112 if (INTEGRAL_TYPE_P (type)
11113 && TYPE_MAX_VALUE (type)
11114 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11115 return omit_one_operand (type, arg1, arg0);
11116 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11121 case TRUTH_ANDIF_EXPR:
11122 /* Note that the operands of this must be ints
11123 and their values must be 0 or 1.
11124 ("true" is a fixed value perhaps depending on the language.) */
11125 /* If first arg is constant zero, return it. */
11126 if (integer_zerop (arg0))
11127 return fold_convert (type, arg0);
11128 case TRUTH_AND_EXPR:
11129 /* If either arg is constant true, drop it. */
11130 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11131 return non_lvalue (fold_convert (type, arg1));
11132 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11133 /* Preserve sequence points. */
11134 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11135 return non_lvalue (fold_convert (type, arg0));
11136 /* If second arg is constant zero, result is zero, but first arg
11137 must be evaluated. */
11138 if (integer_zerop (arg1))
11139 return omit_one_operand (type, arg1, arg0);
11140 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11141 case will be handled here. */
11142 if (integer_zerop (arg0))
11143 return omit_one_operand (type, arg0, arg1);
11145 /* !X && X is always false. */
11146 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11147 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11148 return omit_one_operand (type, integer_zero_node, arg1);
11149 /* X && !X is always false. */
11150 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11151 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11152 return omit_one_operand (type, integer_zero_node, arg0);
11154 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11155 means A >= Y && A != MAX, but in this case we know that
11158 if (!TREE_SIDE_EFFECTS (arg0)
11159 && !TREE_SIDE_EFFECTS (arg1))
11161 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11162 if (tem && !operand_equal_p (tem, arg0, 0))
11163 return fold_build2 (code, type, tem, arg1);
11165 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11166 if (tem && !operand_equal_p (tem, arg1, 0))
11167 return fold_build2 (code, type, arg0, tem);
11171 /* We only do these simplifications if we are optimizing. */
11175 /* Check for things like (A || B) && (A || C). We can convert this
11176 to A || (B && C). Note that either operator can be any of the four
11177 truth and/or operations and the transformation will still be
11178 valid. Also note that we only care about order for the
11179 ANDIF and ORIF operators. If B contains side effects, this
11180 might change the truth-value of A. */
11181 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11182 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11183 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11184 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11185 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11186 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11188 tree a00 = TREE_OPERAND (arg0, 0);
11189 tree a01 = TREE_OPERAND (arg0, 1);
11190 tree a10 = TREE_OPERAND (arg1, 0);
11191 tree a11 = TREE_OPERAND (arg1, 1);
11192 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11193 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11194 && (code == TRUTH_AND_EXPR
11195 || code == TRUTH_OR_EXPR));
11197 if (operand_equal_p (a00, a10, 0))
11198 return fold_build2 (TREE_CODE (arg0), type, a00,
11199 fold_build2 (code, type, a01, a11));
11200 else if (commutative && operand_equal_p (a00, a11, 0))
11201 return fold_build2 (TREE_CODE (arg0), type, a00,
11202 fold_build2 (code, type, a01, a10));
11203 else if (commutative && operand_equal_p (a01, a10, 0))
11204 return fold_build2 (TREE_CODE (arg0), type, a01,
11205 fold_build2 (code, type, a00, a11));
11207 /* This case if tricky because we must either have commutative
11208 operators or else A10 must not have side-effects. */
11210 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11211 && operand_equal_p (a01, a11, 0))
11212 return fold_build2 (TREE_CODE (arg0), type,
11213 fold_build2 (code, type, a00, a10),
11217 /* See if we can build a range comparison. */
11218 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11221 /* Check for the possibility of merging component references. If our
11222 lhs is another similar operation, try to merge its rhs with our
11223 rhs. Then try to merge our lhs and rhs. */
11224 if (TREE_CODE (arg0) == code
11225 && 0 != (tem = fold_truthop (code, type,
11226 TREE_OPERAND (arg0, 1), arg1)))
11227 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11229 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11234 case TRUTH_ORIF_EXPR:
11235 /* Note that the operands of this must be ints
11236 and their values must be 0 or true.
11237 ("true" is a fixed value perhaps depending on the language.) */
11238 /* If first arg is constant true, return it. */
11239 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11240 return fold_convert (type, arg0);
11241 case TRUTH_OR_EXPR:
11242 /* If either arg is constant zero, drop it. */
11243 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11244 return non_lvalue (fold_convert (type, arg1));
11245 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11246 /* Preserve sequence points. */
11247 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11248 return non_lvalue (fold_convert (type, arg0));
11249 /* If second arg is constant true, result is true, but we must
11250 evaluate first arg. */
11251 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11252 return omit_one_operand (type, arg1, arg0);
11253 /* Likewise for first arg, but note this only occurs here for
11255 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11256 return omit_one_operand (type, arg0, arg1);
11258 /* !X || X is always true. */
11259 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11260 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11261 return omit_one_operand (type, integer_one_node, arg1);
11262 /* X || !X is always true. */
11263 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11264 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11265 return omit_one_operand (type, integer_one_node, arg0);
11269 case TRUTH_XOR_EXPR:
11270 /* If the second arg is constant zero, drop it. */
11271 if (integer_zerop (arg1))
11272 return non_lvalue (fold_convert (type, arg0));
11273 /* If the second arg is constant true, this is a logical inversion. */
11274 if (integer_onep (arg1))
11276 /* Only call invert_truthvalue if operand is a truth value. */
11277 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11278 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11280 tem = invert_truthvalue (arg0);
11281 return non_lvalue (fold_convert (type, tem));
11283 /* Identical arguments cancel to zero. */
11284 if (operand_equal_p (arg0, arg1, 0))
11285 return omit_one_operand (type, integer_zero_node, arg0);
11287 /* !X ^ X is always true. */
11288 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11289 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11290 return omit_one_operand (type, integer_one_node, arg1);
11292 /* X ^ !X is always true. */
11293 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11294 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11295 return omit_one_operand (type, integer_one_node, arg0);
11301 tem = fold_comparison (code, type, op0, op1);
11302 if (tem != NULL_TREE)
11305 /* bool_var != 0 becomes bool_var. */
11306 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11307 && code == NE_EXPR)
11308 return non_lvalue (fold_convert (type, arg0));
11310 /* bool_var == 1 becomes bool_var. */
11311 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11312 && code == EQ_EXPR)
11313 return non_lvalue (fold_convert (type, arg0));
11315 /* bool_var != 1 becomes !bool_var. */
11316 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11317 && code == NE_EXPR)
11318 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11320 /* bool_var == 0 becomes !bool_var. */
11321 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11322 && code == EQ_EXPR)
11323 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11325 /* If this is an equality comparison of the address of two non-weak,
11326 unaliased symbols neither of which are extern (since we do not
11327 have access to attributes for externs), then we know the result. */
11328 if (TREE_CODE (arg0) == ADDR_EXPR
11329 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11330 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11331 && ! lookup_attribute ("alias",
11332 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11333 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11334 && TREE_CODE (arg1) == ADDR_EXPR
11335 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11336 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11337 && ! lookup_attribute ("alias",
11338 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11339 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11341 /* We know that we're looking at the address of two
11342 non-weak, unaliased, static _DECL nodes.
11344 It is both wasteful and incorrect to call operand_equal_p
11345 to compare the two ADDR_EXPR nodes. It is wasteful in that
11346 all we need to do is test pointer equality for the arguments
11347 to the two ADDR_EXPR nodes. It is incorrect to use
11348 operand_equal_p as that function is NOT equivalent to a
11349 C equality test. It can in fact return false for two
11350 objects which would test as equal using the C equality
11352 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11353 return constant_boolean_node (equal
11354 ? code == EQ_EXPR : code != EQ_EXPR,
11358 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11359 a MINUS_EXPR of a constant, we can convert it into a comparison with
11360 a revised constant as long as no overflow occurs. */
11361 if (TREE_CODE (arg1) == INTEGER_CST
11362 && (TREE_CODE (arg0) == PLUS_EXPR
11363 || TREE_CODE (arg0) == MINUS_EXPR)
11364 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11365 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11366 ? MINUS_EXPR : PLUS_EXPR,
11367 fold_convert (TREE_TYPE (arg0), arg1),
11368 TREE_OPERAND (arg0, 1), 0))
11369 && !TREE_OVERFLOW (tem))
11370 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11372 /* Similarly for a NEGATE_EXPR. */
11373 if (TREE_CODE (arg0) == NEGATE_EXPR
11374 && TREE_CODE (arg1) == INTEGER_CST
11375 && 0 != (tem = negate_expr (arg1))
11376 && TREE_CODE (tem) == INTEGER_CST
11377 && !TREE_OVERFLOW (tem))
11378 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11380 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11381 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11382 && TREE_CODE (arg1) == INTEGER_CST
11383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11384 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11385 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11386 fold_convert (TREE_TYPE (arg0), arg1),
11387 TREE_OPERAND (arg0, 1)));
11389 /* Transform comparisons of the form X +- C CMP X. */
11390 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11391 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11392 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11393 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11394 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11396 tree cst = TREE_OPERAND (arg0, 1);
11398 if (code == EQ_EXPR
11399 && !integer_zerop (cst))
11400 return omit_two_operands (type, boolean_false_node,
11401 TREE_OPERAND (arg0, 0), arg1);
11403 return omit_two_operands (type, boolean_true_node,
11404 TREE_OPERAND (arg0, 0), arg1);
11407 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11408 for !=. Don't do this for ordered comparisons due to overflow. */
11409 if (TREE_CODE (arg0) == MINUS_EXPR
11410 && integer_zerop (arg1))
11411 return fold_build2 (code, type,
11412 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11414 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11415 if (TREE_CODE (arg0) == ABS_EXPR
11416 && (integer_zerop (arg1) || real_zerop (arg1)))
11417 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11419 /* If this is an EQ or NE comparison with zero and ARG0 is
11420 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11421 two operations, but the latter can be done in one less insn
11422 on machines that have only two-operand insns or on which a
11423 constant cannot be the first operand. */
11424 if (TREE_CODE (arg0) == BIT_AND_EXPR
11425 && integer_zerop (arg1))
11427 tree arg00 = TREE_OPERAND (arg0, 0);
11428 tree arg01 = TREE_OPERAND (arg0, 1);
11429 if (TREE_CODE (arg00) == LSHIFT_EXPR
11430 && integer_onep (TREE_OPERAND (arg00, 0)))
11432 fold_build2 (code, type,
11433 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11434 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11435 arg01, TREE_OPERAND (arg00, 1)),
11436 fold_convert (TREE_TYPE (arg0),
11437 integer_one_node)),
11439 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11440 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11442 fold_build2 (code, type,
11443 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11444 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11445 arg00, TREE_OPERAND (arg01, 1)),
11446 fold_convert (TREE_TYPE (arg0),
11447 integer_one_node)),
11451 /* If this is an NE or EQ comparison of zero against the result of a
11452 signed MOD operation whose second operand is a power of 2, make
11453 the MOD operation unsigned since it is simpler and equivalent. */
11454 if (integer_zerop (arg1)
11455 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11456 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11457 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11458 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11459 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11462 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11463 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11464 fold_convert (newtype,
11465 TREE_OPERAND (arg0, 0)),
11466 fold_convert (newtype,
11467 TREE_OPERAND (arg0, 1)));
11469 return fold_build2 (code, type, newmod,
11470 fold_convert (newtype, arg1));
11473 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11474 C1 is a valid shift constant, and C2 is a power of two, i.e.
11476 if (TREE_CODE (arg0) == BIT_AND_EXPR
11477 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11478 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11480 && integer_pow2p (TREE_OPERAND (arg0, 1))
11481 && integer_zerop (arg1))
11483 tree itype = TREE_TYPE (arg0);
11484 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11485 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11487 /* Check for a valid shift count. */
11488 if (TREE_INT_CST_HIGH (arg001) == 0
11489 && TREE_INT_CST_LOW (arg001) < prec)
11491 tree arg01 = TREE_OPERAND (arg0, 1);
11492 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11493 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11494 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11495 can be rewritten as (X & (C2 << C1)) != 0. */
11496 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11498 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11499 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11500 return fold_build2 (code, type, tem, arg1);
11502 /* Otherwise, for signed (arithmetic) shifts,
11503 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11504 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11505 else if (!TYPE_UNSIGNED (itype))
11506 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11507 arg000, build_int_cst (itype, 0));
11508 /* Otherwise, of unsigned (logical) shifts,
11509 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11510 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11512 return omit_one_operand (type,
11513 code == EQ_EXPR ? integer_one_node
11514 : integer_zero_node,
11519 /* If this is an NE comparison of zero with an AND of one, remove the
11520 comparison since the AND will give the correct value. */
11521 if (code == NE_EXPR
11522 && integer_zerop (arg1)
11523 && TREE_CODE (arg0) == BIT_AND_EXPR
11524 && integer_onep (TREE_OPERAND (arg0, 1)))
11525 return fold_convert (type, arg0);
11527 /* If we have (A & C) == C where C is a power of 2, convert this into
11528 (A & C) != 0. Similarly for NE_EXPR. */
11529 if (TREE_CODE (arg0) == BIT_AND_EXPR
11530 && integer_pow2p (TREE_OPERAND (arg0, 1))
11531 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11532 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11533 arg0, fold_convert (TREE_TYPE (arg0),
11534 integer_zero_node));
11536 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11537 bit, then fold the expression into A < 0 or A >= 0. */
11538 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11542 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11543 Similarly for NE_EXPR. */
11544 if (TREE_CODE (arg0) == BIT_AND_EXPR
11545 && TREE_CODE (arg1) == INTEGER_CST
11546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11548 tree notc = fold_build1 (BIT_NOT_EXPR,
11549 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11550 TREE_OPERAND (arg0, 1));
11551 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11553 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11554 if (integer_nonzerop (dandnotc))
11555 return omit_one_operand (type, rslt, arg0);
11558 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11559 Similarly for NE_EXPR. */
11560 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11561 && TREE_CODE (arg1) == INTEGER_CST
11562 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11564 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11565 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11566 TREE_OPERAND (arg0, 1), notd);
11567 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11568 if (integer_nonzerop (candnotd))
11569 return omit_one_operand (type, rslt, arg0);
11572 /* If this is a comparison of a field, we may be able to simplify it. */
11573 if ((TREE_CODE (arg0) == COMPONENT_REF
11574 || TREE_CODE (arg0) == BIT_FIELD_REF)
11575 /* Handle the constant case even without -O
11576 to make sure the warnings are given. */
11577 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11579 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11584 /* Optimize comparisons of strlen vs zero to a compare of the
11585 first character of the string vs zero. To wit,
11586 strlen(ptr) == 0 => *ptr == 0
11587 strlen(ptr) != 0 => *ptr != 0
11588 Other cases should reduce to one of these two (or a constant)
11589 due to the return value of strlen being unsigned. */
11590 if (TREE_CODE (arg0) == CALL_EXPR
11591 && integer_zerop (arg1))
11593 tree fndecl = get_callee_fndecl (arg0);
11596 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11597 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11598 && call_expr_nargs (arg0) == 1
11599 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11601 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11602 return fold_build2 (code, type, iref,
11603 build_int_cst (TREE_TYPE (iref), 0));
11607 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11608 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11609 if (TREE_CODE (arg0) == RSHIFT_EXPR
11610 && integer_zerop (arg1)
11611 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11613 tree arg00 = TREE_OPERAND (arg0, 0);
11614 tree arg01 = TREE_OPERAND (arg0, 1);
11615 tree itype = TREE_TYPE (arg00);
11616 if (TREE_INT_CST_HIGH (arg01) == 0
11617 && TREE_INT_CST_LOW (arg01)
11618 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11620 if (TYPE_UNSIGNED (itype))
11622 itype = signed_type_for (itype);
11623 arg00 = fold_convert (itype, arg00);
11625 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11626 type, arg00, build_int_cst (itype, 0));
11630 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11631 if (integer_zerop (arg1)
11632 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11633 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11634 TREE_OPERAND (arg0, 1));
11636 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11637 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11638 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11639 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11640 build_int_cst (TREE_TYPE (arg1), 0));
11641 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11642 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11643 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11644 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11645 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11646 build_int_cst (TREE_TYPE (arg1), 0));
11648 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11649 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11650 && TREE_CODE (arg1) == INTEGER_CST
11651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11652 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11653 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11654 TREE_OPERAND (arg0, 1), arg1));
11656 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11657 (X & C) == 0 when C is a single bit. */
11658 if (TREE_CODE (arg0) == BIT_AND_EXPR
11659 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11660 && integer_zerop (arg1)
11661 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11663 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11664 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11665 TREE_OPERAND (arg0, 1));
11666 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11670 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11671 constant C is a power of two, i.e. a single bit. */
11672 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11674 && integer_zerop (arg1)
11675 && integer_pow2p (TREE_OPERAND (arg0, 1))
11676 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11677 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11679 tree arg00 = TREE_OPERAND (arg0, 0);
11680 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11681 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11684 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11685 when is C is a power of two, i.e. a single bit. */
11686 if (TREE_CODE (arg0) == BIT_AND_EXPR
11687 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11688 && integer_zerop (arg1)
11689 && integer_pow2p (TREE_OPERAND (arg0, 1))
11690 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11691 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11693 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11694 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11695 arg000, TREE_OPERAND (arg0, 1));
11696 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11697 tem, build_int_cst (TREE_TYPE (tem), 0));
11700 if (integer_zerop (arg1)
11701 && tree_expr_nonzero_p (arg0))
11703 tree res = constant_boolean_node (code==NE_EXPR, type);
11704 return omit_one_operand (type, res, arg0);
11707 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11708 if (TREE_CODE (arg0) == NEGATE_EXPR
11709 && TREE_CODE (arg1) == NEGATE_EXPR)
11710 return fold_build2 (code, type,
11711 TREE_OPERAND (arg0, 0),
11712 TREE_OPERAND (arg1, 0));
11714 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11715 if (TREE_CODE (arg0) == BIT_AND_EXPR
11716 && TREE_CODE (arg1) == BIT_AND_EXPR)
11718 tree arg00 = TREE_OPERAND (arg0, 0);
11719 tree arg01 = TREE_OPERAND (arg0, 1);
11720 tree arg10 = TREE_OPERAND (arg1, 0);
11721 tree arg11 = TREE_OPERAND (arg1, 1);
11722 tree itype = TREE_TYPE (arg0);
11724 if (operand_equal_p (arg01, arg11, 0))
11725 return fold_build2 (code, type,
11726 fold_build2 (BIT_AND_EXPR, itype,
11727 fold_build2 (BIT_XOR_EXPR, itype,
11730 build_int_cst (itype, 0));
11732 if (operand_equal_p (arg01, arg10, 0))
11733 return fold_build2 (code, type,
11734 fold_build2 (BIT_AND_EXPR, itype,
11735 fold_build2 (BIT_XOR_EXPR, itype,
11738 build_int_cst (itype, 0));
11740 if (operand_equal_p (arg00, arg11, 0))
11741 return fold_build2 (code, type,
11742 fold_build2 (BIT_AND_EXPR, itype,
11743 fold_build2 (BIT_XOR_EXPR, itype,
11746 build_int_cst (itype, 0));
11748 if (operand_equal_p (arg00, arg10, 0))
11749 return fold_build2 (code, type,
11750 fold_build2 (BIT_AND_EXPR, itype,
11751 fold_build2 (BIT_XOR_EXPR, itype,
11754 build_int_cst (itype, 0));
11757 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11758 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11760 tree arg00 = TREE_OPERAND (arg0, 0);
11761 tree arg01 = TREE_OPERAND (arg0, 1);
11762 tree arg10 = TREE_OPERAND (arg1, 0);
11763 tree arg11 = TREE_OPERAND (arg1, 1);
11764 tree itype = TREE_TYPE (arg0);
11766 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11767 operand_equal_p guarantees no side-effects so we don't need
11768 to use omit_one_operand on Z. */
11769 if (operand_equal_p (arg01, arg11, 0))
11770 return fold_build2 (code, type, arg00, arg10);
11771 if (operand_equal_p (arg01, arg10, 0))
11772 return fold_build2 (code, type, arg00, arg11);
11773 if (operand_equal_p (arg00, arg11, 0))
11774 return fold_build2 (code, type, arg01, arg10);
11775 if (operand_equal_p (arg00, arg10, 0))
11776 return fold_build2 (code, type, arg01, arg11);
11778 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11779 if (TREE_CODE (arg01) == INTEGER_CST
11780 && TREE_CODE (arg11) == INTEGER_CST)
11781 return fold_build2 (code, type,
11782 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11783 fold_build2 (BIT_XOR_EXPR, itype,
11788 /* Attempt to simplify equality/inequality comparisons of complex
11789 values. Only lower the comparison if the result is known or
11790 can be simplified to a single scalar comparison. */
11791 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11792 || TREE_CODE (arg0) == COMPLEX_CST)
11793 && (TREE_CODE (arg1) == COMPLEX_EXPR
11794 || TREE_CODE (arg1) == COMPLEX_CST))
11796 tree real0, imag0, real1, imag1;
11799 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11801 real0 = TREE_OPERAND (arg0, 0);
11802 imag0 = TREE_OPERAND (arg0, 1);
11806 real0 = TREE_REALPART (arg0);
11807 imag0 = TREE_IMAGPART (arg0);
11810 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11812 real1 = TREE_OPERAND (arg1, 0);
11813 imag1 = TREE_OPERAND (arg1, 1);
11817 real1 = TREE_REALPART (arg1);
11818 imag1 = TREE_IMAGPART (arg1);
11821 rcond = fold_binary (code, type, real0, real1);
11822 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11824 if (integer_zerop (rcond))
11826 if (code == EQ_EXPR)
11827 return omit_two_operands (type, boolean_false_node,
11829 return fold_build2 (NE_EXPR, type, imag0, imag1);
11833 if (code == NE_EXPR)
11834 return omit_two_operands (type, boolean_true_node,
11836 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11840 icond = fold_binary (code, type, imag0, imag1);
11841 if (icond && TREE_CODE (icond) == INTEGER_CST)
11843 if (integer_zerop (icond))
11845 if (code == EQ_EXPR)
11846 return omit_two_operands (type, boolean_false_node,
11848 return fold_build2 (NE_EXPR, type, real0, real1);
11852 if (code == NE_EXPR)
11853 return omit_two_operands (type, boolean_true_node,
11855 return fold_build2 (EQ_EXPR, type, real0, real1);
11866 tem = fold_comparison (code, type, op0, op1);
11867 if (tem != NULL_TREE)
11870 /* Transform comparisons of the form X +- C CMP X. */
11871 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11872 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11873 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11874 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11875 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11876 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11878 tree arg01 = TREE_OPERAND (arg0, 1);
11879 enum tree_code code0 = TREE_CODE (arg0);
11882 if (TREE_CODE (arg01) == REAL_CST)
11883 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11885 is_positive = tree_int_cst_sgn (arg01);
11887 /* (X - c) > X becomes false. */
11888 if (code == GT_EXPR
11889 && ((code0 == MINUS_EXPR && is_positive >= 0)
11890 || (code0 == PLUS_EXPR && is_positive <= 0)))
11892 if (TREE_CODE (arg01) == INTEGER_CST
11893 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11894 fold_overflow_warning (("assuming signed overflow does not "
11895 "occur when assuming that (X - c) > X "
11896 "is always false"),
11897 WARN_STRICT_OVERFLOW_ALL);
11898 return constant_boolean_node (0, type);
11901 /* Likewise (X + c) < X becomes false. */
11902 if (code == LT_EXPR
11903 && ((code0 == PLUS_EXPR && is_positive >= 0)
11904 || (code0 == MINUS_EXPR && is_positive <= 0)))
11906 if (TREE_CODE (arg01) == INTEGER_CST
11907 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11908 fold_overflow_warning (("assuming signed overflow does not "
11909 "occur when assuming that "
11910 "(X + c) < X is always false"),
11911 WARN_STRICT_OVERFLOW_ALL);
11912 return constant_boolean_node (0, type);
11915 /* Convert (X - c) <= X to true. */
11916 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11918 && ((code0 == MINUS_EXPR && is_positive >= 0)
11919 || (code0 == PLUS_EXPR && is_positive <= 0)))
11921 if (TREE_CODE (arg01) == INTEGER_CST
11922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11923 fold_overflow_warning (("assuming signed overflow does not "
11924 "occur when assuming that "
11925 "(X - c) <= X is always true"),
11926 WARN_STRICT_OVERFLOW_ALL);
11927 return constant_boolean_node (1, type);
11930 /* Convert (X + c) >= X to true. */
11931 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11933 && ((code0 == PLUS_EXPR && is_positive >= 0)
11934 || (code0 == MINUS_EXPR && is_positive <= 0)))
11936 if (TREE_CODE (arg01) == INTEGER_CST
11937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11938 fold_overflow_warning (("assuming signed overflow does not "
11939 "occur when assuming that "
11940 "(X + c) >= X is always true"),
11941 WARN_STRICT_OVERFLOW_ALL);
11942 return constant_boolean_node (1, type);
11945 if (TREE_CODE (arg01) == INTEGER_CST)
11947 /* Convert X + c > X and X - c < X to true for integers. */
11948 if (code == GT_EXPR
11949 && ((code0 == PLUS_EXPR && is_positive > 0)
11950 || (code0 == MINUS_EXPR && is_positive < 0)))
11952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11953 fold_overflow_warning (("assuming signed overflow does "
11954 "not occur when assuming that "
11955 "(X + c) > X is always true"),
11956 WARN_STRICT_OVERFLOW_ALL);
11957 return constant_boolean_node (1, type);
11960 if (code == LT_EXPR
11961 && ((code0 == MINUS_EXPR && is_positive > 0)
11962 || (code0 == PLUS_EXPR && is_positive < 0)))
11964 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11965 fold_overflow_warning (("assuming signed overflow does "
11966 "not occur when assuming that "
11967 "(X - c) < X is always true"),
11968 WARN_STRICT_OVERFLOW_ALL);
11969 return constant_boolean_node (1, type);
11972 /* Convert X + c <= X and X - c >= X to false for integers. */
11973 if (code == LE_EXPR
11974 && ((code0 == PLUS_EXPR && is_positive > 0)
11975 || (code0 == MINUS_EXPR && is_positive < 0)))
11977 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11978 fold_overflow_warning (("assuming signed overflow does "
11979 "not occur when assuming that "
11980 "(X + c) <= X is always false"),
11981 WARN_STRICT_OVERFLOW_ALL);
11982 return constant_boolean_node (0, type);
11985 if (code == GE_EXPR
11986 && ((code0 == MINUS_EXPR && is_positive > 0)
11987 || (code0 == PLUS_EXPR && is_positive < 0)))
11989 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11990 fold_overflow_warning (("assuming signed overflow does "
11991 "not occur when assuming that "
11992 "(X - c) >= X is always false"),
11993 WARN_STRICT_OVERFLOW_ALL);
11994 return constant_boolean_node (0, type);
11999 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12000 This transformation affects the cases which are handled in later
12001 optimizations involving comparisons with non-negative constants. */
12002 if (TREE_CODE (arg1) == INTEGER_CST
12003 && TREE_CODE (arg0) != INTEGER_CST
12004 && tree_int_cst_sgn (arg1) > 0)
12006 if (code == GE_EXPR)
12008 arg1 = const_binop (MINUS_EXPR, arg1,
12009 build_int_cst (TREE_TYPE (arg1), 1), 0);
12010 return fold_build2 (GT_EXPR, type, arg0,
12011 fold_convert (TREE_TYPE (arg0), arg1));
12013 if (code == LT_EXPR)
12015 arg1 = const_binop (MINUS_EXPR, arg1,
12016 build_int_cst (TREE_TYPE (arg1), 1), 0);
12017 return fold_build2 (LE_EXPR, type, arg0,
12018 fold_convert (TREE_TYPE (arg0), arg1));
12022 /* Comparisons with the highest or lowest possible integer of
12023 the specified precision will have known values. */
12025 tree arg1_type = TREE_TYPE (arg1);
12026 unsigned int width = TYPE_PRECISION (arg1_type);
12028 if (TREE_CODE (arg1) == INTEGER_CST
12029 && !TREE_OVERFLOW (arg1)
12030 && width <= 2 * HOST_BITS_PER_WIDE_INT
12031 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12033 HOST_WIDE_INT signed_max_hi;
12034 unsigned HOST_WIDE_INT signed_max_lo;
12035 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12037 if (width <= HOST_BITS_PER_WIDE_INT)
12039 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12044 if (TYPE_UNSIGNED (arg1_type))
12046 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12052 max_lo = signed_max_lo;
12053 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12059 width -= HOST_BITS_PER_WIDE_INT;
12060 signed_max_lo = -1;
12061 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12066 if (TYPE_UNSIGNED (arg1_type))
12068 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12073 max_hi = signed_max_hi;
12074 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12078 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12079 && TREE_INT_CST_LOW (arg1) == max_lo)
12083 return omit_one_operand (type, integer_zero_node, arg0);
12086 return fold_build2 (EQ_EXPR, type, op0, op1);
12089 return omit_one_operand (type, integer_one_node, arg0);
12092 return fold_build2 (NE_EXPR, type, op0, op1);
12094 /* The GE_EXPR and LT_EXPR cases above are not normally
12095 reached because of previous transformations. */
12100 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12102 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12106 arg1 = const_binop (PLUS_EXPR, arg1,
12107 build_int_cst (TREE_TYPE (arg1), 1), 0);
12108 return fold_build2 (EQ_EXPR, type,
12109 fold_convert (TREE_TYPE (arg1), arg0),
12112 arg1 = const_binop (PLUS_EXPR, arg1,
12113 build_int_cst (TREE_TYPE (arg1), 1), 0);
12114 return fold_build2 (NE_EXPR, type,
12115 fold_convert (TREE_TYPE (arg1), arg0),
12120 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12122 && TREE_INT_CST_LOW (arg1) == min_lo)
12126 return omit_one_operand (type, integer_zero_node, arg0);
12129 return fold_build2 (EQ_EXPR, type, op0, op1);
12132 return omit_one_operand (type, integer_one_node, arg0);
12135 return fold_build2 (NE_EXPR, type, op0, op1);
12140 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12142 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12146 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12147 return fold_build2 (NE_EXPR, type,
12148 fold_convert (TREE_TYPE (arg1), arg0),
12151 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12152 return fold_build2 (EQ_EXPR, type,
12153 fold_convert (TREE_TYPE (arg1), arg0),
12159 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12160 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12161 && TYPE_UNSIGNED (arg1_type)
12162 /* We will flip the signedness of the comparison operator
12163 associated with the mode of arg1, so the sign bit is
12164 specified by this mode. Check that arg1 is the signed
12165 max associated with this sign bit. */
12166 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12167 /* signed_type does not work on pointer types. */
12168 && INTEGRAL_TYPE_P (arg1_type))
12170 /* The following case also applies to X < signed_max+1
12171 and X >= signed_max+1 because of previous transformations. */
12172 if (code == LE_EXPR || code == GT_EXPR)
12175 st = signed_type_for (TREE_TYPE (arg1));
12176 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12177 type, fold_convert (st, arg0),
12178 build_int_cst (st, 0));
12184 /* If we are comparing an ABS_EXPR with a constant, we can
12185 convert all the cases into explicit comparisons, but they may
12186 well not be faster than doing the ABS and one comparison.
12187 But ABS (X) <= C is a range comparison, which becomes a subtraction
12188 and a comparison, and is probably faster. */
12189 if (code == LE_EXPR
12190 && TREE_CODE (arg1) == INTEGER_CST
12191 && TREE_CODE (arg0) == ABS_EXPR
12192 && ! TREE_SIDE_EFFECTS (arg0)
12193 && (0 != (tem = negate_expr (arg1)))
12194 && TREE_CODE (tem) == INTEGER_CST
12195 && !TREE_OVERFLOW (tem))
12196 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12197 build2 (GE_EXPR, type,
12198 TREE_OPERAND (arg0, 0), tem),
12199 build2 (LE_EXPR, type,
12200 TREE_OPERAND (arg0, 0), arg1));
12202 /* Convert ABS_EXPR<x> >= 0 to true. */
12203 strict_overflow_p = false;
12204 if (code == GE_EXPR
12205 && (integer_zerop (arg1)
12206 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12207 && real_zerop (arg1)))
12208 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12210 if (strict_overflow_p)
12211 fold_overflow_warning (("assuming signed overflow does not occur "
12212 "when simplifying comparison of "
12213 "absolute value and zero"),
12214 WARN_STRICT_OVERFLOW_CONDITIONAL);
12215 return omit_one_operand (type, integer_one_node, arg0);
12218 /* Convert ABS_EXPR<x> < 0 to false. */
12219 strict_overflow_p = false;
12220 if (code == LT_EXPR
12221 && (integer_zerop (arg1) || real_zerop (arg1))
12222 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12224 if (strict_overflow_p)
12225 fold_overflow_warning (("assuming signed overflow does not occur "
12226 "when simplifying comparison of "
12227 "absolute value and zero"),
12228 WARN_STRICT_OVERFLOW_CONDITIONAL);
12229 return omit_one_operand (type, integer_zero_node, arg0);
12232 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12233 and similarly for >= into !=. */
12234 if ((code == LT_EXPR || code == GE_EXPR)
12235 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12236 && TREE_CODE (arg1) == LSHIFT_EXPR
12237 && integer_onep (TREE_OPERAND (arg1, 0)))
12238 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12239 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12240 TREE_OPERAND (arg1, 1)),
12241 build_int_cst (TREE_TYPE (arg0), 0));
12243 if ((code == LT_EXPR || code == GE_EXPR)
12244 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12245 && (TREE_CODE (arg1) == NOP_EXPR
12246 || TREE_CODE (arg1) == CONVERT_EXPR)
12247 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12248 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12250 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12251 fold_convert (TREE_TYPE (arg0),
12252 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12253 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12255 build_int_cst (TREE_TYPE (arg0), 0));
12259 case UNORDERED_EXPR:
12267 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12269 t1 = fold_relational_const (code, type, arg0, arg1);
12270 if (t1 != NULL_TREE)
12274 /* If the first operand is NaN, the result is constant. */
12275 if (TREE_CODE (arg0) == REAL_CST
12276 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12277 && (code != LTGT_EXPR || ! flag_trapping_math))
12279 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12280 ? integer_zero_node
12281 : integer_one_node;
12282 return omit_one_operand (type, t1, arg1);
12285 /* If the second operand is NaN, the result is constant. */
12286 if (TREE_CODE (arg1) == REAL_CST
12287 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12288 && (code != LTGT_EXPR || ! flag_trapping_math))
12290 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12291 ? integer_zero_node
12292 : integer_one_node;
12293 return omit_one_operand (type, t1, arg0);
12296 /* Simplify unordered comparison of something with itself. */
12297 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12298 && operand_equal_p (arg0, arg1, 0))
12299 return constant_boolean_node (1, type);
12301 if (code == LTGT_EXPR
12302 && !flag_trapping_math
12303 && operand_equal_p (arg0, arg1, 0))
12304 return constant_boolean_node (0, type);
12306 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12308 tree targ0 = strip_float_extensions (arg0);
12309 tree targ1 = strip_float_extensions (arg1);
12310 tree newtype = TREE_TYPE (targ0);
12312 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12313 newtype = TREE_TYPE (targ1);
12315 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12316 return fold_build2 (code, type, fold_convert (newtype, targ0),
12317 fold_convert (newtype, targ1));
12322 case COMPOUND_EXPR:
12323 /* When pedantic, a compound expression can be neither an lvalue
12324 nor an integer constant expression. */
12325 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12327 /* Don't let (0, 0) be null pointer constant. */
12328 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12329 : fold_convert (type, arg1);
12330 return pedantic_non_lvalue (tem);
12333 if ((TREE_CODE (arg0) == REAL_CST
12334 && TREE_CODE (arg1) == REAL_CST)
12335 || (TREE_CODE (arg0) == INTEGER_CST
12336 && TREE_CODE (arg1) == INTEGER_CST))
12337 return build_complex (type, arg0, arg1);
12341 /* An ASSERT_EXPR should never be passed to fold_binary. */
12342 gcc_unreachable ();
12346 } /* switch (code) */
12349 /* Callback for walk_tree, looking for LABEL_EXPR.
12350 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12351 Do not check the sub-tree of GOTO_EXPR. */
/* walk_tree callback: returns *TP when it is a LABEL_EXPR, otherwise
   keeps walking (per the comment above).  NOTE(review): the return-type
   line, case labels, return statements and braces are elided from this
   listing (line numbers jump 12356 -> 12358 -> 12363); the comments
   below are hedged accordingly.  */
12354 contains_label_1 (tree *tp,
12355 int *walk_subtrees,
12356 void *data ATTRIBUTE_UNUSED)
/* Dispatch on the code of the visited node.  */
12358 switch (TREE_CODE (*tp))
/* Presumably inside the GOTO_EXPR case: do not descend into the
   sub-tree of a GOTO_EXPR -- TODO confirm against the full source.  */
12363 *walk_subtrees = 0;
12370 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12371 accessible from outside the sub-tree. Returns NULL_TREE if no
12372 addressable label is found. */
/* Returns true iff the sub-tree ST contains a LABEL_EXPR, implemented
   as a walk_tree scan using contains_label_1 above.  (The return-type
   line and the surrounding braces are elided in this listing.)  */
12375 contains_label_p (tree st)
12377 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12380 /* Fold a ternary expression of code CODE and type TYPE with operands
12381 OP0, OP1, and OP2. Return the folded expression if folding is
12382 successful. Otherwise, return NULL_TREE. */
/* Folds a ternary expression CODE of type TYPE with operands OP0, OP1,
   OP2; returns the folded tree or (per the comment above) NULL_TREE on
   failure.  NOTE(review): this listing elides many interior lines --
   the `static tree' return-type line, the switch (code) header, several
   case labels, braces and returns -- so comments marking which case a
   statement belongs to are hedged and must be confirmed against the
   full source.  */
12385 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12388 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12389 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* fold_ternary only handles genuine three-operand expression codes.  */
12391 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12392 && TREE_CODE_LENGTH (code) == 3);
12394 /* Strip any conversions that don't change the mode. This is safe
12395 for every expression, except for a comparison expression because
12396 its signedness is derived from its operands. So, in the latter
12397 case, only strip conversions that don't change the signedness.
12399 Note that this is done as an internal manipulation within the
12400 constant folder, in order to find the simplest representation of
12401 the arguments so that their form can be studied. In any cases,
12402 the appropriate type conversions should be put back in the tree
12403 that will get out of the constant folder. */
/* NOTE(review): the switch (code) statement and any earlier cases are
   elided between lines 12403 and 12418.  */
12418 case COMPONENT_REF:
/* Fold a COMPONENT_REF of a constant CONSTRUCTOR by looking the field
   up directly in the constructor's element list.  */
12419 if (TREE_CODE (arg0) == CONSTRUCTOR
12420 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12422 unsigned HOST_WIDE_INT idx;
12424 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
/* Presumably the start of case COND_EXPR (label elided) -- TODO
   confirm against the full source.  */
12431 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12432 so all simple results must be passed through pedantic_non_lvalue. */
12433 if (TREE_CODE (arg0) == INTEGER_CST)
/* The condition is a constant: pick the live arm, but keep the dead
   arm if eliding it would drop a label or change a void type.  */
12435 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12436 tem = integer_zerop (arg0) ? op2 : op1;
12437 /* Only optimize constant conditions when the selected branch
12438 has the same type as the COND_EXPR. This avoids optimizing
12439 away "c ? x : throw", where the throw has a void type.
12440 Avoid throwing away that operand which contains label. */
12441 if ((!TREE_SIDE_EFFECTS (unused_op)
12442 || !contains_label_p (unused_op))
12443 && (! VOID_TYPE_P (TREE_TYPE (tem))
12444 || VOID_TYPE_P (type)))
12445 return pedantic_non_lvalue (tem);
/* A ? B : B simplifies to B (keeping A for side effects).  */
12448 if (operand_equal_p (arg1, op2, 0))
12449 return pedantic_omit_one_operand (type, arg1, arg0);
12451 /* If we have A op B ? A : C, we may be able to convert this to a
12452 simpler expression, depending on the operation and the values
12453 of B and C. Signed zeros prevent all of these transformations,
12454 for reasons given above each one.
12456 Also try swapping the arguments and inverting the conditional. */
12457 if (COMPARISON_CLASS_P (arg0)
12458 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12459 arg1, TREE_OPERAND (arg0, 1))
12460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12462 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transformation with the arms swapped and the comparison
   inverted.  */
12467 if (COMPARISON_CLASS_P (arg0)
12468 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12470 TREE_OPERAND (arg0, 1))
12471 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12473 tem = fold_truth_not_expr (arg0);
12474 if (tem && COMPARISON_CLASS_P (tem))
12476 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12482 /* If the second operand is simpler than the third, swap them
12483 since that produces better jump optimization results. */
12484 if (truth_value_p (TREE_CODE (arg0))
12485 && tree_swap_operands_p (op1, op2, false))
12487 /* See if this can be inverted. If it can't, possibly because
12488 it was a floating-point inequality comparison, don't do
/* NOTE(review): the tail of the comment above (line 12489) is elided.  */
12490 tem = fold_truth_not_expr (arg0);
12492 return fold_build3 (code, type, tem, op2, op1);
12495 /* Convert A ? 1 : 0 to simply A. */
12496 if (integer_onep (op1)
12497 && integer_zerop (op2)
12498 /* If we try to convert OP0 to our type, the
12499 call to fold will try to move the conversion inside
12500 a COND, which will recurse. In that case, the COND_EXPR
12501 is probably the best choice, so leave it alone. */
12502 && type == TREE_TYPE (arg0))
12503 return pedantic_non_lvalue (arg0);
12505 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12506 over COND_EXPR in cases such as floating point comparisons. */
12507 if (integer_zerop (op1)
12508 && integer_onep (op2)
12509 && truth_value_p (TREE_CODE (arg0)))
12510 return pedantic_non_lvalue (fold_convert (type,
12511 invert_truthvalue (arg0)));
12513 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12514 if (TREE_CODE (arg0) == LT_EXPR
12515 && integer_zerop (TREE_OPERAND (arg0, 1))
12516 && integer_zerop (op2)
12517 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12519 /* sign_bit_p only checks ARG1 bits within A's precision.
12520 If <sign bit of A> has wider type than A, bits outside
12521 of A's precision in <sign bit of A> need to be checked.
12522 If they are all 0, this optimization needs to be done
12523 in unsigned A's type, if they are all 1 in signed A's type,
12524 otherwise this can't be done. */
12525 if (TYPE_PRECISION (TREE_TYPE (tem))
12526 < TYPE_PRECISION (TREE_TYPE (arg1))
12527 && TYPE_PRECISION (TREE_TYPE (tem))
12528 < TYPE_PRECISION (type))
12530 unsigned HOST_WIDE_INT mask_lo;
12531 HOST_WIDE_INT mask_hi;
12532 int inner_width, outer_width;
/* Build a two-word mask covering the bits of ARG1 outside TEM's
   precision, then test whether those bits are all ones or all zeros.  */
12535 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12536 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12537 if (outer_width > TYPE_PRECISION (type))
12538 outer_width = TYPE_PRECISION (type);
12540 if (outer_width > HOST_BITS_PER_WIDE_INT)
12542 mask_hi = ((unsigned HOST_WIDE_INT) -1
12543 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12549 mask_lo = ((unsigned HOST_WIDE_INT) -1
12550 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12552 if (inner_width > HOST_BITS_PER_WIDE_INT)
12554 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12555 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12559 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12560 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12562 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12563 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12565 tem_type = signed_type_for (TREE_TYPE (tem));
12566 tem = fold_convert (tem_type, tem);
12568 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12569 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12571 tem_type = unsigned_type_for (TREE_TYPE (tem));
12572 tem = fold_convert (tem_type, tem);
12579 return fold_convert (type,
12580 fold_build2 (BIT_AND_EXPR,
12581 TREE_TYPE (tem), tem,
12582 fold_convert (TREE_TYPE (tem),
12586 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12587 already handled above. */
12588 if (TREE_CODE (arg0) == BIT_AND_EXPR
12589 && integer_onep (TREE_OPERAND (arg0, 1))
12590 && integer_zerop (op2)
12591 && integer_pow2p (arg1))
12593 tree tem = TREE_OPERAND (arg0, 0);
12595 if (TREE_CODE (tem) == RSHIFT_EXPR
12596 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12597 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12598 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12599 return fold_build2 (BIT_AND_EXPR, type,
12600 TREE_OPERAND (tem, 0), arg1);
12603 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12604 is probably obsolete because the first operand should be a
12605 truth value (that's why we have the two cases above), but let's
12606 leave it in until we can confirm this for all front-ends. */
12607 if (integer_zerop (op2)
12608 && TREE_CODE (arg0) == NE_EXPR
12609 && integer_zerop (TREE_OPERAND (arg0, 1))
12610 && integer_pow2p (arg1)
12611 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12612 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12613 arg1, OEP_ONLY_CONST))
12614 return pedantic_non_lvalue (fold_convert (type,
12615 TREE_OPERAND (arg0, 0)));
12617 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12618 if (integer_zerop (op2)
12619 && truth_value_p (TREE_CODE (arg0))
12620 && truth_value_p (TREE_CODE (arg1)))
12621 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12622 fold_convert (type, arg0),
12625 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12626 if (integer_onep (op2)
12627 && truth_value_p (TREE_CODE (arg0))
12628 && truth_value_p (TREE_CODE (arg1)))
12630 /* Only perform transformation if ARG0 is easily inverted. */
12631 tem = fold_truth_not_expr (arg0);
12633 return fold_build2 (TRUTH_ORIF_EXPR, type,
12634 fold_convert (type, tem),
12638 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12639 if (integer_zerop (arg1)
12640 && truth_value_p (TREE_CODE (arg0))
12641 && truth_value_p (TREE_CODE (op2)))
12643 /* Only perform transformation if ARG0 is easily inverted. */
12644 tem = fold_truth_not_expr (arg0);
12646 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12647 fold_convert (type, tem),
12651 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12652 if (integer_onep (arg1)
12653 && truth_value_p (TREE_CODE (arg0))
12654 && truth_value_p (TREE_CODE (op2)))
12655 return fold_build2 (TRUTH_ORIF_EXPR, type,
12656 fold_convert (type, arg0),
/* Presumably case CALL_EXPR (label elided) -- TODO confirm.  */
12662 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12663 of fold_ternary on them. */
12664 gcc_unreachable ();
12666 case BIT_FIELD_REF:
/* Fold a BIT_FIELD_REF that extracts a single element from a constant
   vector (VECTOR_CST or constant CONSTRUCTOR).  */
12667 if ((TREE_CODE (arg0) == VECTOR_CST
12668 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12669 && type == TREE_TYPE (TREE_TYPE (arg0))
12670 && host_integerp (arg1, 1)
12671 && host_integerp (op2, 1))
12673 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12674 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
/* Require an element-aligned, element-sized extraction within the
   vector's bounds.  (Part of this condition, line 12675-12676, is
   elided from this listing.)  */
12677 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12678 && (idx % width) == 0
12679 && (idx = idx / width)
12680 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12682 tree elements = NULL_TREE;
12684 if (TREE_CODE (arg0) == VECTOR_CST)
12685 elements = TREE_VECTOR_CST_ELTS (arg0);
/* Constant CONSTRUCTOR: build the element list by consing in reverse.  */
12688 unsigned HOST_WIDE_INT idx;
12691 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12692 elements = tree_cons (NULL_TREE, value, elements);
/* Walk to the IDX'th element; a short list yields zero below.  */
12694 while (idx-- > 0 && elements)
12695 elements = TREE_CHAIN (elements);
12697 return TREE_VALUE (elements);
12699 return fold_convert (type, integer_zero_node);
12706 } /* switch (code) */
12709 /* Perform constant folding and related simplification of EXPR.
12710 The related simplifications include x*1 => x, x*0 => 0, etc.,
12711 and application of the associative law.
12712 NOP_EXPR conversions may be removed freely (as long as we
12713 are careful not to change the type of the overall expression).
12714 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12715 but we can constant-fold them if they have constant operands. */
12717 #ifdef ENABLE_FOLD_CHECKING
12718 # define fold(x) fold_1 (x)
12719 static tree fold_1 (tree);
/* Body fragment of fold (named fold_1 when ENABLE_FOLD_CHECKING is
   defined, per the #define above): performs constant folding of EXPR
   by dispatching on operand count to fold_unary/fold_binary/
   fold_ternary.  NOTE(review): the function signature, braces, and the
   switch case labels are elided from this listing; per-case comments
   below are hedged.  */
12725 const tree t = expr;
12726 enum tree_code code = TREE_CODE (t);
12727 enum tree_code_class kind = TREE_CODE_CLASS (code);
12730 /* Return right away if a constant. */
12731 if (kind == tcc_constant)
12734 /* CALL_EXPR-like objects with variable numbers of operands are
12735 treated specially. */
12736 if (kind == tcc_vl_exp)
12738 if (code == CALL_EXPR)
12740 tem = fold_call_expr (expr, false);
12741 return tem ? tem : expr;
12746 if (IS_EXPR_CODE_CLASS (kind)
12747 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12749 tree type = TREE_TYPE (t);
12750 tree op0, op1, op2;
/* Dispatch on the number of operands the tree code takes.  */
12752 switch (TREE_CODE_LENGTH (code))
/* Presumably case 1: unary expression.  */
12755 op0 = TREE_OPERAND (t, 0);
12756 tem = fold_unary (code, type, op0);
12757 return tem ? tem : expr;
/* Presumably case 2: binary expression.  */
12759 op0 = TREE_OPERAND (t, 0);
12760 op1 = TREE_OPERAND (t, 1);
12761 tem = fold_binary (code, type, op0, op1);
12762 return tem ? tem : expr;
/* Presumably case 3: ternary expression.  */
12764 op0 = TREE_OPERAND (t, 0);
12765 op1 = TREE_OPERAND (t, 1);
12766 op2 = TREE_OPERAND (t, 2);
12767 tem = fold_ternary (code, type, op0, op1, op2);
12768 return tem ? tem : expr;
/* Presumably under case CONST_DECL of an outer switch (code): fold the
   declaration's initializer -- TODO confirm against the full source.  */
12777 return fold (DECL_INITIAL (t));
12781 } /* switch (code) */
12784 #ifdef ENABLE_FOLD_CHECKING
12787 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12788 static void fold_check_failed (tree, tree);
12789 void print_fold_checksum (tree);
12791 /* When --enable-checking=fold, compute a digest of expr before
12792 and after actual fold call to see if fold did not accidentally
12793 change original expr. */
/* Body fragment of the ENABLE_FOLD_CHECKING wrapper for fold (per the
   comment above): MD5-digest EXPR, run the real fold_1, digest EXPR
   again, and abort via fold_check_failed if fold_1 mutated its input.
   NOTE(review): the signature line and the htab declaration/disposal
   are elided from this listing.  */
12799 struct md5_ctx ctx;
12800 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-identity hash table used to break cycles while walking EXPR.  */
12803 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12804 md5_init_ctx (&ctx);
12805 fold_checksum_tree (expr, &ctx, ht);
12806 md5_finish_ctx (&ctx, checksum_before);
12809 ret = fold_1 (expr);
/* Recompute the digest of the ORIGINAL expr after folding.  */
12811 md5_init_ctx (&ctx);
12812 fold_checksum_tree (expr, &ctx, ht);
12813 md5_finish_ctx (&ctx, checksum_after);
12816 if (memcmp (checksum_before, checksum_after, 16))
12817 fold_check_failed (expr, ret);
/* Prints the MD5 checksum of EXPR to stderr as 16 hex byte pairs
   followed by a newline.  Debugging aid for --enable-checking=fold.
   (Return-type line and braces are elided in this listing.)  */
12823 print_fold_checksum (tree expr)
12825 struct md5_ctx ctx;
12826 unsigned char checksum[16], cnt;
12829 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12830 md5_init_ctx (&ctx);
12831 fold_checksum_tree (expr, &ctx, ht);
12832 md5_finish_ctx (&ctx, checksum);
12834 for (cnt = 0; cnt < 16; ++cnt)
12835 fprintf (stderr, "%02x", checksum[cnt]);
12836 putc ('\n', stderr);
/* Reports (via internal_error, which does not return) that fold
   modified the tree it was given.  EXPR and RET are currently unused.  */
12840 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12842 internal_error ("fold check: original tree changed by fold");
/* Recursively feeds the bytes of EXPR (and the trees it references)
   into the MD5 context CTX, using hash table HT to visit each node at
   most once.  Fields that fold is allowed to modify (assembler names,
   type caches, pointer-to/reference-to links) are scrubbed from a
   stack copy before hashing.  NOTE(review): several case labels,
   braces and the recursive_label target are elided from this listing;
   placement comments below are hedged.  */
12846 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12849 enum tree_code code;
12850 struct tree_function_decl buf;
/* BUF must be large enough to hold a scrubbed copy of any node we
   copy into it.  */
12855 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12856 <= sizeof (struct tree_function_decl))
12857 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
/* Visit each node only once; presumably returns early when SLOT is
   already occupied (that code is elided).  */
12860 slot = htab_find_slot (ht, expr, INSERT);
12864 code = TREE_CODE (expr);
12865 if (TREE_CODE_CLASS (code) == tcc_declaration
12866 && DECL_ASSEMBLER_NAME_SET_P (expr))
12868 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12869 memcpy ((char *) &buf, expr, tree_size (expr));
12870 expr = (tree) &buf;
12871 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12873 else if (TREE_CODE_CLASS (code) == tcc_type
12874 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12875 || TYPE_CACHED_VALUES_P (expr)
12876 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12878 /* Allow these fields to be modified. */
12879 memcpy ((char *) &buf, expr, tree_size (expr));
12880 expr = (tree) &buf;
12881 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12882 TYPE_POINTER_TO (expr) = NULL;
12883 TYPE_REFERENCE_TO (expr) = NULL;
12884 if (TYPE_CACHED_VALUES_P (expr))
12886 TYPE_CACHED_VALUES_P (expr) = 0;
12887 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node's raw bytes, then recurse into referenced trees.  */
12890 md5_process_bytes (expr, tree_size (expr), ctx);
12891 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12892 if (TREE_CODE_CLASS (code) != tcc_type
12893 && TREE_CODE_CLASS (code) != tcc_declaration
12894 && code != TREE_LIST
12895 && code != SSA_NAME)
12896 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12897 switch (TREE_CODE_CLASS (code))
/* Presumably inside tcc_constant, dispatching on constant kind
   (labels elided): STRING_CST, COMPLEX_CST, VECTOR_CST.  */
12903 md5_process_bytes (TREE_STRING_POINTER (expr),
12904 TREE_STRING_LENGTH (expr), ctx);
12907 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12908 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12911 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12917 case tcc_exceptional:
/* Presumably the TREE_LIST case: hash purpose/value, then iterate
   down the chain via the (elided) recursive_label.  */
12921 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12922 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12923 expr = TREE_CHAIN (expr);
12924 goto recursive_label;
/* Presumably the TREE_VEC case.  */
12927 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12928 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12934 case tcc_expression:
12935 case tcc_reference:
12936 case tcc_comparison:
12939 case tcc_statement:
12941 len = TREE_OPERAND_LENGTH (expr);
12942 for (i = 0; i < len; ++i)
12943 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12945 case tcc_declaration:
12946 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12947 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12948 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12950 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12951 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12952 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12953 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12954 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12956 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12957 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12959 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12961 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12962 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12963 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* Presumably case tcc_type (label elided): hash the type's
   constituent trees.  */
12967 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12968 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12969 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12970 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12971 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12972 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12973 if (INTEGRAL_TYPE_P (expr)
12974 || SCALAR_FLOAT_TYPE_P (expr))
12976 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12977 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12979 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12980 if (TREE_CODE (expr) == RECORD_TYPE
12981 || TREE_CODE (expr) == UNION_TYPE
12982 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12983 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12984 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12991 /* Helper function for outputting the checksum of a tree T. When
12992 debugging with gdb, you can "define mynext" to be "next" followed
12993 by "call debug_fold_checksum (op0)", then just trace down till the
/* GDB helper (see the comment above): prints the MD5 checksum of T to
   stderr as 16 decimal bytes separated by spaces.  (Return-type line
   and braces are elided in this listing.)  */
12997 debug_fold_checksum (tree t)
13000 unsigned char checksum[16];
13001 struct md5_ctx ctx;
13002 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13004 md5_init_ctx (&ctx);
13005 fold_checksum_tree (t, &ctx, ht);
13006 md5_finish_ctx (&ctx, checksum);
13009 for (i = 0; i < 16; i++)
13010 fprintf (stderr, "%d ", checksum[i]);
13012 fprintf (stderr, "\n");
13017 /* Fold a unary tree expression with code CODE of type TYPE with an
13018 operand OP0. Return a folded expression if successful. Otherwise,
13019 return a tree expression with code CODE of type TYPE with an
/* fold_unary wrapper: returns the folded tree, or a fresh CODE node
   built by build1_stat when folding fails.  Under ENABLE_FOLD_CHECKING
   it verifies via MD5 digests that folding did not mutate OP0.
   (Return-type line, braces and some checking lines are elided.)  */
13023 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13026 #ifdef ENABLE_FOLD_CHECKING
13027 unsigned char checksum_before[16], checksum_after[16];
13028 struct md5_ctx ctx;
/* Digest OP0 before folding.  */
13031 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13032 md5_init_ctx (&ctx);
13033 fold_checksum_tree (op0, &ctx, ht);
13034 md5_finish_ctx (&ctx, checksum_before);
13038 tem = fold_unary (code, type, op0);
/* Folding failed: build the plain expression instead.  */
13040 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13042 #ifdef ENABLE_FOLD_CHECKING
13043 md5_init_ctx (&ctx);
13044 fold_checksum_tree (op0, &ctx, ht);
13045 md5_finish_ctx (&ctx, checksum_after);
13048 if (memcmp (checksum_before, checksum_after, 16))
13049 fold_check_failed (op0, tem);
13054 /* Fold a binary tree expression with code CODE of type TYPE with
13055 operands OP0 and OP1. Return a folded expression if successful.
13056 Otherwise, return a tree expression with code CODE of type TYPE
13057 with operands OP0 and OP1. */
/* fold_binary wrapper: returns the folded tree, or a fresh CODE node
   built by build2_stat when folding fails.  Under ENABLE_FOLD_CHECKING
   it verifies via MD5 digests that neither OP0 nor OP1 was mutated.
   (Return-type line, braces and some checking lines are elided.)  */
13060 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13064 #ifdef ENABLE_FOLD_CHECKING
13065 unsigned char checksum_before_op0[16],
13066 checksum_before_op1[16],
13067 checksum_after_op0[16],
13068 checksum_after_op1[16];
13069 struct md5_ctx ctx;
/* Digest both operands before folding.  */
13072 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13073 md5_init_ctx (&ctx);
13074 fold_checksum_tree (op0, &ctx, ht);
13075 md5_finish_ctx (&ctx, checksum_before_op0);
13078 md5_init_ctx (&ctx);
13079 fold_checksum_tree (op1, &ctx, ht);
13080 md5_finish_ctx (&ctx, checksum_before_op1);
13084 tem = fold_binary (code, type, op0, op1);
/* Folding failed: build the plain expression instead.  */
13086 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13088 #ifdef ENABLE_FOLD_CHECKING
13089 md5_init_ctx (&ctx);
13090 fold_checksum_tree (op0, &ctx, ht);
13091 md5_finish_ctx (&ctx, checksum_after_op0);
13094 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13095 fold_check_failed (op0, tem);
13097 md5_init_ctx (&ctx);
13098 fold_checksum_tree (op1, &ctx, ht);
13099 md5_finish_ctx (&ctx, checksum_after_op1);
13102 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13103 fold_check_failed (op1, tem);
13108 /* Fold a ternary tree expression with code CODE of type TYPE with
13109 operands OP0, OP1, and OP2. Return a folded expression if
13110 successful. Otherwise, return a tree expression with code CODE of
13111 type TYPE with operands OP0, OP1, and OP2. */
/* fold_ternary wrapper: returns the folded tree, or a fresh CODE node
   built by build3_stat when folding fails.  Under ENABLE_FOLD_CHECKING
   it verifies via MD5 digests that OP0/OP1/OP2 were not mutated.
   (Return-type line, braces and some checking lines are elided.)  */
13114 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13118 #ifdef ENABLE_FOLD_CHECKING
13119 unsigned char checksum_before_op0[16],
13120 checksum_before_op1[16],
13121 checksum_before_op2[16],
13122 checksum_after_op0[16],
13123 checksum_after_op1[16],
13124 checksum_after_op2[16];
13125 struct md5_ctx ctx;
/* Digest all three operands before folding.  */
13128 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13129 md5_init_ctx (&ctx);
13130 fold_checksum_tree (op0, &ctx, ht);
13131 md5_finish_ctx (&ctx, checksum_before_op0);
13134 md5_init_ctx (&ctx);
13135 fold_checksum_tree (op1, &ctx, ht);
13136 md5_finish_ctx (&ctx, checksum_before_op1);
13139 md5_init_ctx (&ctx);
13140 fold_checksum_tree (op2, &ctx, ht);
13141 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length expressions must go through fold_build_call_array.  */
13145 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13146 tem = fold_ternary (code, type, op0, op1, op2);
/* Folding failed: build the plain expression instead.  */
13148 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13150 #ifdef ENABLE_FOLD_CHECKING
13151 md5_init_ctx (&ctx);
13152 fold_checksum_tree (op0, &ctx, ht);
13153 md5_finish_ctx (&ctx, checksum_after_op0);
13156 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13157 fold_check_failed (op0, tem);
13159 md5_init_ctx (&ctx);
13160 fold_checksum_tree (op1, &ctx, ht);
13161 md5_finish_ctx (&ctx, checksum_after_op1);
13164 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13165 fold_check_failed (op1, tem);
13167 md5_init_ctx (&ctx);
13168 fold_checksum_tree (op2, &ctx, ht);
13169 md5_finish_ctx (&ctx, checksum_after_op2);
13172 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13173 fold_check_failed (op2, tem);
13178 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13179 arguments in ARGARRAY, and a null static chain.
13180 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13181 of type TYPE from the given operands as constructed by build_call_array. */
/* Folds a call to FN with NARGS arguments from ARGARRAY via
   fold_builtin_call_array (per the comment above).  Under
   ENABLE_FOLD_CHECKING it verifies via MD5 digests that neither FN nor
   any argument was mutated.  (Return-type line, braces and some
   checking lines are elided.)  */
13184 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13187 #ifdef ENABLE_FOLD_CHECKING
13188 unsigned char checksum_before_fn[16],
13189 checksum_before_arglist[16],
13190 checksum_after_fn[16],
13191 checksum_after_arglist[16];
13192 struct md5_ctx ctx;
/* Digest the callee and the whole argument list before folding.  */
13196 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13197 md5_init_ctx (&ctx);
13198 fold_checksum_tree (fn, &ctx, ht);
13199 md5_finish_ctx (&ctx, checksum_before_fn);
13202 md5_init_ctx (&ctx);
13203 for (i = 0; i < nargs; i++)
13204 fold_checksum_tree (argarray[i], &ctx, ht);
13205 md5_finish_ctx (&ctx, checksum_before_arglist);
13209 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13211 #ifdef ENABLE_FOLD_CHECKING
13212 md5_init_ctx (&ctx);
13213 fold_checksum_tree (fn, &ctx, ht);
13214 md5_finish_ctx (&ctx, checksum_after_fn);
13217 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13218 fold_check_failed (fn, tem);
13220 md5_init_ctx (&ctx);
13221 for (i = 0; i < nargs; i++)
13222 fold_checksum_tree (argarray[i], &ctx, ht);
13223 md5_finish_ctx (&ctx, checksum_after_arglist);
13226 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13227 fold_check_failed (NULL_TREE, tem);
13232 /* Perform constant folding and related simplification of initializer
13233 expression EXPR. These behave identically to "fold_buildN" but ignore
13234 potential run-time traps and exceptions that fold must preserve. */
/* Saves the trap/rounding-related flags and forces "initializer
   folding" mode.  NOTE(review): line 13245 (presumably the reset of
   flag_trapv) is elided from this listing -- confirm against the full
   source.  Paired with END_FOLD_INIT below.  */
13236 #define START_FOLD_INIT \
13237 int saved_signaling_nans = flag_signaling_nans;\
13238 int saved_trapping_math = flag_trapping_math;\
13239 int saved_rounding_math = flag_rounding_math;\
13240 int saved_trapv = flag_trapv;\
13241 int saved_folding_initializer = folding_initializer;\
13242 flag_signaling_nans = 0;\
13243 flag_trapping_math = 0;\
13244 flag_rounding_math = 0;\
13246 folding_initializer = 1;
/* Restores every flag saved by START_FOLD_INIT.  */
13248 #define END_FOLD_INIT \
13249 flag_signaling_nans = saved_signaling_nans;\
13250 flag_trapping_math = saved_trapping_math;\
13251 flag_rounding_math = saved_rounding_math;\
13252 flag_trapv = saved_trapv;\
13253 folding_initializer = saved_folding_initializer;
/* fold_build1 with trap/rounding flags suppressed (via the elided
   START_FOLD_INIT/END_FOLD_INIT invocations) for initializer folding.  */
13256 fold_build1_initializer (enum tree_code code, tree type, tree op)
13261 result = fold_build1 (code, type, op);
/* fold_build2 with trap/rounding flags suppressed (via the elided
   START_FOLD_INIT/END_FOLD_INIT invocations) for initializer folding.  */
13268 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13273 result = fold_build2 (code, type, op0, op1);
/* fold_build3 with trap/rounding flags suppressed (via the elided
   START_FOLD_INIT/END_FOLD_INIT invocations) for initializer folding.  */
13280 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13286 result = fold_build3 (code, type, op0, op1, op2);
/* fold_build_call_array with trap/rounding flags suppressed (via the
   elided START_FOLD_INIT/END_FOLD_INIT invocations) for initializer
   folding.  */
13293 fold_build_call_array_initializer (tree type, tree fn,
13294 int nargs, tree *argarray)
13299 result = fold_build_call_array (type, fn, nargs, argarray);
13305 #undef START_FOLD_INIT
13306 #undef END_FOLD_INIT
13308 /* Determine if first argument is a multiple of second argument. Return 0 if
13309 it is not, or we cannot easily determine it to be.
13311 An example of the sort of thing we care about (at this point; this routine
13312 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13313 fold cases do now) is discovering that
13315 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13321 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13323 This code also handles discovering that
13325 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13327 is a multiple of 8 so we don't have to worry about dealing with a
13328 possible remainder.
13330 Note that we *look* inside a SAVE_EXPR only to determine how it was
13331 calculated; it is not safe for fold to do much of anything else with the
13332 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13333 at run time. For example, the latter example above *cannot* be implemented
13334 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13335 evaluation time of the original SAVE_EXPR is not necessarily the same at
13336 the time the new expression is evaluated. The only optimization of this
13337 sort that would be valid is changing
13339 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13343 SAVE_EXPR (I) * SAVE_EXPR (J)
13345 (where the same SAVE_EXPR (J) is used in the original and the
13346 transformed version). */
13349 multiple_of_p (tree type, tree top, tree bottom)
      /* Trivially a multiple of itself.  */
13351 if (operand_equal_p (top, bottom, 0))
      /* Only integral types are analyzed.  */
13354 if (TREE_CODE (type) != INTEGER_TYPE)
      /* Dispatch on the form of TOP.  NOTE(review): the case labels for
         this switch (BIT_AND_EXPR, MULT_EXPR, PLUS/MINUS, LSHIFT_EXPR,
         conversions, INTEGER_CST, default) are elided in this excerpt;
         each return below belongs to one of them.  */
13357 switch (TREE_CODE (top))
13360 /* Bitwise and provides a power of two multiple. If the mask is
13361 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13362 if (!integer_pow2p (bottom))
      /* Multiplication: either factor being a multiple suffices.  */
13367 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13368 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
      /* Addition/subtraction: both operands must be multiples.  */
13372 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13373 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
      /* Left shift by a constant: rewrite as a multiplication by a
         power of two and recurse on the folded constant.  */
13376 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13380 op1 = TREE_OPERAND (top, 1);
13381 /* const_binop may not detect overflow correctly,
13382 so check for it explicitly here. */
13383 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13384 > TREE_INT_CST_LOW (op1)
13385 && TREE_INT_CST_HIGH (op1) == 0
13386 && 0 != (t1 = fold_convert (type,
13387 const_binop (LSHIFT_EXPR,
13390 && !TREE_OVERFLOW (t1))
13391 return multiple_of_p (type, t1, bottom);
13396 /* Can't handle conversions from non-integral or wider integral type. */
13397 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13398 || (TYPE_PRECISION (type)
13399 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13402 /* .. fall through ... */
      /* Look through the conversion / SAVE_EXPR to its operand.  */
13405 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
      /* Constant TOP: refuse zero BOTTOM and, for unsigned types,
         negative constants (the modulus below would be wrong).  */
13408 if (TREE_CODE (bottom) != INTEGER_CST
13409 || integer_zerop (bottom)
13410 || (TYPE_UNSIGNED (type)
13411 && (tree_int_cst_sgn (top) < 0
13412 || tree_int_cst_sgn (bottom) < 0)))
      /* Multiple iff TOP mod BOTTOM is exactly zero.  */
13414 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13422 /* Return true if `t' is known to be non-negative. If the return
13423 value is based on the assumption that signed overflow is undefined,
13424 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13425 *STRICT_OVERFLOW_P. */
/* Conservative analysis: returns true only when T is provably
   non-negative; a false return means "don't know".  Sets
   *STRICT_OVERFLOW_P when the answer relies on signed overflow being
   undefined.  NOTE(review): many case labels, braces and fall-through
   returns of the big switch are elided in this excerpt.  */
13428 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13430 if (t == error_mark_node)
      /* Unsigned values are trivially non-negative.  */
13433 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13436 switch (TREE_CODE (t))
13439 /* Query VRP to see if it has recorded any information about
13440 the range of this object. */
13441 return ssa_name_nonnegative_p (t);
13444 /* We can't return 1 if flag_wrapv is set because
13445 ABS_EXPR<INT_MIN> = INT_MIN. */
13446 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
      /* ABS is non-negative only when INT_MIN cannot wrap, i.e. when
         signed overflow is undefined -- record that assumption.  */
13448 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13450 *strict_overflow_p = true;
      /* Integer constant: direct sign test.  */
13456 return tree_int_cst_sgn (t) >= 0;
      /* Real constant: test the sign bit.  */
13459 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13461 case POINTER_PLUS_EXPR:
      /* For floats, a sum of non-negatives is non-negative.  */
13463 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13464 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13466 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13467 strict_overflow_p));
13469 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13470 both unsigned and at least 2 bits shorter than the result. */
13471 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13472 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13473 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13475 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13476 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13477 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13478 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13480 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13481 TYPE_PRECISION (inner2)) + 1;
13482 return prec < TYPE_PRECISION (TREE_TYPE (t));
      /* Multiplication (presumably MULT_EXPR -- case label elided).  */
13488 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13490 /* x * x for floating point x is always non-negative. */
13491 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13493 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13495 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13496 strict_overflow_p));
13499 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13500 both unsigned and their total bits is shorter than the result. */
13501 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13502 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13503 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13505 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13506 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13507 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13508 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13509 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13510 < TYPE_PRECISION (TREE_TYPE (t));
      /* MIN/MAX-style node (case label elided): either operand being
         non-negative suffices here.  */
13516 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13518 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13519 strict_overflow_p));
13525 case TRUNC_DIV_EXPR:
13526 case CEIL_DIV_EXPR:
13527 case FLOOR_DIV_EXPR:
13528 case ROUND_DIV_EXPR:
      /* A quotient of non-negatives is non-negative.  */
13529 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13531 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13532 strict_overflow_p));
13534 case TRUNC_MOD_EXPR:
13535 case CEIL_MOD_EXPR:
13536 case FLOOR_MOD_EXPR:
13537 case ROUND_MOD_EXPR:
13539 case NON_LVALUE_EXPR:
13541 case FIX_TRUNC_EXPR:
      /* Sign follows the first operand for these.  */
13542 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13543 strict_overflow_p);
13545 case COMPOUND_EXPR:
13547 case GIMPLE_MODIFY_STMT:
      /* Value is operand 1 (the RHS / second expression).  */
13548 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13549 strict_overflow_p);
      /* BIND_EXPR-style node: value is the last expression of the body.  */
13552 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13553 strict_overflow_p);
      /* COND_EXPR-style node: both arms must be non-negative.  */
13556 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13558 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13559 strict_overflow_p));
      /* Conversion (NOP_EXPR -- case label elided): reason about the
         inner and outer types pairwise.  */
13563 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13564 tree outer_type = TREE_TYPE (t);
13566 if (TREE_CODE (outer_type) == REAL_TYPE)
13568 if (TREE_CODE (inner_type) == REAL_TYPE)
13569 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13570 strict_overflow_p);
13571 if (TREE_CODE (inner_type) == INTEGER_TYPE)
      /* int -> float: unsigned sources are always non-negative.  */
13573 if (TYPE_UNSIGNED (inner_type))
13575 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13576 strict_overflow_p);
13579 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13581 if (TREE_CODE (inner_type) == REAL_TYPE)
13582 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13583 strict_overflow_p);
      /* int -> int: a widening of an unsigned type stays non-negative.  */
13584 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13585 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13586 && TYPE_UNSIGNED (inner_type);
      /* TARGET_EXPR: inspect what gets stored in the temporary slot.  */
13593 tree temp = TARGET_EXPR_SLOT (t);
13594 t = TARGET_EXPR_INITIAL (t);
13596 /* If the initializer is non-void, then it's a normal expression
13597 that will be assigned to the slot. */
13598 if (!VOID_TYPE_P (t))
13599 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13601 /* Otherwise, the initializer sets the slot in some way. One common
13602 way is an assignment statement at the end of the initializer. */
13605 if (TREE_CODE (t) == BIND_EXPR)
13606 t = expr_last (BIND_EXPR_BODY (t));
13607 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13608 || TREE_CODE (t) == TRY_CATCH_EXPR)
13609 t = expr_last (TREE_OPERAND (t, 0));
13610 else if (TREE_CODE (t) == STATEMENT_LIST)
13615 if ((TREE_CODE (t) == MODIFY_EXPR
13616 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13617 && GENERIC_TREE_OPERAND (t, 0) == temp)
13618 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13619 strict_overflow_p);
      /* CALL_EXPR: known math builtins with provably non-negative
         results, or results whose sign follows an argument.  */
13626 tree fndecl = get_callee_fndecl (t);
13627 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13628 switch (DECL_FUNCTION_CODE (fndecl))
      /* These builtins always return a non-negative value.  */
13630 CASE_FLT_FN (BUILT_IN_ACOS):
13631 CASE_FLT_FN (BUILT_IN_ACOSH):
13632 CASE_FLT_FN (BUILT_IN_CABS):
13633 CASE_FLT_FN (BUILT_IN_COSH):
13634 CASE_FLT_FN (BUILT_IN_ERFC):
13635 CASE_FLT_FN (BUILT_IN_EXP):
13636 CASE_FLT_FN (BUILT_IN_EXP10):
13637 CASE_FLT_FN (BUILT_IN_EXP2):
13638 CASE_FLT_FN (BUILT_IN_FABS):
13639 CASE_FLT_FN (BUILT_IN_FDIM):
13640 CASE_FLT_FN (BUILT_IN_HYPOT):
13641 CASE_FLT_FN (BUILT_IN_POW10):
13642 CASE_INT_FN (BUILT_IN_FFS):
13643 CASE_INT_FN (BUILT_IN_PARITY):
13644 CASE_INT_FN (BUILT_IN_POPCOUNT):
13645 case BUILT_IN_BSWAP32:
13646 case BUILT_IN_BSWAP64:
13650 CASE_FLT_FN (BUILT_IN_SQRT):
13651 /* sqrt(-0.0) is -0.0. */
13652 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13654 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13655 strict_overflow_p);
13657 CASE_FLT_FN (BUILT_IN_ASINH):
13658 CASE_FLT_FN (BUILT_IN_ATAN):
13659 CASE_FLT_FN (BUILT_IN_ATANH):
13660 CASE_FLT_FN (BUILT_IN_CBRT):
13661 CASE_FLT_FN (BUILT_IN_CEIL):
13662 CASE_FLT_FN (BUILT_IN_ERF):
13663 CASE_FLT_FN (BUILT_IN_EXPM1):
13664 CASE_FLT_FN (BUILT_IN_FLOOR):
13665 CASE_FLT_FN (BUILT_IN_FMOD):
13666 CASE_FLT_FN (BUILT_IN_FREXP):
13667 CASE_FLT_FN (BUILT_IN_LCEIL):
13668 CASE_FLT_FN (BUILT_IN_LDEXP):
13669 CASE_FLT_FN (BUILT_IN_LFLOOR):
13670 CASE_FLT_FN (BUILT_IN_LLCEIL):
13671 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13672 CASE_FLT_FN (BUILT_IN_LLRINT):
13673 CASE_FLT_FN (BUILT_IN_LLROUND):
13674 CASE_FLT_FN (BUILT_IN_LRINT):
13675 CASE_FLT_FN (BUILT_IN_LROUND):
13676 CASE_FLT_FN (BUILT_IN_MODF):
13677 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13678 CASE_FLT_FN (BUILT_IN_RINT):
13679 CASE_FLT_FN (BUILT_IN_ROUND):
13680 CASE_FLT_FN (BUILT_IN_SCALB):
13681 CASE_FLT_FN (BUILT_IN_SCALBLN):
13682 CASE_FLT_FN (BUILT_IN_SCALBN):
13683 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13684 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13685 CASE_FLT_FN (BUILT_IN_SINH):
13686 CASE_FLT_FN (BUILT_IN_TANH):
13687 CASE_FLT_FN (BUILT_IN_TRUNC):
13688 /* True if the 1st argument is nonnegative. */
13689 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13690 strict_overflow_p);
13692 CASE_FLT_FN (BUILT_IN_FMAX):
13693 /* True if the 1st OR 2nd arguments are nonnegative. */
13694 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13696 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13697 strict_overflow_p)));
13699 CASE_FLT_FN (BUILT_IN_FMIN):
13700 /* True if the 1st AND 2nd arguments are nonnegative. */
13701 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13703 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13704 strict_overflow_p)));
13706 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13707 /* True if the 2nd argument is nonnegative. */
13708 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13709 strict_overflow_p);
13711 CASE_FLT_FN (BUILT_IN_POWI):
13712 /* True if the 1st argument is nonnegative or the second
13713 argument is an even integer. */
13714 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13716 tree arg1 = CALL_EXPR_ARG (t, 1);
13717 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13720 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13721 strict_overflow_p);
13723 CASE_FLT_FN (BUILT_IN_POW):
13724 /* True if the 1st argument is nonnegative or the second
13725 argument is an even integer valued real. */
13726 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
      /* Check whether the real exponent is an even integer by
         round-tripping it through an integer and comparing.  */
13731 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13732 n = real_to_integer (&c);
13735 REAL_VALUE_TYPE cint;
13736 real_from_integer (&cint, VOIDmode, n,
13737 n < 0 ? -1 : 0, 0);
13738 if (real_identical (&c, &cint))
13742 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13743 strict_overflow_p);
13750 /* ... fall through ... */
      /* Default: truth values are 0/1 hence non-negative, except for a
         signed 1-bit type whose values are -1 and 0.  */
13754 tree type = TREE_TYPE (t);
13755 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13756 && truth_value_p (TREE_CODE (t)))
13757 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13758 have a signed:1 type (where the value is -1 and 0). */
13763 /* We don't know sign of `t', so be conservative and return false. */
13767 /* Return true if `t' is known to be non-negative. Handle warnings
13768 about undefined signed overflow. */
/* Public wrapper: runs the _warnv_p analysis and, when the result
   depends on undefined signed overflow, emits the -Wstrict-overflow
   diagnostic on the caller's behalf.  */
13771 tree_expr_nonnegative_p (tree t)
13773 bool ret, strict_overflow_p;
13775 strict_overflow_p = false;
13776 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13777 if (strict_overflow_p)
13778 fold_overflow_warning (("assuming signed overflow does not occur when "
13779 "determining that expression is always "
13781 WARN_STRICT_OVERFLOW_MISC);
13785 /* Return true when T is an address and is known to be nonzero.
13786 For floating point we further ensure that T is not denormal.
13787 Similar logic is present in nonzero_address in rtlanal.h.
13789 If the return value is based on the assumption that signed overflow
13790 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13791 change *STRICT_OVERFLOW_P. */
/* Conservative analysis: true only when T is provably nonzero; false
   means "don't know".  Sets *STRICT_OVERFLOW_P when the answer relies
   on signed overflow being undefined.  NOTE(review): several case
   labels/braces of the switch are elided in this excerpt.  */
13794 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13796 tree type = TREE_TYPE (t);
13797 bool sub_strict_overflow_p;
13799 /* Doing something useful for floating point would need more work. */
13800 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13803 switch (TREE_CODE (t))
13806 /* Query VRP to see if it has recorded any information about
13807 the range of this object. */
13808 return ssa_name_nonzero_p (t);
      /* Sign-changing unary node: nonzero iff the operand is.  */
13811 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13812 strict_overflow_p);
      /* INTEGER_CST: directly test for zero.  */
13815 return !integer_zerop (t);
13817 case POINTER_PLUS_EXPR:
13819 if (TYPE_OVERFLOW_UNDEFINED (type))
13821 /* With the presence of negative values it is hard
13822 to say something. */
13823 sub_strict_overflow_p = false;
13824 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13825 &sub_strict_overflow_p)
13826 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13827 &sub_strict_overflow_p))
13829 /* One of operands must be positive and the other non-negative. */
13830 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13831 overflows, on a twos-complement machine the sum of two
13832 nonnegative numbers can never be zero. */
13833 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13835 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13836 strict_overflow_p));
      /* Multiplication (case label elided): nonzero * nonzero is
         nonzero only when signed overflow is undefined.  */
13841 if (TYPE_OVERFLOW_UNDEFINED (type))
13843 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13845 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13846 strict_overflow_p))
13848 *strict_overflow_p = true;
      /* Conversion: look through widenings only; a narrowing could
         discard the nonzero bits.  */
13856 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13857 tree outer_type = TREE_TYPE (t);
13859 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13860 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13861 strict_overflow_p));
      /* ADDR_EXPR: the address of most objects is nonzero ...  */
13867 tree base = get_base_address (TREE_OPERAND (t, 0));
13872 /* Weak declarations may link to NULL. */
13873 if (VAR_OR_FUNCTION_DECL_P (base))
13874 return !DECL_WEAK (base);
13876 /* Constants are never weak. */
13877 if (CONSTANT_CLASS_P (base))
      /* COND_EXPR (case label elided): both arms must be nonzero.  */
13884 sub_strict_overflow_p = false;
13885 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13886 &sub_strict_overflow_p)
13887 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13888 &sub_strict_overflow_p))
13890 if (sub_strict_overflow_p)
13891 *strict_overflow_p = true;
      /* MIN_EXPR (presumably -- label elided): both operands nonzero.  */
13897 sub_strict_overflow_p = false;
13898 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13899 &sub_strict_overflow_p)
13900 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13901 &sub_strict_overflow_p))
13903 if (sub_strict_overflow_p)
13904 *strict_overflow_p = true;
      /* MAX_EXPR: several sufficient conditions follow.  */
13909 sub_strict_overflow_p = false;
13910 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13911 &sub_strict_overflow_p))
13913 if (sub_strict_overflow_p)
13914 *strict_overflow_p = true;
13916 /* When both operands are nonzero, then MAX must be too. */
13917 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13918 strict_overflow_p))
13921 /* MAX where operand 0 is positive is positive. */
13922 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13923 strict_overflow_p);
13925 /* MAX where operand 1 is positive is positive. */
13926 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13927 &sub_strict_overflow_p)
13928 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13929 &sub_strict_overflow_p))
13931 if (sub_strict_overflow_p)
13932 *strict_overflow_p = true;
13937 case COMPOUND_EXPR:
13939 case GIMPLE_MODIFY_STMT:
      /* Value is operand 1 (the RHS / second expression).  */
13941 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13942 strict_overflow_p);
13945 case NON_LVALUE_EXPR:
13946 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13947 strict_overflow_p);
      /* BIT_IOR-style node: either operand nonzero suffices.  */
13950 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13952 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13953 strict_overflow_p));
      /* CALL_EXPR: alloca never returns NULL.  */
13956 return alloca_call_p (t);
13964 /* Return true when T is an address and is known to be nonzero.
13965 Handle warnings about undefined signed overflow. */
/* Public wrapper: runs the _warnv_p analysis and emits the
   -Wstrict-overflow diagnostic when the result depends on undefined
   signed overflow.  */
13968 tree_expr_nonzero_p (tree t)
13970 bool ret, strict_overflow_p;
13972 strict_overflow_p = false;
13973 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13974 if (strict_overflow_p)
13975 fold_overflow_warning (("assuming signed overflow does not occur when "
13976 "determining that expression is always "
13978 WARN_STRICT_OVERFLOW_MISC);
13982 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13983 attempt to fold the expression to a constant without modifying TYPE,
13986 If the expression could be simplified to a constant, then return
13987 the constant. If the expression would not be simplified to a
13988 constant, then return NULL_TREE. */
/* Fold CODE (OP0, OP1) of TYPE and keep the result only when it
   simplified all the way to a constant; otherwise NULL_TREE.  */
13991 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13993 tree tem = fold_binary (code, type, op0, op1);
13994 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13997 /* Given the components of a unary expression CODE, TYPE and OP0,
13998 attempt to fold the expression to a constant without modifying
14001 If the expression could be simplified to a constant, then return
14002 the constant. If the expression would not be simplified to a
14003 constant, then return NULL_TREE. */
/* Unary analogue of fold_binary_to_constant: constant result or
   NULL_TREE.  */
14006 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14008 tree tem = fold_unary (code, type, op0);
14009 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14012 /* If EXP represents referencing an element in a constant string
14013 (either via pointer arithmetic or array indexing), return the
14014 tree representing the value accessed, otherwise return NULL. */
/* If EXP reads one character out of a constant STRING_CST (via *p or
   array indexing with an in-range constant index), return that
   character as a constant of EXP's type; otherwise NULL.  */
14017 fold_read_from_constant_string (tree exp)
14019 if ((TREE_CODE (exp) == INDIRECT_REF
14020 || TREE_CODE (exp) == ARRAY_REF)
14021 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
14023 tree exp1 = TREE_OPERAND (exp, 0);
14027 if (TREE_CODE (exp) == INDIRECT_REF)
      /* *p case: string_constant also yields the byte offset.  */
14028 string = string_constant (exp1, &index);
      /* ARRAY_REF case: normalize the index against the array's lower
         bound.  */
14031 tree low_bound = array_ref_low_bound (exp);
14032 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14034 /* Optimize the special-case of a zero lower bound.
14036 We convert the low_bound to sizetype to avoid some problems
14037 with constant folding. (E.g. suppose the lower bound is 1,
14038 and its mode is QI. Without the conversion, (ARRAY
14039 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14040 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14041 if (! integer_zerop (low_bound))
14042 index = size_diffop (index, fold_convert (sizetype, low_bound))
      /* Only single-byte elements with a known in-range constant index
         are folded.  */
14048 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14049 && TREE_CODE (string) == STRING_CST
14050 && TREE_CODE (index) == INTEGER_CST
14051 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14052 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14054 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14055 return fold_convert (TREE_TYPE (exp),
14056 build_int_cst (NULL_TREE,
14057 (TREE_STRING_POINTER (string)
14058 [TREE_INT_CST_LOW (index)])));
14063 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14064 an integer constant or real constant.
14066 TYPE is the type of the result. */
14069 fold_negate_const (tree arg0, tree type)
14071 tree t = NULL_TREE;
14073 switch (TREE_CODE (arg0))
      /* INTEGER_CST: negate the double-int pair; overflow is sticky
         (propagated from ARG0) but only meaningful for signed types.  */
14077 unsigned HOST_WIDE_INT low;
14078 HOST_WIDE_INT high;
14079 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14080 TREE_INT_CST_HIGH (arg0),
14082 t = force_fit_type_double (type, low, high, 1,
14083 (overflow | TREE_OVERFLOW (arg0))
14084 && !TYPE_UNSIGNED (type));
      /* REAL_CST: flip the sign of the real value.  */
14089 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      /* Callers guarantee ARG0 is an integer or real constant.  */
14093 gcc_unreachable ();
14099 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14100 an integer constant or real constant.
14102 TYPE is the type of the result. */
14105 fold_abs_const (tree arg0, tree type)
14107 tree t = NULL_TREE;
14109 switch (TREE_CODE (arg0))
14112 /* If the value is unsigned, then the absolute value is
14113 the same as the ordinary value. */
14114 if (TYPE_UNSIGNED (type))
14116 /* Similarly, if the value is non-negative. */
14117 else if (INT_CST_LT (integer_minus_one_node, arg0))
14119 /* If the value is negative, then the absolute value is
      its negation -- computed with neg_double, overflow sticky.  */
14123 unsigned HOST_WIDE_INT low;
14124 HOST_WIDE_INT high;
14125 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14126 TREE_INT_CST_HIGH (arg0),
14128 t = force_fit_type_double (type, low, high, -1,
14129 overflow | TREE_OVERFLOW (arg0));
      /* REAL_CST: clear the sign only when it is set.  */
14134 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14135 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      /* Callers guarantee ARG0 is an integer or real constant.  */
14141 gcc_unreachable ();
14147 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14148 constant. TYPE is the type of the result. */
/* Bitwise complement of the INTEGER_CST ARG0, fitted into TYPE;
   overflow status is carried over from ARG0.  */
14151 fold_not_const (tree arg0, tree type)
14153 tree t = NULL_TREE;
14155 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14157 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14158 ~TREE_INT_CST_HIGH (arg0), 0,
14159 TREE_OVERFLOW (arg0));
14164 /* Given CODE, a relational operator, the target type, TYPE and two
14165 constant operands OP0 and OP1, return the result of the
14166 relational operation. If the result is not a compile time
14167 constant, then return NULL_TREE. */
14170 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14172 int result, invert;
14174 /* From here on, the only cases we handle are when the result is
14175 known to be a constant. */
14177 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14179 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14180 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14182 /* Handle the cases where either operand is a NaN. */
14183 if (real_isnan (c0) || real_isnan (c1))
14193 case UNORDERED_EXPR:
      /* With -ftrapping-math, ordered comparisons on a NaN cannot be
         folded away (they may raise an exception at run time).  */
14207 if (flag_trapping_math)
14213 gcc_unreachable ();
14216 return constant_boolean_node (result, type);
      /* Neither operand is a NaN: delegate to real_compare.  */
14219 return constant_boolean_node (real_compare (code, c0, c1), type);
14222 /* Handle equality/inequality of complex constants. */
14223 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14225 tree rcond = fold_relational_const (code, type,
14226 TREE_REALPART (op0),
14227 TREE_REALPART (op1));
14228 tree icond = fold_relational_const (code, type,
14229 TREE_IMAGPART (op0),
14230 TREE_IMAGPART (op1));
14231 if (code == EQ_EXPR)
14232 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14233 else if (code == NE_EXPR)
14234 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14239 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14241 To compute GT, swap the arguments and do LT.
14242 To compute GE, do LT and invert the result.
14243 To compute LE, swap the arguments, do LT and invert the result.
14244 To compute NE, do EQ and invert the result.
14246 Therefore, the code below must handle only EQ and LT. */
14248 if (code == LE_EXPR || code == GT_EXPR)
14253 code = swap_tree_comparison (code);
14256 /* Note that it is safe to invert for real values here because we
14257 have already handled the one case that it matters. */
14260 if (code == NE_EXPR || code == GE_EXPR)
14263 code = invert_tree_comparison (code, false);
14266 /* Compute a result for LT or EQ if args permit;
14267 Otherwise return T. */
14268 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14270 if (code == EQ_EXPR)
14271 result = tree_int_cst_equal (op0, op1);
      /* Signedness of the operand type selects the comparison.  */
14272 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14273 result = INT_CST_LT_UNSIGNED (op0, op1);
14275 result = INT_CST_LT (op0, op1);
14282 return constant_boolean_node (result, type);
14285 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14286 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14290 fold_build_cleanup_point_expr (tree type, tree expr)
14292 /* If the expression does not have side effects then we don't have to wrap
14293 it with a cleanup point expression. */
14294 if (!TREE_SIDE_EFFECTS (expr))
14297 /* If the expression is a return, check to see if the expression inside the
14298 return has no side effects or the right hand side of the modify expression
14299 inside the return. If either don't have side effects set we don't need to
14300 wrap the expression in a cleanup point expression. Note we don't check the
14301 left hand side of the modify because it should always be a return decl. */
14302 if (TREE_CODE (expr) == RETURN_EXPR)
14304 tree op = TREE_OPERAND (expr, 0);
14305 if (!op || !TREE_SIDE_EFFECTS (op))
      /* Descend into the RHS of the MODIFY inside the return.  */
14307 op = TREE_OPERAND (op, 1);
14308 if (!TREE_SIDE_EFFECTS (op))
      /* Side effects present: a cleanup point is required.  */
14312 return build1 (CLEANUP_POINT_EXPR, type, expr);
14315 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14316 of an indirection through OP0, or NULL_TREE if no simplification is
/* Worker shared by fold_indirect_ref and build_fold_indirect_ref:
   simplify *OP0 to TYPE where a pattern below matches, else NULL_TREE.  */
14320 fold_indirect_ref_1 (tree type, tree op0)
14326 subtype = TREE_TYPE (sub);
14327 if (!POINTER_TYPE_P (subtype))
14330 if (TREE_CODE (sub) == ADDR_EXPR)
14332 tree op = TREE_OPERAND (sub, 0);
14333 tree optype = TREE_TYPE (op);
14334 /* *&CONST_DECL -> to the value of the const decl. */
14335 if (TREE_CODE (op) == CONST_DECL)
14336 return DECL_INITIAL (op);
14337 /* *&p => p; make sure to handle *&"str"[cst] here. */
14338 if (type == optype)
14340 tree fop = fold_read_from_constant_string (op);
14346 /* *(foo *)&fooarray => fooarray[0] */
14347 else if (TREE_CODE (optype) == ARRAY_TYPE
14348 && type == TREE_TYPE (optype))
14350 tree type_domain = TYPE_DOMAIN (optype);
14351 tree min_val = size_zero_node;
      /* Index the array at its lower bound (not necessarily 0).  */
14352 if (type_domain && TYPE_MIN_VALUE (type_domain))
14353 min_val = TYPE_MIN_VALUE (type_domain);
14354 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14356 /* *(foo *)&complexfoo => __real__ complexfoo */
14357 else if (TREE_CODE (optype) == COMPLEX_TYPE
14358 && type == TREE_TYPE (optype))
14359 return fold_build1 (REALPART_EXPR, type, op);
14360 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14361 else if (TREE_CODE (optype) == VECTOR_TYPE
14362 && type == TREE_TYPE (optype))
14364 tree part_width = TYPE_SIZE (type);
14365 tree index = bitsize_int (0);
14366 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14370 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14371 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14372 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14374 tree op00 = TREE_OPERAND (sub, 0);
14375 tree op01 = TREE_OPERAND (sub, 1);
14379 op00type = TREE_TYPE (op00);
14380 if (TREE_CODE (op00) == ADDR_EXPR
14381 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14382 && type == TREE_TYPE (TREE_TYPE (op00type)))
      /* The offset must be exactly one element (the imag part).  */
14384 tree size = TYPE_SIZE_UNIT (type);
14385 if (tree_int_cst_equal (size, op01))
14386 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14390 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14391 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14392 && type == TREE_TYPE (TREE_TYPE (subtype)))
14395 tree min_val = size_zero_node;
14396 sub = build_fold_indirect_ref (sub);
14397 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14398 if (type_domain && TYPE_MIN_VALUE (type_domain))
14399 min_val = TYPE_MIN_VALUE (type_domain);
14400 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14406 /* Builds an expression for an indirection through T, simplifying some
/* Build *T, using fold_indirect_ref_1's simplifications when they
   apply; otherwise build a plain INDIRECT_REF.  */
14410 build_fold_indirect_ref (tree t)
14412 tree type = TREE_TYPE (TREE_TYPE (t));
14413 tree sub = fold_indirect_ref_1 (type, t);
14418 return build1 (INDIRECT_REF, type, t);
14421 /* Given an INDIRECT_REF T, return either T or a simplified version. */
/* Simplify an existing INDIRECT_REF T in place; falls back to
   returning T itself (fall-through elided in this excerpt).  */
14424 fold_indirect_ref (tree t)
14426 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14434 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14435 whose result is ignored. The type of the returned tree need not be
14436 the same as the original expression. */
14439 fold_ignored_result (tree t)
      /* No side effects at all: the whole value can be dropped.  */
14441 if (!TREE_SIDE_EFFECTS (t))
14442 return integer_zero_node;
      /* Iteratively peel nodes whose discarded parts are effect-free
         (loop construct elided in this excerpt).  */
14445 switch (TREE_CODE_CLASS (TREE_CODE (t)))
      /* Unary node: only the operand can carry effects.  */
14448 t = TREE_OPERAND (t, 0);
14452 case tcc_comparison:
      /* Keep whichever operand still has side effects.  */
14453 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14454 t = TREE_OPERAND (t, 0);
14455 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14456 t = TREE_OPERAND (t, 1);
14461 case tcc_expression:
14462 switch (TREE_CODE (t))
14464 case COMPOUND_EXPR:
14465 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14467 t = TREE_OPERAND (t, 0);
      /* COND_EXPR (label elided): the whole node is needed as soon as
         either arm has side effects.  */
14471 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14472 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14474 t = TREE_OPERAND (t, 0)
14487 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14488 This can only be applied to objects of a sizetype. */
14491 round_up (tree value, int divisor)
14493 tree div = NULL_TREE;
14495 gcc_assert (divisor > 0);
14499 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14500 have to do anything. Only do this when we are not given a const,
14501 because in that case, this check is more expensive than just
14503 if (TREE_CODE (value) != INTEGER_CST)
14505 div = build_int_cst (TREE_TYPE (value), divisor);
14507 if (multiple_of_p (TREE_TYPE (value), value, div))
14511 /* If divisor is a power of two, simplify this to bit manipulation. */
14512 if (divisor == (divisor & -divisor))
      /* Constant operand: round the double-int directly.  */
14514 if (TREE_CODE (value) == INTEGER_CST)
14516 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14517 unsigned HOST_WIDE_INT high;
      /* Already aligned: nothing to add.  */
14520 if ((low & (divisor - 1)) == 0)
14523 overflow_p = TREE_OVERFLOW (value);
14524 high = TREE_INT_CST_HIGH (value);
14525 low &= ~(divisor - 1);
14534 return force_fit_type_double (TREE_TYPE (value), low, high,
      /* Non-constant: (value + (divisor-1)) & -divisor.  */
14541 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14542 value = size_binop (PLUS_EXPR, value, t);
14543 t = build_int_cst (TREE_TYPE (value), -divisor);
14544 value = size_binop (BIT_AND_EXPR, value, t);
      /* General divisor: ceil-divide then multiply back.  */
14550 div = build_int_cst (TREE_TYPE (value), divisor);
14551 value = size_binop (CEIL_DIV_EXPR, value, div);
14552 value = size_binop (MULT_EXPR, value, div);
14558 /* Likewise, but round down. */
14561 round_down (tree value, int divisor)
14563 tree div = NULL_TREE;
14565 gcc_assert (divisor > 0);
14569 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14570 have to do anything. Only do this when we are not given a const,
14571 because in that case, this check is more expensive than just
14573 if (TREE_CODE (value) != INTEGER_CST)
14575 div = build_int_cst (TREE_TYPE (value), divisor);
14577 if (multiple_of_p (TREE_TYPE (value), value, div))
14581 /* If divisor is a power of two, simplify this to bit manipulation. */
14582 if (divisor == (divisor & -divisor))
      /* value & -divisor clears the low bits, rounding toward zero.  */
14586 t = build_int_cst (TREE_TYPE (value), -divisor);
14587 value = size_binop (BIT_AND_EXPR, value, t);
      /* General divisor: floor-divide then multiply back.  */
14592 div = build_int_cst (TREE_TYPE (value), divisor);
14593 value = size_binop (FLOOR_DIV_EXPR, value, div);
14594 value = size_binop (MULT_EXPR, value, div);
14600 /* Returns the pointer to the base of the object addressed by EXP and
14601 extracts the information about the offset of the access, storing it
14602 to PBITPOS and POFFSET. */
14605 split_address_to_core_and_offset (tree exp,
14606 HOST_WIDE_INT *pbitpos, tree *poffset)
14609 enum machine_mode mode;
14610 int unsignedp, volatilep;
14611 HOST_WIDE_INT bitsize;
/* For &OBJ, let get_inner_reference decompose the addressed reference
   into base object, bit position and symbolic offset.  */
14613 if (TREE_CODE (exp) == ADDR_EXPR)
14615 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14616 poffset, &mode, &unsignedp, &volatilep,
/* Re-wrap the base object in an address expression so the caller gets a
   pointer-valued core.  */
14618 core = fold_addr_expr (core);
/* Otherwise there is no separate symbolic offset component.
   NOTE(review): the assignments of CORE and *PBITPOS for this branch
   appear to be elided from this excerpt — confirm against the full
   source.  */
14624 *poffset = NULL_TREE;
14630 /* Returns true if addresses of E1 and E2 differ by a constant, false
14631 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14634 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14637 HOST_WIDE_INT bitpos1, bitpos2;
14638 tree toffset1, toffset2, tdiff, type;
/* Decompose both addresses into base object + bit position + symbolic
   offset.  */
14640 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14641 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* The difference is only a known constant when both bit positions are
   byte-aligned and the two base objects are identical.  */
14643 if (bitpos1 % BITS_PER_UNIT != 0
14644 || bitpos2 % BITS_PER_UNIT != 0
14645 || !operand_equal_p (core1, core2, 0))
/* Both sides carry a symbolic offset: their difference must fold to a
   constant that fits in a HOST_WIDE_INT.  */
14648 if (toffset1 && toffset2)
14650 type = TREE_TYPE (toffset1);
/* Convert to a common type before subtracting.  */
14651 if (type != TREE_TYPE (toffset2))
14652 toffset2 = fold_convert (type, toffset2);
14654 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14655 if (!cst_and_fits_in_hwi (tdiff))
14658 *diff = int_cst_value (tdiff);
14660 else if (toffset1 || toffset2)
14662 /* If only one of the offsets is non-constant, the difference cannot
/* Fold the byte difference of the two bit positions into the result.  */
14669 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14673 /* Simplify the floating point expression EXP when the sign of the
14674 result is not significant. Return NULL_TREE if no simplification
14678 fold_strip_sign_ops (tree exp)
14682 switch (TREE_CODE (exp))
14686 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14687 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14691 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14693 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14694 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14695 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14696 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14697 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14698 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14701 case COMPOUND_EXPR:
14702 arg0 = TREE_OPERAND (exp, 0);
14703 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14705 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14709 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14710 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14712 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14713 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14714 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14719 const enum built_in_function fcode = builtin_mathfn_code (exp);
14722 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14723 /* Strip copysign function call, return the 1st argument. */
14724 arg0 = CALL_EXPR_ARG (exp, 0);
14725 arg1 = CALL_EXPR_ARG (exp, 1);
14726 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14729 /* Strip sign ops from the argument of "odd" math functions. */
14730 if (negate_mathfn_p (fcode))
14732 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14734 return build_call_expr (get_callee_fndecl (exp), 1, arg0);