1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et. al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
65 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The low three bits encode LT, EQ and GT respectively; the fourth
   bit is set for the unordered comparisons, so e.g.
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
206 int sign_extended_type;
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
285 int sign_extended_type;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
300 || (overflowable > 0 && sign_extended_type))
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 unsigned HOST_WIDE_INT l;
331 h = h1 + h2 + (l < l1);
337 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
339 return OVERFLOW_SUM_SIGN (h1, h2, h);
342 /* Negate a doubleword integer with doubleword result.
343 Return nonzero if the operation overflows, assuming it's signed.
344 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
345 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
348 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
349 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
355 return (*hv & h1) < 0;
365 /* Multiply two doubleword integers with doubleword result.
366 Return nonzero if the operation overflows according to UNSIGNED_P.
367 Each argument is given as two `HOST_WIDE_INT' pieces.
368 One argument is L1 and H1; the other, L2 and H2.
369 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
372 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
373 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
374 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
377 HOST_WIDE_INT arg1[4];
378 HOST_WIDE_INT arg2[4];
379 HOST_WIDE_INT prod[4 * 2];
380 unsigned HOST_WIDE_INT carry;
382 unsigned HOST_WIDE_INT toplow, neglow;
383 HOST_WIDE_INT tophigh, neghigh;
385 encode (arg1, l1, h1);
386 encode (arg2, l2, h2);
388 memset (prod, 0, sizeof prod);
390 for (i = 0; i < 4; i++)
393 for (j = 0; j < 4; j++)
396 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
397 carry += arg1[i] * arg2[j];
398 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
400 prod[k] = LOWPART (carry);
401 carry = HIGHPART (carry);
406 decode (prod, lv, hv);
407 decode (prod + 4, &toplow, &tophigh);
409 /* Unsigned overflow is immediate. */
411 return (toplow | tophigh) != 0;
413 /* Check for signed overflow by calculating the signed representation of the
414 top half of the result; it should agree with the low half's sign bit. */
417 neg_double (l2, h2, &neglow, &neghigh);
418 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 neg_double (l1, h1, &neglow, &neghigh);
423 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
425 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428 /* Shift the doubleword integer in L1, H1 left by COUNT places
429 keeping only PREC bits of result.
430 Shift right if COUNT is negative.
431 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
435 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
436 HOST_WIDE_INT count, unsigned int prec,
437 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
439 unsigned HOST_WIDE_INT signmask;
443 rshift_double (l1, h1, -count, prec, lv, hv, arith);
447 if (SHIFT_COUNT_TRUNCATED)
450 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
452 /* Shifting by the host word size is undefined according to the
453 ANSI standard, so we must handle this as a special case. */
457 else if (count >= HOST_BITS_PER_WIDE_INT)
459 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
464 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
465 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
469 /* Sign extend all bits that are beyond the precision. */
471 signmask = -((prec > HOST_BITS_PER_WIDE_INT
472 ? ((unsigned HOST_WIDE_INT) *hv
473 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
474 : (*lv >> (prec - 1))) & 1);
476 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
478 else if (prec >= HOST_BITS_PER_WIDE_INT)
480 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
481 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
486 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
487 *lv |= signmask << prec;
491 /* Shift the doubleword integer in L1, H1 right by COUNT places
492 keeping only PREC bits of result. COUNT must be positive.
493 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
494 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
497 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
498 HOST_WIDE_INT count, unsigned int prec,
499 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502 unsigned HOST_WIDE_INT signmask;
505 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508 if (SHIFT_COUNT_TRUNCATED)
511 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
513 /* Shifting by the host word size is undefined according to the
514 ANSI standard, so we must handle this as a special case. */
518 else if (count >= HOST_BITS_PER_WIDE_INT)
521 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
525 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
527 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530 /* Zero / sign extend all bits that are beyond the precision. */
532 if (count >= (HOST_WIDE_INT)prec)
537 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
539 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
541 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
542 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
547 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
548 *lv |= signmask << (prec - count);
552 /* Rotate the doubleword integer in L1, H1 left by COUNT places
553 keeping only PREC bits of result.
554 Rotate right if COUNT is negative.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
558 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
569 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
575 /* Rotate the doubleword integer in L1, H1 left by COUNT places
576 keeping only PREC bits of result. COUNT must be positive.
577 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
580 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
581 HOST_WIDE_INT count, unsigned int prec,
582 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
584 unsigned HOST_WIDE_INT s1l, s2l;
585 HOST_WIDE_INT s1h, s2h;
591 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
592 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
597 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
598 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
599 CODE is a tree code for a kind of division, one of
600 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
602 It controls how the quotient is rounded to an integer.
603 Return nonzero if the operation overflows.
604 UNS nonzero says do unsigned division. */
607 div_and_round_double (enum tree_code code, int uns,
608 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
609 HOST_WIDE_INT hnum_orig,
610 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
611 HOST_WIDE_INT hden_orig,
612 unsigned HOST_WIDE_INT *lquo,
613 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
617 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
618 HOST_WIDE_INT den[4], quo[4];
620 unsigned HOST_WIDE_INT work;
621 unsigned HOST_WIDE_INT carry = 0;
622 unsigned HOST_WIDE_INT lnum = lnum_orig;
623 HOST_WIDE_INT hnum = hnum_orig;
624 unsigned HOST_WIDE_INT lden = lden_orig;
625 HOST_WIDE_INT hden = hden_orig;
628 if (hden == 0 && lden == 0)
629 overflow = 1, lden = 1;
631 /* Calculate quotient sign and convert operands to unsigned. */
637 /* (minimum integer) / (-1) is the only overflow case. */
638 if (neg_double (lnum, hnum, &lnum, &hnum)
639 && ((HOST_WIDE_INT) lden & hden) == -1)
645 neg_double (lden, hden, &lden, &hden);
649 if (hnum == 0 && hden == 0)
650 { /* single precision */
652 /* This unsigned division rounds toward zero. */
658 { /* trivial case: dividend < divisor */
659 /* hden != 0 already checked. */
666 memset (quo, 0, sizeof quo);
668 memset (num, 0, sizeof num); /* to zero 9th element */
669 memset (den, 0, sizeof den);
671 encode (num, lnum, hnum);
672 encode (den, lden, hden);
674 /* Special code for when the divisor < BASE. */
675 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
677 /* hnum != 0 already checked. */
678 for (i = 4 - 1; i >= 0; i--)
680 work = num[i] + carry * BASE;
681 quo[i] = work / lden;
687 /* Full double precision division,
688 with thanks to Don Knuth's "Seminumerical Algorithms". */
689 int num_hi_sig, den_hi_sig;
690 unsigned HOST_WIDE_INT quo_est, scale;
692 /* Find the highest nonzero divisor digit. */
693 for (i = 4 - 1;; i--)
700 /* Insure that the first digit of the divisor is at least BASE/2.
701 This is required by the quotient digit estimation algorithm. */
703 scale = BASE / (den[den_hi_sig] + 1);
705 { /* scale divisor and dividend */
707 for (i = 0; i <= 4 - 1; i++)
709 work = (num[i] * scale) + carry;
710 num[i] = LOWPART (work);
711 carry = HIGHPART (work);
716 for (i = 0; i <= 4 - 1; i++)
718 work = (den[i] * scale) + carry;
719 den[i] = LOWPART (work);
720 carry = HIGHPART (work);
721 if (den[i] != 0) den_hi_sig = i;
728 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
730 /* Guess the next quotient digit, quo_est, by dividing the first
731 two remaining dividend digits by the high order quotient digit.
732 quo_est is never low and is at most 2 high. */
733 unsigned HOST_WIDE_INT tmp;
735 num_hi_sig = i + den_hi_sig + 1;
736 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
737 if (num[num_hi_sig] != den[den_hi_sig])
738 quo_est = work / den[den_hi_sig];
742 /* Refine quo_est so it's usually correct, and at most one high. */
743 tmp = work - quo_est * den[den_hi_sig];
745 && (den[den_hi_sig - 1] * quo_est
746 > (tmp * BASE + num[num_hi_sig - 2])))
749 /* Try QUO_EST as the quotient digit, by multiplying the
750 divisor by QUO_EST and subtracting from the remaining dividend.
751 Keep in mind that QUO_EST is the I - 1st digit. */
754 for (j = 0; j <= den_hi_sig; j++)
756 work = quo_est * den[j] + carry;
757 carry = HIGHPART (work);
758 work = num[i + j] - LOWPART (work);
759 num[i + j] = LOWPART (work);
760 carry += HIGHPART (work) != 0;
763 /* If quo_est was high by one, then num[i] went negative and
764 we need to correct things. */
765 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
768 carry = 0; /* add divisor back in */
769 for (j = 0; j <= den_hi_sig; j++)
771 work = num[i + j] + den[j] + carry;
772 carry = HIGHPART (work);
773 num[i + j] = LOWPART (work);
776 num [num_hi_sig] += carry;
779 /* Store the quotient digit. */
784 decode (quo, lquo, hquo);
787 /* If result is negative, make it so. */
789 neg_double (*lquo, *hquo, lquo, hquo);
791 /* Compute trial remainder: rem = num - (quo * den) */
792 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
793 neg_double (*lrem, *hrem, lrem, hrem);
794 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
799 case TRUNC_MOD_EXPR: /* round toward zero */
800 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
804 case FLOOR_MOD_EXPR: /* round toward negative infinity */
805 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
808 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
816 case CEIL_MOD_EXPR: /* round toward positive infinity */
817 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827 case ROUND_MOD_EXPR: /* round to closest integer */
829 unsigned HOST_WIDE_INT labs_rem = *lrem;
830 HOST_WIDE_INT habs_rem = *hrem;
831 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
832 HOST_WIDE_INT habs_den = hden, htwice;
834 /* Get absolute values. */
836 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
838 neg_double (lden, hden, &labs_den, &habs_den);
840 /* If (2 * abs (lrem) >= abs (lden)) */
841 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
842 labs_rem, habs_rem, <wice, &htwice);
844 if (((unsigned HOST_WIDE_INT) habs_den
845 < (unsigned HOST_WIDE_INT) htwice)
846 || (((unsigned HOST_WIDE_INT) habs_den
847 == (unsigned HOST_WIDE_INT) htwice)
848 && (labs_den < ltwice)))
852 add_double (*lquo, *hquo,
853 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
868 /* Compute true remainder: rem = num - (quo * den) */
869 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
870 neg_double (*lrem, *hrem, lrem, hrem);
871 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
875 /* If ARG2 divides ARG1 with zero remainder, carries out the division
876 of type CODE and returns the quotient.
877 Otherwise returns NULL_TREE. */
880 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
882 unsigned HOST_WIDE_INT int1l, int2l;
883 HOST_WIDE_INT int1h, int2h;
884 unsigned HOST_WIDE_INT quol, reml;
885 HOST_WIDE_INT quoh, remh;
886 tree type = TREE_TYPE (arg1);
887 int uns = TYPE_UNSIGNED (type);
889 int1l = TREE_INT_CST_LOW (arg1);
890 int1h = TREE_INT_CST_HIGH (arg1);
891 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
892 &obj[some_exotic_number]. */
893 if (POINTER_TYPE_P (type))
896 type = signed_type_for (type);
897 fit_double_type (int1l, int1h, &int1l, &int1h,
901 fit_double_type (int1l, int1h, &int1l, &int1h, type);
902 int2l = TREE_INT_CST_LOW (arg2);
903 int2h = TREE_INT_CST_HIGH (arg2);
905 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
906 &quol, &quoh, &reml, &remh);
907 if (remh != 0 || reml != 0)
910 return build_int_cst_wide (type, quol, quoh);
913 /* This is nonzero if we should defer warnings about undefined
914 overflow. This facility exists because these warnings are a
915 special case. The code to estimate loop iterations does not want
916 to issue any warnings, since it works with expressions which do not
917 occur in user code. Various bits of cleanup code call fold(), but
918 only use the result if it has certain characteristics (e.g., is a
919 constant); that code only wants to issue a warning if the result is
922 static int fold_deferring_overflow_warnings;
924 /* If a warning about undefined overflow is deferred, this is the
925 warning. Note that this may cause us to turn two warnings into
926 one, but that is fine since it is sufficient to only give one
927 warning per expression. */
929 static const char* fold_deferred_overflow_warning;
931 /* If a warning about undefined overflow is deferred, this is the
932 level at which the warning should be emitted. */
934 static enum warn_strict_overflow_code fold_deferred_overflow_code;
936 /* Start deferring overflow warnings. We could use a stack here to
937 permit nested calls, but at present it is not necessary. */
940 fold_defer_overflow_warnings (void)
942 ++fold_deferring_overflow_warnings;
945 /* Stop deferring overflow warnings. If there is a pending warning,
946 and ISSUE is true, then issue the warning if appropriate. STMT is
947 the statement with which the warning should be associated (used for
948 location information); STMT may be NULL. CODE is the level of the
949 warning--a warn_strict_overflow_code value. This function will use
950 the smaller of CODE and the deferred code when deciding whether to
951 issue the warning. CODE may be zero to mean to always use the
955 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
960 gcc_assert (fold_deferring_overflow_warnings > 0);
961 --fold_deferring_overflow_warnings;
962 if (fold_deferring_overflow_warnings > 0)
964 if (fold_deferred_overflow_warning != NULL
966 && code < (int) fold_deferred_overflow_code)
967 fold_deferred_overflow_code = code;
971 warnmsg = fold_deferred_overflow_warning;
972 fold_deferred_overflow_warning = NULL;
974 if (!issue || warnmsg == NULL)
977 /* Use the smallest code level when deciding to issue the
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
982 if (!issue_strict_overflow_warning (code))
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 /* Stop deferring overflow warnings, ignoring any deferred
996 fold_undefer_and_ignore_overflow_warnings (void)
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1001 /* Whether we are deferring overflow warnings. */
1004 fold_deferring_overflow_warnings_p (void)
1006 return fold_deferring_overflow_warnings > 0;
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 gcc_assert (!flag_wrapv && !flag_trapv);
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing
1078 may_negate_without_overflow_p (tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1136 return negate_expr_p (TREE_REALPART (t))
1137 && negate_expr_p (TREE_IMAGPART (t));
1140 return negate_expr_p (TREE_OPERAND (t, 0))
1141 && negate_expr_p (TREE_OPERAND (t, 1));
1144 return negate_expr_p (TREE_OPERAND (t, 0));
1147 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1148 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 /* -(A + B) -> (-B) - A. */
1151 if (negate_expr_p (TREE_OPERAND (t, 1))
1152 && reorder_operands_p (TREE_OPERAND (t, 0),
1153 TREE_OPERAND (t, 1)))
1155 /* -(A + B) -> (-A) - B. */
1156 return negate_expr_p (TREE_OPERAND (t, 0));
1159 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1160 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1162 && reorder_operands_p (TREE_OPERAND (t, 0),
1163 TREE_OPERAND (t, 1));
1166 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1172 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1173 return negate_expr_p (TREE_OPERAND (t, 1))
1174 || negate_expr_p (TREE_OPERAND (t, 0));
1177 case TRUNC_DIV_EXPR:
1178 case ROUND_DIV_EXPR:
1179 case FLOOR_DIV_EXPR:
1181 case EXACT_DIV_EXPR:
1182 /* In general we can't negate A / B, because if A is INT_MIN and
1183 B is 1, we may turn this into INT_MIN / -1 which is undefined
1184 and actually traps on some architectures. But if overflow is
1185 undefined, we can negate, because - (INT_MIN / 1) is an
1187 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1188 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 return negate_expr_p (TREE_OPERAND (t, 1))
1191 || negate_expr_p (TREE_OPERAND (t, 0));
1194 /* Negate -((double)float) as (double)(-float). */
1195 if (TREE_CODE (type) == REAL_TYPE)
1197 tree tem = strip_float_extensions (t);
1199 return negate_expr_p (tem);
1204 /* Negate -f(x) as f(-x). */
1205 if (negate_mathfn_p (builtin_mathfn_code (t)))
1206 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1210 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1211 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 tree op1 = TREE_OPERAND (t, 1);
1214 if (TREE_INT_CST_HIGH (op1) == 0
1215 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1216 == TREE_INT_CST_LOW (op1))
1227 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1228 simplification is possible.
1229 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): this excerpt is elided -- several case labels, braces and
   declarations of the full function are not visible here; the comments below
   describe only what the visible lines establish.  */
1233 fold_negate_expr (tree t)
1235 tree type = TREE_TYPE (t);
1238 switch (TREE_CODE (t))
1240 /* Convert - (~A) to A + 1. */
1242 if (INTEGRAL_TYPE_P (type))
1243 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1244 build_int_cst (type, 1));
/* Integer constant: negate at compile time, but only keep the result when
   it sets no new overflow flag on a type whose overflow traps.  */
1248 tem = fold_negate_const (t, type);
1249 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1250 || !TYPE_OVERFLOW_TRAPS (type))
/* Real constant: negation can only "overflow" on exotic FP formats.  */
1255 tem = fold_negate_const (t, type);
1256 /* Two's complement FP formats, such as c4x, may overflow. */
1257 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
/* Complex constant: negate the two parts independently and rebuild the
   constant when both parts folded to constants.  */
1263 tree rpart = negate_expr (TREE_REALPART (t));
1264 tree ipart = negate_expr (TREE_IMAGPART (t));
1266 if ((TREE_CODE (rpart) == REAL_CST
1267 && TREE_CODE (ipart) == REAL_CST)
1268 || (TREE_CODE (rpart) == INTEGER_CST
1269 && TREE_CODE (ipart) == INTEGER_CST))
1270 return build_complex (type, rpart, ipart);
/* COMPLEX_EXPR: push the negation onto both component operands.  */
1275 if (negate_expr_p (t))
1276 return fold_build2 (COMPLEX_EXPR, type,
1277 fold_negate_expr (TREE_OPERAND (t, 0)),
1278 fold_negate_expr (TREE_OPERAND (t, 1)));
/* CONJ_EXPR: -conj(z) == conj(-z).  */
1282 if (negate_expr_p (t))
1283 return fold_build1 (CONJ_EXPR, type,
1284 fold_negate_expr (TREE_OPERAND (t, 0)));
/* Double negation: -(-A) is just A.  */
1288 return TREE_OPERAND (t, 0);
/* Addition: only legal to redistribute the negation when sign-dependent
   rounding and signed zeros need not be honored.  */
1291 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1292 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1294 /* -(A + B) -> (-B) - A. */
1295 if (negate_expr_p (TREE_OPERAND (t, 1))
1296 && reorder_operands_p (TREE_OPERAND (t, 0),
1297 TREE_OPERAND (t, 1)))
1299 tem = negate_expr (TREE_OPERAND (t, 1));
1300 return fold_build2 (MINUS_EXPR, type,
1301 tem, TREE_OPERAND (t, 0));
1304 /* -(A + B) -> (-A) - B. */
1305 if (negate_expr_p (TREE_OPERAND (t, 0)))
1307 tem = negate_expr (TREE_OPERAND (t, 0));
1308 return fold_build2 (MINUS_EXPR, type,
1309 tem, TREE_OPERAND (t, 1));
1315 /* - (A - B) -> B - A */
1316 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1317 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1318 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1319 return fold_build2 (MINUS_EXPR, type,
1320 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Presumably the MULT_EXPR arm (case label elided) -- unsigned types are
   excluded; push the negation into whichever operand accepts it.
   TODO(review): confirm against the full file.  */
1324 if (TYPE_UNSIGNED (type))
1330 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1332 tem = TREE_OPERAND (t, 1);
1333 if (negate_expr_p (tem))
1334 return fold_build2 (TREE_CODE (t), type,
1335 TREE_OPERAND (t, 0), negate_expr (tem));
1336 tem = TREE_OPERAND (t, 0);
1337 if (negate_expr_p (tem))
1338 return fold_build2 (TREE_CODE (t), type,
1339 negate_expr (tem), TREE_OPERAND (t, 1));
1343 case TRUNC_DIV_EXPR:
1344 case ROUND_DIV_EXPR:
1345 case FLOOR_DIV_EXPR:
1347 case EXACT_DIV_EXPR:
1348 /* In general we can't negate A / B, because if A is INT_MIN and
1349 B is 1, we may turn this into INT_MIN / -1 which is undefined
1350 and actually traps on some architectures. But if overflow is
1351 undefined, we can negate, because - (INT_MIN / 1) is an
1353 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1355 const char * const warnmsg = G_("assuming signed overflow does not "
1356 "occur when negating a division");
1357 tem = TREE_OPERAND (t, 1);
1358 if (negate_expr_p (tem))
/* Warn only when the transformation actually relies on undefined signed
   overflow (non-constant divisor, or divisor 1).  */
1360 if (INTEGRAL_TYPE_P (type)
1361 && (TREE_CODE (tem) != INTEGER_CST
1362 || integer_onep (tem)))
1363 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1364 return fold_build2 (TREE_CODE (t), type,
1365 TREE_OPERAND (t, 0), negate_expr (tem));
1367 tem = TREE_OPERAND (t, 0);
1368 if (negate_expr_p (tem))
1370 if (INTEGRAL_TYPE_P (type)
1371 && (TREE_CODE (tem) != INTEGER_CST
1372 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1373 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1374 return fold_build2 (TREE_CODE (t), type,
1375 negate_expr (tem), TREE_OPERAND (t, 1));
1381 /* Convert -((double)float) into (double)(-float). */
1382 if (TREE_CODE (type) == REAL_TYPE)
1384 tem = strip_float_extensions (t);
1385 if (tem != t && negate_expr_p (tem))
1386 return negate_expr (tem);
1391 /* Negate -f(x) as f(-x). */
1392 if (negate_mathfn_p (builtin_mathfn_code (t))
1393 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1397 fndecl = get_callee_fndecl (t);
1398 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1399 return build_call_expr (fndecl, 1, arg);
1404 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1405 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1407 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count is exactly precision-1, i.e. the expression
   extracts the sign bit; flipping the signedness of the shift then
   negates the 0/-1 result for free.  */
1408 if (TREE_INT_CST_HIGH (op1) == 0
1409 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1410 == TREE_INT_CST_LOW (op1))
1412 tree ntype = TYPE_UNSIGNED (type)
1413 ? signed_type_for (type)
1414 : unsigned_type_for (type);
1415 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1416 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1417 return fold_convert (type, temp);
1429 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1430 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1431 return NULL_TREE. */
1434 negate_expr (tree t)
/* Remember the original type before stripping sign-preserving conversions,
   so the result can be converted back to it at the end.  */
1441 type = TREE_TYPE (t);
1442 STRIP_SIGN_NOPS (t);
1444 tem = fold_negate_expr (t);
/* If no simplification was found, fall back to an explicit NEGATE_EXPR
   (the guard between these two statements is elided in this excerpt).  */
1446 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1447 return fold_convert (type, tem);
1450 /* Split a tree IN into a constant, literal and variable parts that could be
1451 combined with CODE to make IN. "constant" means an expression with
1452 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1453 commutative arithmetic operation. Store the constant part into *CONP,
1454 the literal in *LITP and return the variable part. If a part isn't
1455 present, set it to null. If the tree does not decompose in this way,
1456 return the entire tree as the variable part and the other parts as null.
1458 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1459 case, we negate an operand that was subtracted. Except if it is a
1460 literal for which we use *MINUS_LITP instead.
1462 If NEGATE_P is true, we are negating all of IN, again except a literal
1463 for which we use *MINUS_LITP instead.
1465 If IN is itself a literal or constant, return it as appropriate.
1467 Note that we do not guarantee that any of the three values will be the
1468 same type as IN, but they will have the same signedness and mode. */
1471 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1472 tree *minus_litp, int negate_p)
1480 /* Strip any conversions that don't change the machine mode or signedness. */
1481 STRIP_SIGN_NOPS (in);
/* IN is itself a literal -- handled directly (branch body elided here).  */
1483 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1485 else if (TREE_CODE (in) == code
1486 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1487 /* We can associate addition and subtraction together (even
1488 though the C standard doesn't say so) for integers because
1489 the value is not affected. For reals, the value might be
1490 affected, so we can't. */
1491 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1492 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1494 tree op0 = TREE_OPERAND (in, 0);
1495 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that op1 is effectively subtracted; the other three
   flags track which parts must be negated afterwards.  */
1496 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1497 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1499 /* First see if either of the operands is a literal, then a constant. */
1500 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1501 *litp = op0, op0 = 0;
1502 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1503 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1505 if (op0 != 0 && TREE_CONSTANT (op0))
1506 *conp = op0, op0 = 0;
1507 else if (op1 != 0 && TREE_CONSTANT (op1))
1508 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1510 /* If we haven't dealt with either operand, this is not a case we can
1511 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1512 if (op0 != 0 && op1 != 0)
1517 var = op1, neg_var_p = neg1_p;
1519 /* Now do any needed negations. */
/* A subtracted literal goes to *MINUS_LITP rather than being negated.  */
1521 *minus_litp = *litp, *litp = 0;
1523 *conp = negate_expr (*conp);
1525 var = negate_expr (var);
1527 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: swap the literal between *LITP and *MINUS_LITP and
   negate the constant and variable parts (the guard is elided here).  */
1535 *minus_litp = *litp, *litp = 0;
1536 else if (*minus_litp)
1537 *litp = *minus_litp, *minus_litp = 0;
1538 *conp = negate_expr (*conp);
1539 var = negate_expr (var);
1545 /* Re-associate trees split by the above function. T1 and T2 are either
1546 expressions to associate or null. Return the new expression, if any. If
1547 we build an operation, do it in TYPE and with CODE. */
1550 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1557 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1558 try to fold this since we will have infinite recursion. But do
1559 deal with any NEGATE_EXPRs. */
1560 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1561 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1563 if (code == PLUS_EXPR)
/* A + (-B) and (-A) + B become subtractions; A + 0 is just A.  */
1565 if (TREE_CODE (t1) == NEGATE_EXPR)
1566 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1567 fold_convert (type, TREE_OPERAND (t1, 0)));
1568 else if (TREE_CODE (t2) == NEGATE_EXPR)
1569 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1570 fold_convert (type, TREE_OPERAND (t2, 0)));
1571 else if (integer_zerop (t2))
1572 return fold_convert (type, t1);
1574 else if (code == MINUS_EXPR)
1576 if (integer_zerop (t2))
1577 return fold_convert (type, t1);
/* Unfoldable case: build the expression without recursing into fold.  */
1580 return build2 (code, type, fold_convert (type, t1),
1581 fold_convert (type, t2));
/* Safe case: let fold_build2 simplify the combination.  */
1584 return fold_build2 (code, type, fold_convert (type, t1),
1585 fold_convert (type, t2));
1588 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1589 for use in int_const_binop, size_binop and size_diffop. */
1592 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
/* Both types must be integer or pointer types (the early-return bodies
   are elided in this excerpt) ...  */
1594 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1596 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* ... and must agree in signedness, precision and machine mode.  */
1611 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1612 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1613 && TYPE_MODE (type1) == TYPE_MODE (type2);
1617 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1618 to produce a new constant. Return NULL_TREE if we don't know how
1619 to evaluate CODE at compile-time.
1621 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1624 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Constants are manipulated as (low, high) pairs of HOST_WIDE_INTs;
   the double-int helpers (add_double etc.) operate on these halves.  */
1626 unsigned HOST_WIDE_INT int1l, int2l;
1627 HOST_WIDE_INT int1h, int2h;
1628 unsigned HOST_WIDE_INT low;
1630 unsigned HOST_WIDE_INT garbagel;
1631 HOST_WIDE_INT garbageh;
1633 tree type = TREE_TYPE (arg1);
1634 int uns = TYPE_UNSIGNED (type);
1636 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
/* Split each operand into its low/high halves.  */
1639 int1l = TREE_INT_CST_LOW (arg1);
1640 int1h = TREE_INT_CST_HIGH (arg1);
1641 int2l = TREE_INT_CST_LOW (arg2);
1642 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations act independently on the two halves.  */
1647 low = int1l | int2l, hi = int1h | int2h;
1651 low = int1l ^ int2l, hi = int1h ^ int2h;
1655 low = int1l & int2l, hi = int1h & int2h;
1661 /* It's unclear from the C standard whether shifts can overflow.
1662 The following code ignores overflow; perhaps a C standard
1663 interpretation ruling is needed. */
1664 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1671 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1676 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation; the overflow
   check compares the sign of the sum against the operand signs.  */
1680 neg_double (int2l, int2h, &low, &hi);
1681 add_double (int1l, int1h, low, hi, &low, &hi);
1682 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1686 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1689 case TRUNC_DIV_EXPR:
1690 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1691 case EXACT_DIV_EXPR:
1692 /* This is a shortcut for a common special case. */
1693 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1694 && !TREE_OVERFLOW (arg1)
1695 && !TREE_OVERFLOW (arg2)
1696 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1698 if (code == CEIL_DIV_EXPR)
1701 low = int1l / int2l, hi = 0;
1705 /* ... fall through ... */
1707 case ROUND_DIV_EXPR:
/* Division by zero: not foldable (the return is elided here).  */
1708 if (int2h == 0 && int2l == 0)
1710 if (int2h == 0 && int2l == 1)
1712 low = int1l, hi = int1h;
/* X / X is 1 (except 0 / 0, excluded above).  */
1715 if (int1l == int2l && int1h == int2h
1716 && ! (int1l == 0 && int1h == 0))
1721 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1722 &low, &hi, &garbagel, &garbageh);
1725 case TRUNC_MOD_EXPR:
1726 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1727 /* This is a shortcut for a common special case. */
1728 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1729 && !TREE_OVERFLOW (arg1)
1730 && !TREE_OVERFLOW (arg2)
1731 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1733 if (code == CEIL_MOD_EXPR)
1735 low = int1l % int2l, hi = 0;
1739 /* ... fall through ... */
1741 case ROUND_MOD_EXPR:
1742 if (int2h == 0 && int2l == 0)
/* For MOD the quotient halves are discarded into the garbage slots.  */
1744 overflow = div_and_round_double (code, uns,
1745 int1l, int1h, int2l, int2h,
1746 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: LOW temporarily holds the result of the < comparison, done
   unsigned or signed to match the type.  */
1752 low = (((unsigned HOST_WIDE_INT) int1h
1753 < (unsigned HOST_WIDE_INT) int2h)
1754 || (((unsigned HOST_WIDE_INT) int1h
1755 == (unsigned HOST_WIDE_INT) int2h)
1758 low = (int1h < int2h
1759 || (int1h == int2h && int1l < int2l));
1761 if (low == (code == MIN_EXPR))
1762 low = int1l, hi = int1h;
1764 low = int2l, hi = int2h;
/* Presumably the NOTRUNC path: build the constant as-is and propagate
   overflow manually -- TODO(review): confirm against the full file.  */
1773 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1775 /* Propagate overflow flags ourselves. */
1776 if (((!uns || is_sizetype) && overflow)
1777 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1780 TREE_OVERFLOW (t) = 1;
/* Otherwise force the value to fit TYPE, carrying the overflow bit.  */
1784 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1785 ((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1791 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1792 constant. We assume ARG1 and ARG2 have the same data type, or at least
1793 are the same kind of constant and the same machine mode. Return zero if
1794 combining the constants is not allowed in the current operating mode.
1796 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1799 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1801 /* Sanity check for the recursive cases. */
/* Integer constants are delegated to int_const_binop.  */
1808 if (TREE_CODE (arg1) == INTEGER_CST)
1809 return int_const_binop (code, arg1, arg2, notrunc);
1811 if (TREE_CODE (arg1) == REAL_CST)
1813 enum machine_mode mode;
1816 REAL_VALUE_TYPE value;
1817 REAL_VALUE_TYPE result;
1821 /* The following codes are handled by real_arithmetic. */
1836 d1 = TREE_REAL_CST (arg1);
1837 d2 = TREE_REAL_CST (arg2);
1839 type = TREE_TYPE (arg1);
1840 mode = TYPE_MODE (type);
1842 /* Don't perform operation if we honor signaling NaNs and
1843 either operand is a NaN. */
1844 if (HONOR_SNANS (mode)
1845 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1848 /* Don't perform operation if it would raise a division
1849 by zero exception. */
1850 if (code == RDIV_EXPR
1851 && REAL_VALUES_EQUAL (d2, dconst0)
1852 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1855 /* If either operand is a NaN, just return it. Otherwise, set up
1856 for floating-point trap; we return an overflow. */
1857 if (REAL_VALUE_ISNAN (d1))
1859 else if (REAL_VALUE_ISNAN (d2))
/* Compute in infinite precision, then round to the target mode.
   INEXACT records whether any precision was lost.  */
1862 inexact = real_arithmetic (&value, code, &d1, &d2);
1863 real_convert (&result, mode, &value);
1865 /* Don't constant fold this floating point operation if
1866 the result has overflowed and flag_trapping_math. */
1867 if (flag_trapping_math
1868 && MODE_HAS_INFINITIES (mode)
1869 && REAL_VALUE_ISINF (result)
1870 && !REAL_VALUE_ISINF (d1)
1871 && !REAL_VALUE_ISINF (d2))
1874 /* Don't constant fold this floating point operation if the
1875 result may dependent upon the run-time rounding mode and
1876 flag_rounding_math is set, or if GCC's software emulation
1877 is unable to accurately represent the result. */
1878 if ((flag_rounding_math
1879 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1880 && !flag_unsafe_math_optimizations))
1881 && (inexact || !real_identical (&result, &value)))
1884 t = build_real (type, result);
1886 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
/* Complex constants: fold component-wise, recursing on the parts.  */
1890 if (TREE_CODE (arg1) == COMPLEX_CST)
1892 tree type = TREE_TYPE (arg1);
1893 tree r1 = TREE_REALPART (arg1);
1894 tree i1 = TREE_IMAGPART (arg1);
1895 tree r2 = TREE_REALPART (arg2);
1896 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction apply CODE independently to each part.  */
1903 real = const_binop (code, r1, r2, notrunc);
1904 imag = const_binop (code, i1, i2, notrunc);
/* Multiplication: (r1 + i1*i)(r2 + i2*i)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1908 real = const_binop (MINUS_EXPR,
1909 const_binop (MULT_EXPR, r1, r2, notrunc),
1910 const_binop (MULT_EXPR, i1, i2, notrunc),
1912 imag = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r1, i2, notrunc),
1914 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Division: multiply by the conjugate and divide both parts by
   |z2|^2 = r2*r2 + i2*i2.  */
1921 = const_binop (PLUS_EXPR,
1922 const_binop (MULT_EXPR, r2, r2, notrunc),
1923 const_binop (MULT_EXPR, i2, i2, notrunc),
1926 = const_binop (PLUS_EXPR,
1927 const_binop (MULT_EXPR, r1, r2, notrunc),
1928 const_binop (MULT_EXPR, i1, i2, notrunc),
1931 = const_binop (MINUS_EXPR,
1932 const_binop (MULT_EXPR, i1, r2, notrunc),
1933 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Gaussian integers divide with truncation rather than RDIV.  */
1936 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1937 code = TRUNC_DIV_EXPR;
1939 real = const_binop (code, t1, magsquared, notrunc);
1940 imag = const_binop (code, t2, magsquared, notrunc);
1949 return build_complex (type, real, imag);
1955 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1956 indicates which particular sizetype to create. */
1959 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* Look the requested sizetype up in sizetype_tab and wrap NUMBER in an
   INTEGER_CST of that type.  */
1961 return build_int_cst (sizetype_tab[(int) kind], number);
1964 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1965 is a tree code. The type of the result is taken from the operands.
1966 Both must be equivalent integer types, ala int_binop_types_match_p.
1967 If the operands are constant, so is the result. */
1970 size_binop (enum tree_code code, tree arg0, tree arg1)
1972 tree type = TREE_TYPE (arg0);
/* Propagate earlier errors instead of asserting on them.  */
1974 if (arg0 == error_mark_node || arg1 == error_mark_node)
1975 return error_mark_node;
1977 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1980 /* Handle the special case of two integer constants faster. */
1981 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1983 /* And some specific cases even faster than that. */
/* Identity shortcuts: X + 0, 0 + X, X - 0, 1 * X -- only taken when the
   identity operand carries no overflow flag that must be preserved.  */
1984 if (code == PLUS_EXPR)
1986 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1988 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1991 else if (code == MINUS_EXPR)
1993 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1996 else if (code == MULT_EXPR)
1998 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2002 /* Handle general case of two integer constants. */
2003 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a folded expression instead.  */
2006 return fold_build2 (code, type, arg0, arg1);
2009 /* Given two values, either both of sizetype or both of bitsizetype,
2010 compute the difference between the two values. Return the value
2011 in signed type corresponding to the type of the operands. */
2014 size_diffop (tree arg0, tree arg1)
2016 tree type = TREE_TYPE (arg0);
2019 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2022 /* If the type is already signed, just do the simple thing. */
2023 if (!TYPE_UNSIGNED (type))
2024 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) operand type.  */
2026 if (type == sizetype)
2028 else if (type == bitsizetype)
2029 ctype = sbitsizetype;
2031 ctype = signed_type_for (type);
2033 /* If either operand is not a constant, do the conversions to the signed
2034 type and subtract. The hardware will do the right thing with any
2035 overflow in the subtraction. */
2036 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2037 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2038 fold_convert (ctype, arg1));
2040 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2041 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2042 overflow) and negate (which can't either). Special-case a result
2043 of zero while we're here. */
2044 if (tree_int_cst_equal (arg0, arg1))
2045 return build_int_cst (ctype, 0);
2046 else if (tree_int_cst_lt (arg1, arg0))
2047 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* ARG0 < ARG1: compute -(ARG1 - ARG0) in the signed type.  */
2049 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2050 fold_convert (ctype, size_binop (MINUS_EXPR,
2054 /* A subroutine of fold_convert_const handling conversions of an
2055 INTEGER_CST to another integer type. */
2058 fold_convert_const_int_from_int (tree type, tree arg1)
2062 /* Given an integer constant, make new constant with new type,
2063 appropriately sign-extended or truncated. */
2064 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2065 TREE_INT_CST_HIGH (arg1),
2066 /* Don't set the overflow when
2067 converting a pointer */
2068 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Flag overflow when a negative value is converted from a signed to a
   wider-unsignedness type, or when ARG1 already overflowed.  */
2069 (TREE_INT_CST_HIGH (arg1) < 0
2070 && (TYPE_UNSIGNED (type)
2071 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2072 | TREE_OVERFLOW (arg1));
2077 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2078 to an integer type. */
2081 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2086 /* The following code implements the floating point to integer
2087 conversion rules required by the Java Language Specification,
2088 that IEEE NaNs are mapped to zero and values that overflow
2089 the target precision saturate, i.e. values greater than
2090 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2091 are mapped to INT_MIN. These semantics are allowed by the
2092 C and C++ standards that simply state that the behavior of
2093 FP-to-integer conversion is unspecified upon overflow. */
2095 HOST_WIDE_INT high, low;
2097 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round X towards zero (or per CODE; other rounding cases elided).  */
2101 case FIX_TRUNC_EXPR:
2102 real_trunc (&r, VOIDmode, &x);
2109 /* If R is NaN, return zero and show we have an overflow. */
2110 if (REAL_VALUE_ISNAN (r))
2117 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's minimum ...  */
2122 tree lt = TYPE_MIN_VALUE (type);
2123 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2124 if (REAL_VALUES_LESS (r, l))
2127 high = TREE_INT_CST_HIGH (lt);
2128 low = TREE_INT_CST_LOW (lt);
/* ... or at TYPE's maximum.  */
2134 tree ut = TYPE_MAX_VALUE (type);
2137 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2138 if (REAL_VALUES_LESS (u, r))
2141 high = TREE_INT_CST_HIGH (ut);
2142 low = TREE_INT_CST_LOW (ut);
/* In range: do the actual conversion.  */
2148 REAL_VALUE_TO_INT (&low, &high, r);
2150 t = force_fit_type_double (type, low, high, -1,
2151 overflow | TREE_OVERFLOW (arg1));
2155 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2156 to another floating point type. */
2159 fold_convert_const_real_from_real (tree type, tree arg1)
2161 REAL_VALUE_TYPE value;
/* Re-round the constant to the destination mode and carry over any
   overflow flag from the source constant.  */
2164 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2165 t = build_real (type, value);
2167 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2171 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2172 type TYPE. If no simplification can be done return NULL_TREE. */
2175 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Trivial conversion: nothing to do.  */
2177 if (TREE_TYPE (arg1) == type)
/* Dispatch on the destination type, then the constant's kind.  */
2180 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2182 if (TREE_CODE (arg1) == INTEGER_CST)
2183 return fold_convert_const_int_from_int (type, arg1);
2184 else if (TREE_CODE (arg1) == REAL_CST)
2185 return fold_convert_const_int_from_real (code, type, arg1);
2187 else if (TREE_CODE (type) == REAL_TYPE)
2189 if (TREE_CODE (arg1) == INTEGER_CST)
2190 return build_real_from_int_cst (type, arg1);
2191 if (TREE_CODE (arg1) == REAL_CST)
2192 return fold_convert_const_real_from_real (type, arg1);
2197 /* Construct a vector of zero elements of vector type TYPE. */
2200 build_zero_vector (tree type)
/* Build one zero of the element type, then replicate it once per lane.  */
2205 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2206 units = TYPE_VECTOR_SUBPARTS (type);
2209 for (i = 0; i < units; i++)
2210 list = tree_cons (NULL_TREE, elem, list);
2211 return build_vector (type, list);
2214 /* Convert expression ARG to type TYPE. Used by the middle-end for
2215 simple conversions in preference to calling the front-end's convert. */
2218 fold_convert (tree type, tree arg)
2220 tree orig = TREE_TYPE (arg);
/* Propagate errors from either side of the conversion.  */
2226 if (TREE_CODE (arg) == ERROR_MARK
2227 || TREE_CODE (type) == ERROR_MARK
2228 || TREE_CODE (orig) == ERROR_MARK)
2229 return error_mark_node;
/* Types already compatible: a plain NOP_EXPR suffices.  */
2231 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2232 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2233 TYPE_MAIN_VARIANT (orig)))
2234 return fold_build1 (NOP_EXPR, type, arg);
2236 switch (TREE_CODE (type))
2238 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2239 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constant operands fold immediately when possible.  */
2241 if (TREE_CODE (arg) == INTEGER_CST)
2243 tem = fold_convert_const (NOP_EXPR, type, arg);
2244 if (tem != NULL_TREE)
2247 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2248 || TREE_CODE (orig) == OFFSET_TYPE)
2249 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> scalar: take the real part, then convert that.  */
2250 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert (type, tem);
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1 (NOP_EXPR, type, arg);
/* Presumably the REAL_TYPE arm (case label elided) -- TODO(review):
   confirm against the full file.  */
2260 if (TREE_CODE (arg) == INTEGER_CST)
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2266 else if (TREE_CODE (arg) == REAL_CST)
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2273 switch (TREE_CODE (orig))
2276 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2277 case POINTER_TYPE: case REFERENCE_TYPE:
2278 return fold_build1 (FLOAT_EXPR, type, arg);
2281 return fold_build1 (NOP_EXPR, type, arg);
2284 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2285 return fold_convert (type, tem);
/* Presumably the COMPLEX_TYPE arm: convert scalars by pairing with a
   zero imaginary part, complexes component-wise.  */
2292 switch (TREE_CODE (orig))
2295 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2296 case POINTER_TYPE: case REFERENCE_TYPE:
2298 return build2 (COMPLEX_EXPR, type,
2299 fold_convert (TREE_TYPE (type), arg),
2300 fold_convert (TREE_TYPE (type), integer_zero_node));
2305 if (TREE_CODE (arg) == COMPLEX_EXPR)
2307 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2308 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2309 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below, so protect it from double evaluation.  */
2312 arg = save_expr (arg);
2313 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2314 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2315 rpart = fold_convert (TREE_TYPE (type), rpart);
2316 ipart = fold_convert (TREE_TYPE (type), ipart);
2317 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* VECTOR_TYPE: only same-size reinterpretation is supported.  */
2325 if (integer_zerop (arg))
2326 return build_zero_vector (type);
2327 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2328 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2329 || TREE_CODE (orig) == VECTOR_TYPE);
2330 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* VOID_TYPE: drop the value, keeping any side effects.  */
2333 tem = fold_ignored_result (arg);
2334 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2336 return fold_build1 (NOP_EXPR, type, tem);
2343 /* Return false if expr can be assumed not to be an lvalue, true
2347 maybe_lvalue_p (tree x)
2349 /* We only need to wrap lvalue tree codes. */
2350 switch (TREE_CODE (x))
/* Tree codes that can designate storage (several case labels of this
   list are elided in this excerpt).  */
2361 case ALIGN_INDIRECT_REF:
2362 case MISALIGNED_INDIRECT_REF:
2364 case ARRAY_RANGE_REF:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2377 case GIMPLE_MODIFY_STMT:
2386 /* Assume the worst for front-end tree codes. */
2387 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2395 /* Return an expr equal to X but certainly not valid as an lvalue. */
2400 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* non_lvalue (header line elided here): return X unchanged when it could
   never be an lvalue, else wrap it in a same-typed NON_LVALUE_EXPR.  */
2405 if (! maybe_lvalue_p (x))
2407 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2410 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2411 Zero means allow extended lvalues. */
2413 int pedantic_lvalues;
2415 /* When pedantic, return an expr equal to X but certainly not valid as a
2416 pedantic lvalue. Otherwise, return X. */
2419 pedantic_non_lvalue (tree x)
/* Only wrap when pedantic lvalue rules are in force; the plain return of
   X for the non-pedantic case is elided in this excerpt.  */
2421 if (pedantic_lvalues)
2422 return non_lvalue (x);
2427 /* Given a tree comparison code, return the code that is the logical inverse
2428 of the given code. It is not safe to do this for floating-point
2429 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2430 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2433 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With NaNs and trapping math, inversion could change trap behavior, so
   refuse (the ERROR_MARK return is elided in this excerpt).  */
2435 if (honor_nans && flag_trapping_math)
/* Each ordered comparison inverts to its unordered counterpart when NaNs
   must be honored (GT -> UNLE etc.); case labels are elided here.  */
2445 return honor_nans ? UNLE_EXPR : LE_EXPR;
2447 return honor_nans ? UNLT_EXPR : LT_EXPR;
2449 return honor_nans ? UNGE_EXPR : GE_EXPR;
2451 return honor_nans ? UNGT_EXPR : GT_EXPR;
2465 return UNORDERED_EXPR;
2466 case UNORDERED_EXPR:
2467 return ORDERED_EXPR;
2473 /* Similar, but return the comparison that results if the operands are
2474 swapped. This is safe for floating-point. */
2477 swap_tree_comparison (enum tree_code code)
/* Symmetric codes map to themselves; the mirrored pairs (LT<->GT etc.)
   are elided in this excerpt.  */
2484 case UNORDERED_EXPR:
2510 /* Convert a comparison tree code from an enum tree_code representation
2511 into a compcode bit-based encoding. This function is the inverse of
2512 compcode_to_comparison. */
2514 static enum comparison_code
2515 comparison_to_compcode (enum tree_code code)
/* Map each comparison tree code to its COMPCODE_* bitmask (the ordered
   comparison case labels are elided in this excerpt).  */
2532 return COMPCODE_ORD;
2533 case UNORDERED_EXPR:
2534 return COMPCODE_UNORD;
2536 return COMPCODE_UNLT;
2538 return COMPCODE_UNEQ;
2540 return COMPCODE_UNLE;
2542 return COMPCODE_UNGT;
2544 return COMPCODE_LTGT;
2546 return COMPCODE_UNGE;
2552 /* Convert a compcode bit-based encoding of a comparison operator back
2553 to GCC's enum tree_code representation. This function is the
2554 inverse of comparison_to_compcode. */
2556 static enum tree_code
2557 compcode_to_comparison (enum comparison_code code)
/* Inverse mapping of comparison_to_compcode; most case labels are elided
   in this excerpt.  */
2574 return ORDERED_EXPR;
2575 case COMPCODE_UNORD:
2576 return UNORDERED_EXPR;
2594 /* Return a tree for the comparison which is the combination of
2595 doing the AND or OR (depending on CODE) of the two operations LCODE
2596 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2597 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2598 if this makes the transformation invalid. */
2601 combine_comparisons (enum tree_code code, enum tree_code lcode,
2602 enum tree_code rcode, tree truth_type,
2603 tree ll_arg, tree lr_arg)
2605 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2606 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2607 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2608 enum comparison_code compcode;
/* In the bit encoding, AND/OR of comparisons is plain bitwise AND/OR.  */
2612 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2613 compcode = lcompcode & rcompcode;
2616 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2617 compcode = lcompcode | rcompcode;
2626 /* Eliminate unordered comparisons, as well as LTGT and ORD
2627 which are not used unless the mode has NaNs. */
2628 compcode &= ~COMPCODE_UNORD;
2629 if (compcode == COMPCODE_LTGT)
2630 compcode = COMPCODE_NE;
2631 else if (compcode == COMPCODE_ORD)
2632 compcode = COMPCODE_TRUE;
2634 else if (flag_trapping_math)
2636 /* Check that the original operation and the optimized ones will trap
2637 under the same condition. */
2638 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2639 && (lcompcode != COMPCODE_EQ)
2640 && (lcompcode != COMPCODE_ORD);
2641 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2642 && (rcompcode != COMPCODE_EQ)
2643 && (rcompcode != COMPCODE_ORD);
2644 bool trap = (compcode & COMPCODE_UNORD) == 0
2645 && (compcode != COMPCODE_EQ)
2646 && (compcode != COMPCODE_ORD);
2648 /* In a short-circuited boolean expression the LHS might be
2649 such that the RHS, if evaluated, will never trap. For
2650 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2651 if neither x nor y is NaN. (This is a mixed blessing: for
2652 example, the expression above will never trap, hence
2653 optimizing it to x < y would be invalid). */
2654 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2655 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2658 /* If the comparison was short-circuited, and only the RHS
2659 trapped, we may now generate a spurious trap. */
2661 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2664 /* If we changed the conditions that cause a trap, we lose. */
2665 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants; otherwise emit the
   single comparison the combined bitmask denotes.  */
2669 if (compcode == COMPCODE_TRUE)
2670 return constant_boolean_node (true, truth_type);
2671 else if (compcode == COMPCODE_FALSE)
2672 return constant_boolean_node (false, truth_type);
2674 return fold_build2 (compcode_to_comparison (compcode),
2675 truth_type, ll_arg, lr_arg);
2678 /* Return nonzero if CODE is a tree code that represents a truth value. */
/* NOTE(review): the return-type line is elided from this listing.  A truth
   value here is any comparison class code or a logical TRUTH_* operator.  */
2681 truth_value_p (enum tree_code code)
2683 return (TREE_CODE_CLASS (code) == tcc_comparison
2684 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2685 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2686 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2689 /* Return nonzero if two operands (typically of the same tree node)
2690 are necessarily equal. If either argument has side-effects this
2691 function returns zero. FLAGS modifies behavior as follows:
2693 If OEP_ONLY_CONST is set, only return nonzero for constants.
2694 This function tests whether the operands are indistinguishable;
2695 it does not test whether they are equal using C's == operation.
2696 The distinction is important for IEEE floating point, because
2697 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2698 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2700 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2701 even though it may hold multiple values during a function.
2702 This is because a GCC tree node guarantees that nothing else is
2703 executed between the evaluation of its "operands" (which may often
2704 be evaluated in arbitrary order). Hence if the operands themselves
2705 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2706 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2707 unset means assuming isochronic (or instantaneous) tree equivalence.
2708 Unless comparing arbitrary expression trees, such as from different
2709 statements, this flag can usually be left unset.
2711 If OEP_PURE_SAME is set, then pure functions with identical arguments
2712 are considered the same. It is used when the caller has other ways
2713 to ensure that global memory is unchanged in between. */
/* NOTE(review): this listing is heavily elided -- the return-type line,
   many braces, several case labels, default labels and return-0 fall-through
   lines are missing from view.  Comments annotate only the visible code.  */
2716 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2718 /* If either is ERROR_MARK, they aren't equal. */
2719 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2722 /* If both types don't have the same signedness, then we can't consider
2723 them equal. We must check this before the STRIP_NOPS calls
2724 because they may change the signedness of the arguments. */
2725 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2728 /* If both types don't have the same precision, then it is not safe
2730 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2736 /* In case both args are comparisons but with different comparison
2737 code, try to swap the comparison operands of one arg to produce
2738 a match and compare that variant. */
2739 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2740 && COMPARISON_CLASS_P (arg0)
2741 && COMPARISON_CLASS_P (arg1))
/* E.g. (a < b) matches (b > a): compare arg0 against arg1 with its
   operands swapped under the swapped comparison code.  */
2743 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2745 if (TREE_CODE (arg0) == swap_code)
2746 return operand_equal_p (TREE_OPERAND (arg0, 0),
2747 TREE_OPERAND (arg1, 1), flags)
2748 && operand_equal_p (TREE_OPERAND (arg0, 1),
2749 TREE_OPERAND (arg1, 0), flags);
2752 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2753 /* This is needed for conversions and for COMPONENT_REF.
2754 Might as well play it safe and always test this. */
2755 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2756 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2757 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2760 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2761 We don't care about side effects in that case because the SAVE_EXPR
2762 takes care of that for us. In all other cases, two expressions are
2763 equal if they have no side effects. If we have two identical
2764 expressions with side effects that should be treated the same due
2765 to the only side effects being identical SAVE_EXPR's, that will
2766 be detected in the recursive calls below. */
2767 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2768 && (TREE_CODE (arg0) == SAVE_EXPR
2769 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2772 /* Next handle constant cases, those for which we can return 1 even
2773 if ONLY_CONST is set. */
2774 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2775 switch (TREE_CODE (arg0))
2778 return tree_int_cst_equal (arg0, arg1);
/* REAL_CST: identical bit patterns, or both zero when signed zeros
   are not honored for this mode.  */
2781 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2782 TREE_REAL_CST (arg1)))
2786 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2788 /* If we do not distinguish between signed and unsigned zero,
2789 consider them equal. */
2790 if (real_zerop (arg0) && real_zerop (arg1))
/* VECTOR_CST: walk both element chains in lock step.  */
2799 v1 = TREE_VECTOR_CST_ELTS (arg0);
2800 v2 = TREE_VECTOR_CST_ELTS (arg1);
2803 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2806 v1 = TREE_CHAIN (v1);
2807 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: compare real and imaginary parts separately.  */
2814 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2816 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: same length and same bytes.  */
2820 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2821 && ! memcmp (TREE_STRING_POINTER (arg0),
2822 TREE_STRING_POINTER (arg1),
2823 TREE_STRING_LENGTH (arg0)));
2826 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2832 if (flags & OEP_ONLY_CONST)
2835 /* Define macros to test an operand from arg0 and arg1 for equality and a
2836 variant that allows null and views null as being different from any
2837 non-null value. In the latter case, if either is null, the both
2838 must be; otherwise, do the normal comparison. */
2839 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2840 TREE_OPERAND (arg1, N), flags)
2842 #define OP_SAME_WITH_NULL(N) \
2843 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2844 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2846 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2849 /* Two conversions are equal only if signedness and modes match. */
2850 switch (TREE_CODE (arg0))
2854 case FIX_TRUNC_EXPR:
2855 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2856 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2866 case tcc_comparison:
2868 if (OP_SAME (0) && OP_SAME (1))
2871 /* For commutative ops, allow the other order. */
2872 return (commutative_tree_code (TREE_CODE (arg0))
2873 && operand_equal_p (TREE_OPERAND (arg0, 0),
2874 TREE_OPERAND (arg1, 1), flags)
2875 && operand_equal_p (TREE_OPERAND (arg0, 1),
2876 TREE_OPERAND (arg1, 0), flags));
2879 /* If either of the pointer (or reference) expressions we are
2880 dereferencing contain a side effect, these cannot be equal. */
2881 if (TREE_SIDE_EFFECTS (arg0)
2882 || TREE_SIDE_EFFECTS (arg1))
2885 switch (TREE_CODE (arg0))
2888 case ALIGN_INDIRECT_REF:
2889 case MISALIGNED_INDIRECT_REF:
2895 case ARRAY_RANGE_REF:
2896 /* Operands 2 and 3 may be null.
2897 Compare the array index by value if it is constant first as we
2898 may have different types but same value here. */
/* NOTE(review): the OP_SAME (0) conjunct line preceding this test is
   elided from the listing.  */
2900 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2901 TREE_OPERAND (arg1, 1))
2903 && OP_SAME_WITH_NULL (2)
2904 && OP_SAME_WITH_NULL (3));
2907 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2908 may be NULL when we're called to compare MEM_EXPRs. */
2909 return OP_SAME_WITH_NULL (0)
2911 && OP_SAME_WITH_NULL (2);
2914 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2920 case tcc_expression:
2921 switch (TREE_CODE (arg0))
2924 case TRUTH_NOT_EXPR:
2927 case TRUTH_ANDIF_EXPR:
2928 case TRUTH_ORIF_EXPR:
/* Short-circuit forms are not commutative: order matters.  */
2929 return OP_SAME (0) && OP_SAME (1);
2931 case TRUTH_AND_EXPR:
2933 case TRUTH_XOR_EXPR:
2934 if (OP_SAME (0) && OP_SAME (1))
2937 /* Otherwise take into account this is a commutative operation. */
2938 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2939 TREE_OPERAND (arg1, 1), flags)
2940 && operand_equal_p (TREE_OPERAND (arg0, 1),
2941 TREE_OPERAND (arg1, 0), flags));
2948 switch (TREE_CODE (arg0))
2951 /* If the CALL_EXPRs call different functions, then they
2952 clearly can not be equal. */
2953 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Only const (and, with OEP_PURE_SAME, pure) calls can compare
   equal; others may read or write global state.  */
2958 unsigned int cef = call_expr_flags (arg0);
2959 if (flags & OEP_PURE_SAME)
2960 cef &= ECF_CONST | ECF_PURE;
2967 /* Now see if all the arguments are the same. */
2969 call_expr_arg_iterator iter0, iter1;
/* NOTE(review): the declaration of a0/a1 is elided from the listing.  */
2971 for (a0 = first_call_expr_arg (arg0, &iter0),
2972 a1 = first_call_expr_arg (arg1, &iter1);
2974 a0 = next_call_expr_arg (&iter0),
2975 a1 = next_call_expr_arg (&iter1))
2976 if (! operand_equal_p (a0, a1, flags))
2979 /* If we get here and both argument lists are exhausted
2980 then the CALL_EXPRs are equal. */
2981 return ! (a0 || a1);
2987 case tcc_declaration:
2988 /* Consider __builtin_sqrt equal to sqrt. */
2989 return (TREE_CODE (arg0) == FUNCTION_DECL
2990 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2991 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2992 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
/* NOTE(review): the matching #undef OP_SAME line is elided here.  */
2999 #undef OP_SAME_WITH_NULL
3002 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3003 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3005 When in doubt, return 0. */
/* NOTE(review): the return-type line, braces and the trailing return-0
   path are elided from this listing.  */
3008 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3010 int unsignedp1, unsignedpo;
3011 tree primarg0, primarg1, primother;
3012 unsigned int correct_width;
/* Fast path: plain operand equality.  */
3014 if (operand_equal_p (arg0, arg1, 0))
3017 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3018 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3021 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3022 and see if the inner values are the same. This removes any
3023 signedness comparison, which doesn't matter here. */
3024 primarg0 = arg0, primarg1 = arg1;
3025 STRIP_NOPS (primarg0);
3026 STRIP_NOPS (primarg1);
3027 if (operand_equal_p (primarg0, primarg1, 0))
3030 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3031 actual comparison operand, ARG0.
3033 First throw away any conversions to wider types
3034 already present in the operands. */
3036 primarg1 = get_narrower (arg1, &unsignedp1);
3037 primother = get_narrower (other, &unsignedpo);
3039 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3040 if (unsignedp1 == unsignedpo
3041 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3042 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3044 tree type = TREE_TYPE (arg0);
3046 /* Make sure shorter operand is extended the right way
3047 to match the longer operand. */
3048 primarg1 = fold_convert (signed_or_unsigned_type_for
3049 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3051 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3058 /* See if ARG is an expression that is either a comparison or is performing
3059 arithmetic on comparisons. The comparisons must only be comparing
3060 two different values, which will be stored in *CVAL1 and *CVAL2; if
3061 they are nonzero it means that some operands have already been found.
3062 No variables may be used anywhere else in the expression except in the
3063 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3064 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3066 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): the return-type line, the switch header over `class',
   several return/brace lines and the final return paths are elided here.  */
3069 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3071 enum tree_code code = TREE_CODE (arg);
3072 enum tree_code_class class = TREE_CODE_CLASS (code);
3074 /* We can handle some of the tcc_expression cases here. */
/* Reclassify a few expression codes so the switch below can treat
   TRUTH_NOT_EXPR as unary and ANDIF/ORIF/COMPOUND as binary.  */
3075 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3077 else if (class == tcc_expression
3078 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3079 || code == COMPOUND_EXPR))
3082 else if (class == tcc_expression && code == SAVE_EXPR
3083 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3085 /* If we've already found a CVAL1 or CVAL2, this expression is
3086 two complex to handle. */
/* NOTE(review): "two complex" above is a pre-existing typo for
   "too complex"; left byte-identical by policy.  */
3087 if (*cval1 || *cval2)
3097 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3100 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3101 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3102 cval1, cval2, save_p));
3107 case tcc_expression:
3108 if (code == COND_EXPR)
3109 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3110 cval1, cval2, save_p)
3111 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3112 cval1, cval2, save_p)
3113 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3114 cval1, cval2, save_p));
3117 case tcc_comparison:
3118 /* First see if we can handle the first operand, then the second. For
3119 the second operand, we know *CVAL1 can't be zero. It must be that
3120 one side of the comparison is each of the values; test for the
3121 case where this isn't true by failing if the two operands
3124 if (operand_equal_p (TREE_OPERAND (arg, 0),
3125 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 into whichever of *CVAL1/*CVAL2 is free, or
   accept it if it matches one already recorded.  */
3129 *cval1 = TREE_OPERAND (arg, 0);
3130 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3132 else if (*cval2 == 0)
3133 *cval2 = TREE_OPERAND (arg, 0);
3134 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1.  */
3139 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3141 else if (*cval2 == 0)
3142 *cval2 = TREE_OPERAND (arg, 1);
3143 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3155 /* ARG is a tree that is known to contain just arithmetic operations and
3156 comparisons. Evaluate the operations in the tree substituting NEW0 for
3157 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): the tail of this comment ("... NEW1 and OLD1") and the
   return-type line are elided from the listing, as are the switch header,
   case labels for the unary/binary classes and several brace lines.  */
3161 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3163 tree type = TREE_TYPE (arg);
3164 enum tree_code code = TREE_CODE (arg);
3165 enum tree_code_class class = TREE_CODE_CLASS (code);
3167 /* We can handle some of the tcc_expression cases here. */
3168 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3170 else if (class == tcc_expression
3171 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary case: rebuild with the operand recursively substituted.  */
3177 return fold_build1 (code, type,
3178 eval_subst (TREE_OPERAND (arg, 0),
3179 old0, new0, old1, new1));
/* Binary case: substitute in both operands.  */
3182 return fold_build2 (code, type,
3183 eval_subst (TREE_OPERAND (arg, 0),
3184 old0, new0, old1, new1),
3185 eval_subst (TREE_OPERAND (arg, 1),
3186 old0, new0, old1, new1));
3188 case tcc_expression:
3192 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3195 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3198 return fold_build3 (code, type,
3199 eval_subst (TREE_OPERAND (arg, 0),
3200 old0, new0, old1, new1),
3201 eval_subst (TREE_OPERAND (arg, 1),
3202 old0, new0, old1, new1),
3203 eval_subst (TREE_OPERAND (arg, 2),
3204 old0, new0, old1, new1));
3208 /* Fall through - ??? */
3210 case tcc_comparison:
3212 tree arg0 = TREE_OPERAND (arg, 0);
3213 tree arg1 = TREE_OPERAND (arg, 1);
3215 /* We need to check both for exact equality and tree equality. The
3216 former will be true if the operand has a side-effect. In that
3217 case, we know the operand occurred exactly once. */
/* NOTE(review): the assignments performed on a match (e.g. arg0 = new0)
   are elided from the listing between these condition lines.  */
3219 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3221 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3224 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3226 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3229 return fold_build2 (code, type, arg0, arg1);
3237 /* Return a tree for the case when the result of an expression is RESULT
3238 converted to TYPE and OMITTED was previously an operand of the expression
3239 but is now not needed (e.g., we folded OMITTED * 0).
3241 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3242 the conversion of RESULT to TYPE. */
/* NOTE(review): the return-type line and braces are elided from the
   listing.  Keeps OMITTED's side effects alive via a COMPOUND_EXPR.  */
3245 omit_one_operand (tree type, tree result, tree omitted)
3247 tree t = fold_convert (type, result);
3249 if (TREE_SIDE_EFFECTS (omitted))
3250 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* non_lvalue prevents the folded result from being used as an lvalue.  */
3252 return non_lvalue (t);
3255 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
/* NOTE(review): return-type line and braces elided from the listing.
   Same contract as omit_one_operand, for pedantic (ISO) contexts.  */
3258 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3260 tree t = fold_convert (type, result);
3262 if (TREE_SIDE_EFFECTS (omitted))
3263 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3265 return pedantic_non_lvalue (t);
3268 /* Return a tree for the case when the result of an expression is RESULT
3269 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3270 of the expression but are now not needed.
3272 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3273 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3274 evaluated before OMITTED2. Otherwise, if neither has side effects,
3275 just do the conversion of RESULT to TYPE. */
/* NOTE(review): return-type line and braces elided from the listing.  */
3278 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3280 tree t = fold_convert (type, result);
/* Wrap innermost-last so OMITTED1 ends up evaluated before OMITTED2.  */
3282 if (TREE_SIDE_EFFECTS (omitted2))
3283 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3284 if (TREE_SIDE_EFFECTS (omitted1))
3285 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only mark as non-lvalue when no COMPOUND_EXPR wrapper was added.  */
3287 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3291 /* Return a simplified tree node for the truth-negation of ARG. This
3292 never alters ARG itself. We assume that ARG is an operation that
3293 returns a truth value (0 or 1).
3295 FIXME: one would think we would fold the result, but it causes
3296 problems with the dominator optimizer. */
/* NOTE(review): the return-type line, the switch header over `code',
   several case labels (e.g. for the constant and A & 1 cases) and brace
   lines are elided from this listing; comments cover visible code only.  */
3299 fold_truth_not_expr (tree arg)
3301 tree type = TREE_TYPE (arg);
3302 enum tree_code code = TREE_CODE (arg);
3304 /* If this is a comparison, we can simply invert it, except for
3305 floating-point non-equality comparisons, in which case we just
3306 enclose a TRUTH_NOT_EXPR around what we have. */
3308 if (TREE_CODE_CLASS (code) == tcc_comparison)
3310 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With trapping math, inverting an ordered FP comparison could turn a
   trapping operation into a non-trapping one (or vice versa); bail.  */
3311 if (FLOAT_TYPE_P (op_type)
3312 && flag_trapping_math
3313 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3314 && code != NE_EXPR && code != EQ_EXPR)
3318 code = invert_tree_comparison (code,
3319 HONOR_NANS (TYPE_MODE (op_type)));
3320 if (code == ERROR_MARK)
3323 return build2 (code, type,
3324 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: !C is just the boolean "C is zero".  */
3331 return constant_boolean_node (integer_zerop (arg), type);
3333 case TRUTH_AND_EXPR:
/* De Morgan: !(a & b) == !a | !b.  */
3334 return build2 (TRUTH_OR_EXPR, type,
3335 invert_truthvalue (TREE_OPERAND (arg, 0)),
3336 invert_truthvalue (TREE_OPERAND (arg, 1)));
3339 return build2 (TRUTH_AND_EXPR, type,
3340 invert_truthvalue (TREE_OPERAND (arg, 0)),
3341 invert_truthvalue (TREE_OPERAND (arg, 1)));
3343 case TRUTH_XOR_EXPR:
3344 /* Here we can invert either operand. We invert the first operand
3345 unless the second operand is a TRUTH_NOT_EXPR in which case our
3346 result is the XOR of the first operand with the inside of the
3347 negation of the second operand. */
3349 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3350 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3351 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3353 return build2 (TRUTH_XOR_EXPR, type,
3354 invert_truthvalue (TREE_OPERAND (arg, 0)),
3355 TREE_OPERAND (arg, 1));
3357 case TRUTH_ANDIF_EXPR:
/* De Morgan also holds for the short-circuit forms.  */
3358 return build2 (TRUTH_ORIF_EXPR, type,
3359 invert_truthvalue (TREE_OPERAND (arg, 0)),
3360 invert_truthvalue (TREE_OPERAND (arg, 1)));
3362 case TRUTH_ORIF_EXPR:
3363 return build2 (TRUTH_ANDIF_EXPR, type,
3364 invert_truthvalue (TREE_OPERAND (arg, 0)),
3365 invert_truthvalue (TREE_OPERAND (arg, 1)));
3367 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
3368 return TREE_OPERAND (arg, 0);
3372 tree arg1 = TREE_OPERAND (arg, 1);
3373 tree arg2 = TREE_OPERAND (arg, 2);
3374 /* A COND_EXPR may have a throw as one operand, which
3375 then has void type. Just leave void operands
3377 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3378 VOID_TYPE_P (TREE_TYPE (arg1))
3379 ? arg1 : invert_truthvalue (arg1),
3380 VOID_TYPE_P (TREE_TYPE (arg2))
3381 ? arg2 : invert_truthvalue (arg2));
/* COMPOUND_EXPR: keep the first operand's effects, invert the value.  */
3385 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3386 invert_truthvalue (TREE_OPERAND (arg, 1)));
3388 case NON_LVALUE_EXPR:
3389 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversion case: can push the negation inside only for boolean.  */
3392 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3393 return build1 (TRUTH_NOT_EXPR, type, arg);
3397 return build1 (TREE_CODE (arg), type,
3398 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Visible only for operand-1 == 1: !(x & 1) becomes (x & 1) == 0.  */
3401 if (!integer_onep (TREE_OPERAND (arg, 1)))
3403 return build2 (EQ_EXPR, type, arg,
3404 build_int_cst (type, 0));
3407 return build1 (TRUTH_NOT_EXPR, type, arg);
3409 case CLEANUP_POINT_EXPR:
3410 return build1 (CLEANUP_POINT_EXPR, type,
3411 invert_truthvalue (TREE_OPERAND (arg, 0)));
3420 /* Return a simplified tree node for the truth-negation of ARG. This
3421 never alters ARG itself. We assume that ARG is an operation that
3422 returns a truth value (0 or 1).
3424 FIXME: one would think we would fold the result, but it causes
3425 problems with the dominator optimizer. */
/* NOTE(review): return-type line, the declaration of `tem', braces and the
   final return are elided from the listing.  Wrapper over
   fold_truth_not_expr that falls back to an explicit TRUTH_NOT_EXPR.  */
3428 invert_truthvalue (tree arg)
3432 if (TREE_CODE (arg) == ERROR_MARK)
3435 tem = fold_truth_not_expr (arg);
/* NOTE(review): the null-check on `tem' guarding this fallback appears
   to be elided between these two lines.  */
3437 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3442 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3443 operands are another bit-wise operation with a common input. If so,
3444 distribute the bit operations to save an operation and possibly two if
3445 constants are involved. For example, convert
3446 (A | B) & (A | C) into A | (B & C)
3447 Further simplification will occur if B and C are constants.
3449 If this optimization cannot be done, 0 will be returned. */
/* NOTE(review): the return-type line, the common/left/right declarations,
   the return-0 path and brace lines are elided from this listing.  */
3452 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same inner bit operation, different from
   CODE itself, and that inner op must be AND or IOR.  */
3457 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3458 || TREE_CODE (arg0) == code
3459 || (TREE_CODE (arg0) != BIT_AND_EXPR
3460 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try each of the four operand pairings to find the shared input.  */
3463 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3465 common = TREE_OPERAND (arg0, 0);
3466 left = TREE_OPERAND (arg0, 1);
3467 right = TREE_OPERAND (arg1, 1);
3469 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3471 common = TREE_OPERAND (arg0, 0);
3472 left = TREE_OPERAND (arg0, 1);
3473 right = TREE_OPERAND (arg1, 0);
3475 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3477 common = TREE_OPERAND (arg0, 1);
3478 left = TREE_OPERAND (arg0, 0);
3479 right = TREE_OPERAND (arg1, 1);
3481 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3483 common = TREE_OPERAND (arg0, 1);
3484 left = TREE_OPERAND (arg0, 0);
3485 right = TREE_OPERAND (arg1, 0);
/* Rebuild as INNER-OP (common, CODE (left, right)).  */
3490 return fold_build2 (TREE_CODE (arg0), type, common,
3491 fold_build2 (code, type, left, right));
3494 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3495 with code CODE. This optimization is unsafe. */
/* NOTE(review): the return-type line, braces, a condition line before the
   first transform and the final return are elided from this listing.
   "Unsafe" because it changes FP rounding/exception behavior.  */
3497 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3499 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3500 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3502 /* (A / C) +- (B / C) -> (A +- B) / C. */
3504 && operand_equal_p (TREE_OPERAND (arg0, 1),
3505 TREE_OPERAND (arg1, 1), 0))
3506 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3507 fold_build2 (code, type,
3508 TREE_OPERAND (arg0, 0),
3509 TREE_OPERAND (arg1, 0)),
3510 TREE_OPERAND (arg0, 1));
3512 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3513 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3514 TREE_OPERAND (arg1, 0), 0)
3515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3516 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3518 REAL_VALUE_TYPE r0, r1;
3519 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3520 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Compute 1/C1 and 1/C2 at compile time, then combine with CODE.
   NOTE(review): the if (!mul0)/if (!mul1) guards appear elided here.  */
3522 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3524 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3525 real_arithmetic (&r0, code, &r0, &r1);
3526 return fold_build2 (MULT_EXPR, type,
3527 TREE_OPERAND (arg0, 0),
3528 build_real (type, r0));
3534 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3535 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
/* NOTE(review): the return-type line, a trailing `int unsignedp' parameter
   line, the declaration of `result', braces and the final return are
   elided from this listing.  */
3538 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Whole-object shortcut: a zero-offset field covering every bit of an
   integral or pointer INNER is just a conversion, no BIT_FIELD_REF needed.
   NOTE(review): the bitpos == 0 guard appears elided before this.  */
3545 tree size = TYPE_SIZE (TREE_TYPE (inner));
3546 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3547 || POINTER_TYPE_P (TREE_TYPE (inner)))
3548 && host_integerp (size, 0)
3549 && tree_low_cst (size, 0) == bitsize)
3550 return fold_convert (type, inner);
3553 result = build3 (BIT_FIELD_REF, type, inner,
3554 size_int (bitsize), bitsize_int (bitpos));
3556 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3561 /* Optimize a bit-field compare.
3563 There are two cases: First is a compare against a constant and the
3564 second is a comparison of two items where the fields are at the same
3565 bit position relative to the start of a chunk (byte, halfword, word)
3566 large enough to contain it. In these cases we can avoid the shift
3567 implicit in bitfield extractions.
3569 For constants, we emit a compare of the shifted constant with the
3570 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3571 compared. For two fields at the same position, we do the ANDs with the
3572 similar mask and compare the result of the ANDs.
3574 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3575 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3576 are the left and right operands of the comparison, respectively.
3578 If the optimization described above can be done, we return the resulting
3579 tree. Otherwise we return zero. */
/* NOTE(review): this listing is elided -- the return-type line, the
   lhs/rhs parameter line, declarations of offset/mask, several if-guards,
   return statements and brace lines are missing from view.  */
3582 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3585 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3586 tree type = TREE_TYPE (lhs);
3587 tree signed_type, unsigned_type;
3588 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3589 enum machine_mode lmode, rmode, nmode;
3590 int lunsignedp, runsignedp;
3591 int lvolatilep = 0, rvolatilep = 0;
3592 tree linner, rinner = NULL_TREE;
3596 /* Get all the information about the extractions being done. If the bit size
3597 if the same as the size of the underlying object, we aren't doing an
3598 extraction at all and so can do nothing. We also don't want to
3599 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3600 then will no longer be able to replace it. */
3601 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3602 &lunsignedp, &lvolatilep, false);
3603 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3604 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3609 /* If this is not a constant, we can only do something if bit positions,
3610 sizes, and signedness are the same. */
3611 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3612 &runsignedp, &rvolatilep, false);
3614 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3615 || lunsignedp != runsignedp || offset != 0
3616 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3620 /* See if we can find a mode to refer to this field. We should be able to,
3621 but fail if we can't. */
3622 nmode = get_best_mode (lbitsize, lbitpos,
3623 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3624 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3625 TYPE_ALIGN (TREE_TYPE (rinner))),
3626 word_mode, lvolatilep || rvolatilep);
3627 if (nmode == VOIDmode)
3630 /* Set signed and unsigned types of the precision of this mode for the
3632 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3633 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3635 /* Compute the bit position and size for the new reference and our offset
3636 within it. If the new reference is the same size as the original, we
3637 won't optimize anything, so return zero. */
3638 nbitsize = GET_MODE_BITSIZE (nmode);
/* Round the field's position down to a mode-sized boundary.  */
3639 nbitpos = lbitpos & ~ (nbitsize - 1);
3641 if (nbitsize == lbitsize)
3644 if (BYTES_BIG_ENDIAN)
3645 lbitpos = nbitsize - lbitsize - lbitpos;
3647 /* Make the mask to be used against the extracted field. */
/* Build an all-ones value, then shift left and back right to leave
   exactly LBITSIZE ones positioned at LBITPOS.  */
3648 mask = build_int_cst_type (unsigned_type, -1);
3649 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3650 mask = const_binop (RSHIFT_EXPR, mask,
3651 size_int (nbitsize - lbitsize - lbitpos), 0);
3654 /* If not comparing with constant, just rework the comparison
/* Two-field case: compare (word0 & mask) against (word1 & mask).
   NOTE(review): the make_bit_field_ref argument lines are elided.  */
3656 return fold_build2 (code, compare_type,
3657 fold_build2 (BIT_AND_EXPR, unsigned_type,
3658 make_bit_field_ref (linner,
3663 fold_build2 (BIT_AND_EXPR, unsigned_type,
3664 make_bit_field_ref (rinner,
3670 /* Otherwise, we are handling the constant case. See if the constant is too
3671 big for the field. Warn and return a tree of for 0 (false) if so. We do
3672 this not only for its own sake, but to avoid having to test for this
3673 error case below. If we didn't, we might generate wrong code.
3675 For unsigned fields, the constant shifted right by the field length should
3676 be all zero. For signed fields, the high-order bits should agree with
/* NOTE(review): the tail of the comment and the lunsignedp branch header
   are elided here.  */
3681 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3682 fold_convert (unsigned_type, rhs),
3683 size_int (lbitsize), 0)))
3685 warning (0, "comparison is always %d due to width of bit-field",
3687 return constant_boolean_node (code == NE_EXPR, compare_type);
3692 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3693 size_int (lbitsize - 1), 0);
3694 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3696 warning (0, "comparison is always %d due to width of bit-field",
3698 return constant_boolean_node (code == NE_EXPR, compare_type);
3702 /* Single-bit compares should always be against zero. */
3703 if (lbitsize == 1 && ! integer_zerop (rhs))
3705 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3706 rhs = build_int_cst (type, 0);
3709 /* Make a new bitfield reference, shift the constant over the
3710 appropriate number of bits and mask it with the computed mask
3711 (in case this was a signed field). If we changed it, make a new one. */
3712 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* NOTE(review): the lvolatilep guard for these flag settings appears
   elided before this point.  */
3715 TREE_SIDE_EFFECTS (lhs) = 1;
3716 TREE_THIS_VOLATILE (lhs) = 1;
3719 rhs = const_binop (BIT_AND_EXPR,
3720 const_binop (LSHIFT_EXPR,
3721 fold_convert (unsigned_type, rhs),
3722 size_int (lbitpos), 0),
3725 return build2 (code, compare_type,
3726 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3730 /* Subroutine for fold_truthop: decode a field reference.
3732 If EXP is a comparison reference, we return the innermost reference.
3734 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3735 set to the starting bit number.
3737 If the innermost field can be completely contained in a mode-sized
3738 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3740 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3741 otherwise it is not changed.
3743 *PUNSIGNEDP is set to the signedness of the field.
3745 *PMASK is set to the mask used. This is either contained in a
3746 BIT_AND_EXPR or derived from the width of the field.
3748 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3750 Return 0 if this is not a component reference or is one that we can't
3751 do anything with. */
/* NOTE(review): the return-type line, the and_mask/unsigned_type
   declarations, several return-0 paths, *pmask store and brace/return
   lines are elided from this listing.  */
3754 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3755 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3756 int *punsignedp, int *pvolatilep,
3757 tree *pmask, tree *pand_mask)
3759 tree outer_type = 0;
3761 tree mask, inner, offset;
3763 unsigned int precision;
3765 /* All the optimizations using this function assume integer fields.
3766 There are problems with FP fields since the type_for_size call
3767 below can fail for, e.g., XFmode. */
3768 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3771 /* We are interested in the bare arrangement of bits, so strip everything
3772 that doesn't affect the machine mode. However, record the type of the
3773 outermost expression if it may matter below. */
3774 if (TREE_CODE (exp) == NOP_EXPR
3775 || TREE_CODE (exp) == CONVERT_EXPR
3776 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3777 outer_type = TREE_TYPE (exp);
/* If EXP is masked with a constant, peel the mask off and remember it
   so it can be merged into the field mask below.  */
3780 if (TREE_CODE (exp) == BIT_AND_EXPR)
3782 and_mask = TREE_OPERAND (exp, 1);
3783 exp = TREE_OPERAND (exp, 0);
3784 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3785 if (TREE_CODE (and_mask) != INTEGER_CST)
3789 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3790 punsignedp, pvolatilep, false);
3791 if ((inner == exp && and_mask == 0)
3792 || *pbitsize < 0 || offset != 0
3793 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3796 /* If the number of bits in the reference is the same as the bitsize of
3797 the outer type, then the outer type gives the signedness. Otherwise
3798 (in case of a small bitfield) the signedness is unchanged. */
3799 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3800 *punsignedp = TYPE_UNSIGNED (outer_type);
3802 /* Compute the mask to access the bitfield. */
3803 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3804 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value shifted left then right leaves *pbitsize low-order ones.  */
3806 mask = build_int_cst_type (unsigned_type, -1);
3808 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3809 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3811 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
/* NOTE(review): the `if (and_mask != 0)' guard appears elided here.  */
3813 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3814 fold_convert (unsigned_type, and_mask), mask);
3817 *pand_mask = and_mask;
3821 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3825 all_ones_mask_p (tree mask, int size)
3827 tree type = TREE_TYPE (mask);
3828 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed variant of TYPE, then shift
   it left and back right by PRECISION - SIZE bits, leaving exactly SIZE
   low-order one bits.  MASK qualifies iff it equals that value.  */
3831 tmask = build_int_cst_type (signed_type_for (type), -1);
3834 tree_int_cst_equal (mask,
3835 const_binop (RSHIFT_EXPR,
3836 const_binop (LSHIFT_EXPR, tmask,
3837 size_int (precision - size),
3839 size_int (precision - size), 0));
3842 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3843 represents the sign bit of EXP's type. If EXP represents a sign
3844 or zero extension, also test VAL against the unextended type.
3845 The return value is the (sub)expression whose sign bit is VAL,
3846 or NULL_TREE otherwise. */
3849 sign_bit_p (tree exp, tree val)
/* The sign-bit constant is built as a two-word (high/low HOST_WIDE_INT)
   pair, mirroring the INTEGER_CST representation used below.  */
3851 unsigned HOST_WIDE_INT mask_lo, lo;
3852 HOST_WIDE_INT mask_hi, hi;
3856 /* Tree EXP must have an integral type. */
3857 t = TREE_TYPE (exp);
3858 if (! INTEGRAL_TYPE_P (t))
3861 /* Tree VAL must be an integer constant. */
3862 if (TREE_CODE (val) != INTEGER_CST
3863 || TREE_OVERFLOW (val))
/* Wide types place the sign bit in the high word; narrow types place it
   in the low word.  The unused word and masks are presumably zeroed on
   lines not visible in this chunk.  */
3866 width = TYPE_PRECISION (t);
3867 if (width > HOST_BITS_PER_WIDE_INT)
3869 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3872 mask_hi = ((unsigned HOST_WIDE_INT) -1
3873 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3879 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3882 mask_lo = ((unsigned HOST_WIDE_INT) -1
3883 >> (HOST_BITS_PER_WIDE_INT - width));
3886 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3887 treat VAL as if it were unsigned. */
3888 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3889 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3892 /* Handle extension from a narrower type. */
3893 if (TREE_CODE (exp) == NOP_EXPR
3894 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3895 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3900 /* Subroutine for fold_truthop: determine if an operand is simple enough
3901 to be evaluated unconditionally. */
3904 simple_operand_p (tree exp)
3906 /* Strip any conversions that don't change the machine mode. */
/* An operand is "simple" when evaluating it unconditionally cannot trap,
   fault, or be expensive: constants, SSA names, and plain local,
   non-volatile, non-addressable declarations.  */
3909 return (CONSTANT_CLASS_P (exp)
3910 || TREE_CODE (exp) == SSA_NAME
3912 && ! TREE_ADDRESSABLE (exp)
3913 && ! TREE_THIS_VOLATILE (exp)
3914 && ! DECL_NONLOCAL (exp)
3915 /* Don't regard global variables as simple. They may be
3916 allocated in ways unknown to the compiler (shared memory,
3917 #pragma weak, etc). */
3918 && ! TREE_PUBLIC (exp)
3919 && ! DECL_EXTERNAL (exp)
3920 /* Loading a static variable is unduly expensive, but global
3921 registers aren't expensive. */
3922 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3925 /* The following functions are subroutines to fold_range_test and allow it to
3926 try to change a logical combination of comparisons into a range test.
3929 X == 2 || X == 3 || X == 4 || X == 5
3933 (unsigned) (X - 2) <= 3
3935 We describe each set of comparisons as being either inside or outside
3936 a range, using a variable named like IN_P, and then describe the
3937 range with a lower and upper bound. If one of the bounds is omitted,
3938 it represents either the highest or lowest value of the type.
3940 In the comments below, we represent a range by two numbers in brackets
3941 preceded by a "+" to designate being inside that range, or a "-" to
3942 designate being outside that range, so the condition can be inverted by
3943 flipping the prefix. An omitted bound is represented by a "-". For
3944 example, "- [-, 10]" means being outside the range starting at the lowest
3945 possible value and ending at 10, in other words, being greater than 10.
3946 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3949 We set up things so that the missing bounds are handled in a consistent
3950 manner so neither a missing bound nor "true" and "false" need to be
3951 handled using a special case. */
3953 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3954 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3955 and UPPER1_P are nonzero if the respective argument is an upper bound
3956 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3957 must be specified for a comparison. ARG1 will be converted to ARG0's
3958 type if both are specified. */
3961 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3962 tree arg1, int upper1_p)
3968 /* If neither arg represents infinity, do the normal operation.
3969 Else, if not a comparison, return infinity. Else handle the special
3970 comparison rules. Note that most of the cases below won't occur, but
3971 are handled for consistency. */
3973 if (arg0 != 0 && arg1 != 0)
3975 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3976 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a fully-folded constant result is useful to callers; anything
   else is reported as "don't know" (0).  */
3978 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3981 if (TREE_CODE_CLASS (code) != tcc_comparison)
3984 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3985 for neither. In real maths, we cannot assume open ended ranges are
3986 the same. But, this is computer arithmetic, where numbers are finite.
3987 We can therefore make the transformation of any unbounded range with
3988 the value Z, Z being greater than any representable number. This permits
3989 us to treat unbounded ranges as equal. */
3990 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3991 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* With -infinity mapped to -1, finite values to 0, and +infinity to 1,
   every comparison of bounds reduces to an integer comparison of the
   SGN codes.  (The switch/case lines dispatching on CODE are not visible
   in this chunk.)  */
3995 result = sgn0 == sgn1;
3998 result = sgn0 != sgn1;
4001 result = sgn0 < sgn1;
4004 result = sgn0 <= sgn1;
4007 result = sgn0 > sgn1;
4010 result = sgn0 >= sgn1;
4016 return constant_boolean_node (result, type);
4019 /* Given EXP, a logical expression, set the range it is testing into
4020 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4021 actually being tested. *PLOW and *PHIGH will be made of the same
4022 type as the returned expression. If EXP is not a comparison, we
4023 will most likely not be returning a useful value and range. Set
4024 *STRICT_OVERFLOW_P to true if the return value is only valid
4025 because signed overflow is undefined; otherwise, do not change
4026 *STRICT_OVERFLOW_P. */
4029 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4030 bool *strict_overflow_p)
/* NOTE(review): the enclosing "while (1) { switch (code) ... }" scaffolding
   and several case labels are on lines not visible in this chunk; the
   comments added below annotate only the visible logic.  */
4032 enum tree_code code;
4033 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4034 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4036 tree low, high, n_low, n_high;
4038 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4039 and see if we can refine the range. Some of the cases below may not
4040 happen, but it doesn't seem worth worrying about this. We "continue"
4041 the outer loop when we've changed something; otherwise we "break"
4042 the switch, which will "break" the while. */
4045 low = high = build_int_cst (TREE_TYPE (exp), 0);
4049 code = TREE_CODE (exp);
4050 exp_type = TREE_TYPE (exp);
/* Pick up the operands relevant to this tree-code class; arg1 exists
   only for binary/comparison codes and multi-operand expressions.  */
4052 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4054 if (TREE_OPERAND_LENGTH (exp) > 0)
4055 arg0 = TREE_OPERAND (exp, 0);
4056 if (TREE_CODE_CLASS (code) == tcc_comparison
4057 || TREE_CODE_CLASS (code) == tcc_unary
4058 || TREE_CODE_CLASS (code) == tcc_binary)
4059 arg0_type = TREE_TYPE (arg0);
4060 if (TREE_CODE_CLASS (code) == tcc_binary
4061 || TREE_CODE_CLASS (code) == tcc_comparison
4062 || (TREE_CODE_CLASS (code) == tcc_expression
4063 && TREE_OPERAND_LENGTH (exp) > 1))
4064 arg1 = TREE_OPERAND (exp, 1);
4069 case TRUTH_NOT_EXPR:
4070 in_p = ! in_p, exp = arg0;
4073 case EQ_EXPR: case NE_EXPR:
4074 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4075 /* We can only do something if the range is testing for zero
4076 and if the second operand is an integer constant. Note that
4077 saying something is "in" the range we make is done by
4078 complementing IN_P since it will set in the initial case of
4079 being not equal to zero; "out" is leaving it alone. */
4080 if (low == 0 || high == 0
4081 || ! integer_zerop (low) || ! integer_zerop (high)
4082 || TREE_CODE (arg1) != INTEGER_CST)
/* Translate each comparison against the constant C into a signed range
   (the "+"/"-" notation is explained in the commentary above).  */
4087 case NE_EXPR: /* - [c, c] */
4090 case EQ_EXPR: /* + [c, c] */
4091 in_p = ! in_p, low = high = arg1;
4093 case GT_EXPR: /* - [-, c] */
4094 low = 0, high = arg1;
4096 case GE_EXPR: /* + [c, -] */
4097 in_p = ! in_p, low = arg1, high = 0;
4099 case LT_EXPR: /* - [c, -] */
4100 low = arg1, high = 0;
4102 case LE_EXPR: /* + [-, c] */
4103 in_p = ! in_p, low = 0, high = arg1;
4109 /* If this is an unsigned comparison, we also know that EXP is
4110 greater than or equal to zero. We base the range tests we make
4111 on that fact, so we record it here so we can parse existing
4112 range tests. We test arg0_type since often the return type
4113 of, e.g. EQ_EXPR, is boolean. */
4114 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4118 build_int_cst (arg0_type, 0),
4122 in_p = n_in_p, low = n_low, high = n_high;
4124 /* If the high bound is missing, but we have a nonzero low
4125 bound, reverse the range so it goes from zero to the low bound
4127 if (high == 0 && low && ! integer_zerop (low))
4130 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4131 integer_one_node, 0);
4132 low = build_int_cst (arg0_type, 0);
4140 /* (-x) IN [a,b] -> x in [-b, -a] */
4141 n_low = range_binop (MINUS_EXPR, exp_type,
4142 build_int_cst (exp_type, 0),
4144 n_high = range_binop (MINUS_EXPR, exp_type,
4145 build_int_cst (exp_type, 0),
4147 low = n_low, high = n_high;
/* Rewrite ~x as -x - 1 so the PLUS/MINUS case below can handle it.  */
4153 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4154 build_int_cst (exp_type, 1));
4157 case PLUS_EXPR: case MINUS_EXPR:
4158 if (TREE_CODE (arg1) != INTEGER_CST)
4161 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4162 move a constant to the other side. */
4163 if (!TYPE_UNSIGNED (arg0_type)
4164 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4167 /* If EXP is signed, any overflow in the computation is undefined,
4168 so we don't worry about it so long as our computations on
4169 the bounds don't overflow. For unsigned, overflow is defined
4170 and this is exactly the right thing. */
4171 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4172 arg0_type, low, 0, arg1, 0);
4173 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4174 arg0_type, high, 1, arg1, 0);
4175 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4176 || (n_high != 0 && TREE_OVERFLOW (n_high)))
/* The transformation relied on signed overflow being undefined; record
   that so callers can issue -Wstrict-overflow warnings.  */
4179 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4180 *strict_overflow_p = true;
4182 /* Check for an unsigned range which has wrapped around the maximum
4183 value thus making n_high < n_low, and normalize it. */
4184 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4186 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4187 integer_one_node, 0);
4188 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4189 integer_one_node, 0);
4191 /* If the range is of the form +/- [ x+1, x ], we won't
4192 be able to normalize it. But then, it represents the
4193 whole range or the empty set, so make it
4195 if (tree_int_cst_equal (n_low, low)
4196 && tree_int_cst_equal (n_high, high))
4202 low = n_low, high = n_high;
4207 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4208 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4211 if (! INTEGRAL_TYPE_P (arg0_type)
4212 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4213 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4216 n_low = low, n_high = high;
4219 n_low = fold_convert (arg0_type, n_low);
4222 n_high = fold_convert (arg0_type, n_high);
4225 /* If we're converting arg0 from an unsigned type, to exp,
4226 a signed type, we will be doing the comparison as unsigned.
4227 The tests above have already verified that LOW and HIGH
4230 So we have to ensure that we will handle large unsigned
4231 values the same way that the current signed bounds treat
4234 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4237 tree equiv_type = lang_hooks.types.type_for_mode
4238 (TYPE_MODE (arg0_type), 1);
4240 /* A range without an upper bound is, naturally, unbounded.
4241 Since convert would have cropped a very large value, use
4242 the max value for the destination type. */
4244 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4245 : TYPE_MAX_VALUE (arg0_type);
4247 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4248 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4249 fold_convert (arg0_type,
4251 build_int_cst (arg0_type, 1));
4253 /* If the low bound is specified, "and" the range with the
4254 range for which the original unsigned value will be
4258 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4259 1, n_low, n_high, 1,
4260 fold_convert (arg0_type,
4265 in_p = (n_in_p == in_p);
4269 /* Otherwise, "or" the range with the range of the input
4270 that will be interpreted as negative. */
4271 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4272 0, n_low, n_high, 1,
4273 fold_convert (arg0_type,
4278 in_p = (in_p != n_in_p);
4283 low = n_low, high = n_high;
4293 /* If EXP is a constant, we can evaluate whether this is true or false. */
4294 if (TREE_CODE (exp) == INTEGER_CST)
4296 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4298 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4304 *pin_p = in_p, *plow = low, *phigh = high;
4308 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4309 type, TYPE, return an expression to test if EXP is in (or out of, depending
4310 on IN_P) the range. Return 0 if the test couldn't be created. */
4313 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4315 tree etype = TREE_TYPE (exp);
4318 #ifdef HAVE_canonicalize_funcptr_for_compare
4319 /* Disable this optimization for function pointer expressions
4320 on targets that require function pointer canonicalization. */
4321 if (HAVE_canonicalize_funcptr_for_compare
4322 && TREE_CODE (etype) == POINTER_TYPE
4323 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built by inverting the corresponding
   "in range" test.  */
4329 value = build_range_check (type, exp, 1, low, high);
4331 return invert_truthvalue (value);
/* No bounds at all means the test is trivially true.  */
4336 if (low == 0 && high == 0)
4337 return build_int_cst (type, 1);
4340 return fold_build2 (LE_EXPR, type, exp,
4341 fold_convert (etype, high));
4344 return fold_build2 (GE_EXPR, type, exp,
4345 fold_convert (etype, low));
/* A degenerate range [c, c] is a simple equality test.  */
4347 if (operand_equal_p (low, high, 0))
4348 return fold_build2 (EQ_EXPR, type, exp,
4349 fold_convert (etype, low));
/* [0, high]: redo the check in the unsigned variant of the type, where
   it becomes a single unsigned <= comparison.  */
4351 if (integer_zerop (low))
4353 if (! TYPE_UNSIGNED (etype))
4355 etype = unsigned_type_for (etype);
4356 high = fold_convert (etype, high);
4357 exp = fold_convert (etype, exp);
4359 return build_range_check (type, exp, 1, 0, high);
4362 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4363 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4365 unsigned HOST_WIDE_INT lo;
/* Compute the signed maximum of the type's precision as an (hi, lo)
   word pair and compare it against HIGH.  */
4369 prec = TYPE_PRECISION (etype);
4370 if (prec <= HOST_BITS_PER_WIDE_INT)
4373 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4377 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4378 lo = (unsigned HOST_WIDE_INT) -1;
4381 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4383 if (TYPE_UNSIGNED (etype))
4385 etype = signed_type_for (etype);
4386 exp = fold_convert (etype, exp);
4388 return fold_build2 (GT_EXPR, type, exp,
4389 build_int_cst (etype, 0));
4393 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4394 This requires wrap-around arithmetics for the type of the expression. */
4395 switch (TREE_CODE (etype))
4398 /* There is no requirement that LOW be within the range of ETYPE
4399 if the latter is a subtype. It must, however, be within the base
4400 type of ETYPE. So be sure we do the subtraction in that type. */
4401 if (TREE_TYPE (etype))
4402 etype = TREE_TYPE (etype);
4407 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4408 TYPE_UNSIGNED (etype));
4415 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4416 if (TREE_CODE (etype) == INTEGER_TYPE
4417 && !TYPE_OVERFLOW_WRAPS (etype))
4419 tree utype, minv, maxv;
4421 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4422 for the type in question, as we rely on this here. */
4423 utype = unsigned_type_for (etype);
4424 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4425 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4426 integer_one_node, 1);
4427 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4429 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4436 high = fold_convert (etype, high);
4437 low = fold_convert (etype, low);
4438 exp = fold_convert (etype, exp);
4440 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointer arithmetic must go through POINTER_PLUS_EXPR with a sizetype
   offset, so negate LOW in sizetype and add it to EXP.  */
4443 if (POINTER_TYPE_P (etype))
4445 if (value != 0 && !TREE_OVERFLOW (value))
4447 low = fold_convert (sizetype, low);
4448 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4449 return build_range_check (type,
4450 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4451 1, build_int_cst (etype, 0), value);
4456 if (value != 0 && !TREE_OVERFLOW (value))
4457 return build_range_check (type,
4458 fold_build2 (MINUS_EXPR, etype, exp, low),
4459 1, build_int_cst (etype, 0), value);
4464 /* Return the predecessor of VAL in its type, handling the infinite case. */
4467 range_predecessor (tree val)
4469 tree type = TREE_TYPE (val);
/* The minimum value of an integral type has no predecessor; the early
   exit for that case is on a line not visible in this chunk.  */
4471 if (INTEGRAL_TYPE_P (type)
4472 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4475 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4478 /* Return the successor of VAL in its type, handling the infinite case. */
4481 range_successor (tree val)
4483 tree type = TREE_TYPE (val);
/* The maximum value of an integral type has no successor; the early
   exit for that case is on a line not visible in this chunk.  */
4485 if (INTEGRAL_TYPE_P (type)
4486 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4489 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4492 /* Given two ranges, see if we can merge them into one. Return 1 if we
4493 can, 0 if we can't. Set the output range into the specified parameters. */
4496 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4497 tree high0, int in1_p, tree low1, tree high1)
/* Compare the bounds of the two ranges; a missing bound (0) compares
   equal only to another missing bound of the same kind.  */
4505 int lowequal = ((low0 == 0 && low1 == 0)
4506 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4507 low0, 0, low1, 0)));
4508 int highequal = ((high0 == 0 && high1 == 0)
4509 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4510 high0, 1, high1, 1)));
4512 /* Make range 0 be the range that starts first, or ends last if they
4513 start at the same value. Swap them if it isn't. */
4514 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4517 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4518 high1, 1, high0, 1))))
4520 temp = in0_p, in0_p = in1_p, in1_p = temp;
4521 tem = low0, low0 = low1, low1 = tem;
4522 tem = high0, high0 = high1, high1 = tem;
4525 /* Now flag two cases, whether the ranges are disjoint or whether the
4526 second range is totally subsumed in the first. Note that the tests
4527 below are simplified by the ones above. */
4528 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4529 high0, 1, low1, 0));
4530 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4531 high1, 1, high0, 1));
4533 /* We now have four cases, depending on whether we are including or
4534 excluding the two ranges. */
4537 /* If they don't overlap, the result is false. If the second range
4538 is a subset it is the result. Otherwise, the range is from the start
4539 of the second to the end of the first. */
4541 in_p = 0, low = high = 0;
4543 in_p = 1, low = low1, high = high1;
4545 in_p = 1, low = low1, high = high0;
4548 else if (in0_p && ! in1_p)
4550 /* If they don't overlap, the result is the first range. If they are
4551 equal, the result is false. If the second range is a subset of the
4552 first, and the ranges begin at the same place, we go from just after
4553 the end of the second range to the end of the first. If the second
4554 range is not a subset of the first, or if it is a subset and both
4555 ranges end at the same place, the range starts at the start of the
4556 first range and ends just before the second range.
4557 Otherwise, we can't describe this as a single range. */
4559 in_p = 1, low = low0, high = high0;
4560 else if (lowequal && highequal)
4561 in_p = 0, low = high = 0;
4562 else if (subset && lowequal)
4564 low = range_successor (high1);
4569 /* We are in the weird situation where high0 > high1 but
4570 high1 has no successor. Punt. */
4574 else if (! subset || highequal)
4577 high = range_predecessor (low1);
4581 /* low0 < low1 but low1 has no predecessor. Punt. */
4589 else if (! in0_p && in1_p)
4591 /* If they don't overlap, the result is the second range. If the second
4592 is a subset of the first, the result is false. Otherwise,
4593 the range starts just after the first range and ends at the
4594 end of the second. */
4596 in_p = 1, low = low1, high = high1;
4597 else if (subset || highequal)
4598 in_p = 0, low = high = 0;
4601 low = range_successor (high0);
4606 /* high1 > high0 but high0 has no successor. Punt. */
4614 /* The case where we are excluding both ranges. Here the complex case
4615 is if they don't overlap. In that case, the only time we have a
4616 range is if they are adjacent. If the second is a subset of the
4617 first, the result is the first. Otherwise, the range to exclude
4618 starts at the beginning of the first range and ends at the end of the
4622 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4623 range_successor (high0),
4625 in_p = 0, low = low0, high = high1;
4628 /* Canonicalize - [min, x] into - [-, x]. */
4629 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4630 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only trust TYPE_MIN_VALUE when the precision fills the machine mode;
   partial-precision types are left alone.  */
4633 if (TYPE_PRECISION (TREE_TYPE (low0))
4634 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4638 if (tree_int_cst_equal (low0,
4639 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4643 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4644 && integer_zerop (low0))
4651 /* Canonicalize - [x, max] into - [x, -]. */
4652 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4653 switch (TREE_CODE (TREE_TYPE (high1)))
4656 if (TYPE_PRECISION (TREE_TYPE (high1))
4657 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4661 if (tree_int_cst_equal (high1,
4662 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4666 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4667 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4669 integer_one_node, 1)))
4676 /* The ranges might be also adjacent between the maximum and
4677 minimum values of the given type. For
4678 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4679 return + [x + 1, y - 1]. */
4680 if (low0 == 0 && high1 == 0)
4682 low = range_successor (high0);
4683 high = range_predecessor (low1);
4684 if (low == 0 || high == 0)
4694 in_p = 0, low = low0, high = high0;
4696 in_p = 0, low = low0, high = high1;
4699 *pin_p = in_p, *plow = low, *phigh = high;
4704 /* Subroutine of fold, looking inside expressions of the form
4705 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4706 of the COND_EXPR. This function is being used also to optimize
4707 A op B ? C : A, by reversing the comparison first.
4709 Return a folded expression whose code is not a COND_EXPR
4710 anymore, or NULL_TREE if no folding opportunity is found. */
4713 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* NOTE(review): the "switch (comp_code)" heads, case labels and several
   braces of this function are on lines not visible in this chunk; the
   comments added below annotate only the visible logic.  */
4715 enum tree_code comp_code = TREE_CODE (arg0);
4716 tree arg00 = TREE_OPERAND (arg0, 0);
4717 tree arg01 = TREE_OPERAND (arg0, 1);
4718 tree arg1_type = TREE_TYPE (arg1);
4724 /* If we have A op 0 ? A : -A, consider applying the following
4727 A == 0? A : -A same as -A
4728 A != 0? A : -A same as A
4729 A >= 0? A : -A same as abs (A)
4730 A > 0? A : -A same as abs (A)
4731 A <= 0? A : -A same as -abs (A)
4732 A < 0? A : -A same as -abs (A)
4734 None of these transformations work for modes with signed
4735 zeros. If A is +/-0, the first two transformations will
4736 change the sign of the result (from +0 to -0, or vice
4737 versa). The last four will fix the sign of the result,
4738 even though the original expressions could be positive or
4739 negative, depending on the sign of A.
4741 Note that all these transformations are correct if A is
4742 NaN, since the two alternatives (A and -A) are also NaNs. */
4743 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4744 ? real_zerop (arg01)
4745 : integer_zerop (arg01))
4746 && ((TREE_CODE (arg2) == NEGATE_EXPR
4747 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4748 /* In the case that A is of the form X-Y, '-A' (arg2) may
4749 have already been folded to Y-X, check for that. */
4750 || (TREE_CODE (arg1) == MINUS_EXPR
4751 && TREE_CODE (arg2) == MINUS_EXPR
4752 && operand_equal_p (TREE_OPERAND (arg1, 0),
4753 TREE_OPERAND (arg2, 1), 0)
4754 && operand_equal_p (TREE_OPERAND (arg1, 1),
4755 TREE_OPERAND (arg2, 0), 0))))
4760 tem = fold_convert (arg1_type, arg1);
4761 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4764 return pedantic_non_lvalue (fold_convert (type, arg1));
/* ABS_EXPR may fold to a trap-free form, so avoid introducing it when
   trapping math is in effect; convert unsigned A to signed first since
   ABS of an unsigned type is meaningless.  */
4767 if (flag_trapping_math)
4772 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4773 arg1 = fold_convert (signed_type_for
4774 (TREE_TYPE (arg1)), arg1);
4775 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4776 return pedantic_non_lvalue (fold_convert (type, tem));
4779 if (flag_trapping_math)
4783 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4784 arg1 = fold_convert (signed_type_for
4785 (TREE_TYPE (arg1)), arg1);
4786 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4787 return negate_expr (fold_convert (type, tem));
4789 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4793 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4794 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4795 both transformations are correct when A is NaN: A != 0
4796 is then true, and A == 0 is false. */
4798 if (integer_zerop (arg01) && integer_zerop (arg2))
4800 if (comp_code == NE_EXPR)
4801 return pedantic_non_lvalue (fold_convert (type, arg1));
4802 else if (comp_code == EQ_EXPR)
4803 return build_int_cst (type, 0);
4806 /* Try some transformations of A op B ? A : B.
4808 A == B? A : B same as B
4809 A != B? A : B same as A
4810 A >= B? A : B same as max (A, B)
4811 A > B? A : B same as max (B, A)
4812 A <= B? A : B same as min (A, B)
4813 A < B? A : B same as min (B, A)
4815 As above, these transformations don't work in the presence
4816 of signed zeros. For example, if A and B are zeros of
4817 opposite sign, the first two transformations will change
4818 the sign of the result. In the last four, the original
4819 expressions give different results for (A=+0, B=-0) and
4820 (A=-0, B=+0), but the transformed expressions do not.
4822 The first two transformations are correct if either A or B
4823 is a NaN. In the first transformation, the condition will
4824 be false, and B will indeed be chosen. In the case of the
4825 second transformation, the condition A != B will be true,
4826 and A will be chosen.
4828 The conversions to max() and min() are not correct if B is
4829 a number and A is not. The conditions in the original
4830 expressions will be false, so all four give B. The min()
4831 and max() versions would give a NaN instead. */
4832 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4833 /* Avoid these transformations if the COND_EXPR may be used
4834 as an lvalue in the C++ front-end. PR c++/19199. */
4836 || (strcmp (lang_hooks.name, "GNU C++") != 0
4837 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4838 || ! maybe_lvalue_p (arg1)
4839 || ! maybe_lvalue_p (arg2)))
4841 tree comp_op0 = arg00;
4842 tree comp_op1 = arg01;
4843 tree comp_type = TREE_TYPE (comp_op0);
4845 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4846 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4856 return pedantic_non_lvalue (fold_convert (type, arg2));
4858 return pedantic_non_lvalue (fold_convert (type, arg1));
4863 /* In C++ a ?: expression can be an lvalue, so put the
4864 operand which will be used if they are equal first
4865 so that we can convert this back to the
4866 corresponding COND_EXPR. */
4867 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4869 comp_op0 = fold_convert (comp_type, comp_op0);
4870 comp_op1 = fold_convert (comp_type, comp_op1);
4871 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4872 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4873 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4874 return pedantic_non_lvalue (fold_convert (type, tem));
4881 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4883 comp_op0 = fold_convert (comp_type, comp_op0);
4884 comp_op1 = fold_convert (comp_type, comp_op1);
4885 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4886 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4887 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4888 return pedantic_non_lvalue (fold_convert (type, tem));
4892 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4893 return pedantic_non_lvalue (fold_convert (type, arg2));
4896 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4897 return pedantic_non_lvalue (fold_convert (type, arg1));
4900 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4905 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4906 we might still be able to simplify this. For example,
4907 if C1 is one less or one more than C2, this might have started
4908 out as a MIN or MAX and been transformed by this function.
4909 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4911 if (INTEGRAL_TYPE_P (type)
4912 && TREE_CODE (arg01) == INTEGER_CST
4913 && TREE_CODE (arg2) == INTEGER_CST)
4917 /* We can replace A with C1 in this case. */
4918 arg1 = fold_convert (type, arg01);
4919 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
/* In the four MIN/MAX recoveries below, the TYPE_MIN/MAX_VALUE guards
   exclude C2 values whose +/- 1 neighbour would overflow the type.  */
4922 /* If C1 is C2 + 1, this is min(A, C2). */
4923 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4925 && operand_equal_p (arg01,
4926 const_binop (PLUS_EXPR, arg2,
4927 build_int_cst (type, 1), 0),
4929 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4934 /* If C1 is C2 - 1, this is min(A, C2). */
4935 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4937 && operand_equal_p (arg01,
4938 const_binop (MINUS_EXPR, arg2,
4939 build_int_cst (type, 1), 0),
4941 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4946 /* If C1 is C2 - 1, this is max(A, C2). */
4947 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4949 && operand_equal_p (arg01,
4950 const_binop (MINUS_EXPR, arg2,
4951 build_int_cst (type, 1), 0),
4953 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4958 /* If C1 is C2 + 1, this is max(A, C2). */
4959 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4961 && operand_equal_p (arg01,
4962 const_binop (PLUS_EXPR, arg2,
4963 build_int_cst (type, 1), 0),
4965 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4979 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4980 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4983 /* EXP is some logical combination of boolean tests. See if we can
4984 merge it into some range test. Return the new tree if so. */
/* NOTE(review): the left-margin numbers are original-file line numbers;
   this excerpt elides several lines (the return type, some braces and
   at least one guard), so control flow below is partly inferred.  */
4987 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
/* Nonzero when the logical operation is an OR variant; the range
   merge below is done in AND form, so OR is handled by inverting
   both sides first and inverting the result at the end.  */
4989 int or_op = (code == TRUTH_ORIF_EXPR
4990 || code == TRUTH_OR_EXPR);
/* "EXP is (not) in [LOW, HIGH]" decomposition for each operand and
   for the merged result.  */
4991 int in0_p, in1_p, in_p;
4992 tree low0, low1, low, high0, high1, high;
4993 bool strict_overflow_p = false;
/* make_range rewrites each operand as a range membership test and
   records whether the rewrite relied on undefined signed overflow.  */
4994 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4995 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4997 const char * const warnmsg = G_("assuming signed overflow does not occur "
4998 "when simplifying range test");
5000 /* If this is an OR operation, invert both sides; we will invert
5001 again at the end. */
/* NOTE(review): presumably guarded by `if (or_op)` on an elided
   line -- confirm against the full source.  */
5003 in0_p = ! in0_p, in1_p = ! in1_p;
5005 /* If both expressions are the same, if we can merge the ranges, and we
5006 can build the range test, return it or it inverted. If one of the
5007 ranges is always true or always false, consider it to be the same
5008 expression as the other. */
5009 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5010 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5012 && 0 != (tem = (build_range_check (type,
5014 : rhs != 0 ? rhs : integer_zero_node,
5017 if (strict_overflow_p)
5018 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
/* Undo the inversion done above for the OR case.  */
5019 return or_op ? invert_truthvalue (tem) : tem;
5022 /* On machines where the branch cost is expensive, if this is a
5023 short-circuited branch and the underlying object on both sides
5024 is the same, make a non-short-circuit operation. */
5025 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5026 && lhs != 0 && rhs != 0
5027 && (code == TRUTH_ANDIF_EXPR
5028 || code == TRUTH_ORIF_EXPR)
5029 && operand_equal_p (lhs, rhs, 0))
5031 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5032 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5033 which cases we can't do this. */
5034 if (simple_operand_p (lhs))
5035 return build2 (code == TRUTH_ANDIF_EXPR
5036 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5039 else if (lang_hooks.decls.global_bindings_p () == 0
5040 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand exactly once via SAVE_EXPR, then
   build a range check for each side against the saved value.  */
5042 tree common = save_expr (lhs);
5044 if (0 != (lhs = build_range_check (type, common,
5045 or_op ? ! in0_p : in0_p,
5047 && (0 != (rhs = build_range_check (type, common,
5048 or_op ? ! in1_p : in1_p,
5051 if (strict_overflow_p)
5052 fold_overflow_warning (warnmsg,
5053 WARN_STRICT_OVERFLOW_COMPARISON)
5054 return build2 (code == TRUTH_ANDIF_EXPR
5055 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5064 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5065 bit value. Arrange things so the extra bits will be set to zero if and
5066 only if C is signed-extended to its full width. If MASK is nonzero,
5067 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): some original lines are elided in this excerpt
   (return type, braces, an early `return c`); margin numbers are
   original-file line numbers.  */
5070 unextend (tree c, int p, int unsignedp, tree mask)
5072 tree type = TREE_TYPE (c);
/* Width in bits of C's machine mode; P bits of C are significant.  */
5073 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the full mode or is unsigned
   (no sign-extension can set extra bits).  */
5076 if (p == modesize || unsignedp)
5079 /* We work by getting just the sign bit into the low-order bit, then
5080 into the high-order bit, then sign-extend. We then XOR that value
5082 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5083 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5085 /* We must use a signed type in order to get an arithmetic right shift.
5086 However, we must also avoid introducing accidental overflows, so that
5087 a subsequent call to integer_zerop will work. Hence we must
5088 do the type conversion here. At this point, the constant is either
5089 zero or one, and the conversion to a signed type can never overflow.
5090 We could get an overflow if this conversion is done anywhere else. */
5091 if (TYPE_UNSIGNED (type))
5092 temp = fold_convert (signed_type_for (type), temp);
/* Move the sign bit to the top, then arithmetic-shift it back down so
   TEMP becomes all-ones in the bits above position P-1 when C was
   negative, zero otherwise.  */
5094 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5095 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* NOTE(review): presumably guarded by `if (mask != 0)` on an elided
   line -- confirm against the full source.  */
5097 temp = const_binop (BIT_AND_EXPR, temp,
5098 fold_convert (TREE_TYPE (c), mask), 0);
5099 /* If necessary, convert the type back to match the type of C. */
5100 if (TYPE_UNSIGNED (type))
5101 temp = fold_convert (type, temp);
5103 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5106 /* Find ways of folding logical expressions of LHS and RHS:
5107 Try to merge two comparisons to the same innermost item.
5108 Look for range tests like "ch >= '0' && ch <= '9'".
5109 Look for combinations of simple terms on machines with expensive branches
5110 and evaluate the RHS unconditionally.
5112 For example, if we have p->a == 2 && p->b == 4 and we can make an
5113 object large enough to span both A and B, we can do this with a comparison
5114 against the object ANDed with the a mask.
5116 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5117 operations to do this with one comparison.
5119 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5120 function and the one above.
5122 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5123 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5125 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5128 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): this excerpt elides many original lines (margin
   numbers are original-file line numbers); several guards, braces and
   early returns are not visible below.  */
5131 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5133 /* If this is the "or" of two comparisons, we can do something if
5134 the comparisons are NE_EXPR. If this is the "and", we can do something
5135 if the comparisons are EQ_EXPR. I.e.,
5136 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5138 WANTED_CODE is this operation code. For single bit fields, we can
5139 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5140 comparison for one-bit fields. */
5142 enum tree_code wanted_code;
5143 enum tree_code lcode, rcode;
/* Naming convention for the locals below: the first letter says which
   comparison (l = LHS, r = RHS of the logical op), the second letter
   says which operand of that comparison (l = left, r = right).  */
5144 tree ll_arg, lr_arg, rl_arg, rr_arg;
5145 tree ll_inner, lr_inner, rl_inner, rr_inner;
5146 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5147 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
/* x*_bitpos are the bit positions re-expressed relative to the wider
   field (lnbitpos/rnbitpos) chosen further down.  */
5148 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5149 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5150 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5151 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5152 enum machine_mode lnmode, rnmode;
5153 tree ll_mask, lr_mask, rl_mask, rr_mask;
5154 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5155 tree l_const, r_const;
5156 tree lntype, rntype, result;
5157 int first_bit, end_bit;
/* Remember the inputs so we can tell at the end whether anything
   actually changed before rebuilding the expression.  */
5159 tree orig_lhs = lhs, orig_rhs = rhs;
5160 enum tree_code orig_code = code;
5162 /* Start by getting the comparison codes. Fail if anything is volatile.
5163 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5164 it were surrounded with a NE_EXPR. */
5166 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5169 lcode = TREE_CODE (lhs);
5170 rcode = TREE_CODE (rhs);
/* Canonicalize (x & 1) into (x & 1) != 0 on either side.  */
5172 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5174 lhs = build2 (NE_EXPR, truth_type, lhs,
5175 build_int_cst (TREE_TYPE (lhs), 0));
5179 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5181 rhs = build2 (NE_EXPR, truth_type, rhs,
5182 build_int_cst (TREE_TYPE (rhs), 0));
/* Both sides must now be comparisons for any merging to apply.  */
5186 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5187 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5190 ll_arg = TREE_OPERAND (lhs, 0);
5191 lr_arg = TREE_OPERAND (lhs, 1);
5192 rl_arg = TREE_OPERAND (rhs, 0);
5193 rr_arg = TREE_OPERAND (rhs, 1);
5195 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5196 if (simple_operand_p (ll_arg)
5197 && simple_operand_p (lr_arg))
/* Same operands in the same order: combine the two comparison
   codes directly.  */
5200 if (operand_equal_p (ll_arg, rl_arg, 0)
5201 && operand_equal_p (lr_arg, rr_arg, 0))
5203 result = combine_comparisons (code, lcode, rcode,
5204 truth_type, ll_arg, lr_arg);
/* Same operands but swapped on the RHS comparison: swap its code
   first, then combine.  */
5208 else if (operand_equal_p (ll_arg, rr_arg, 0)
5209 && operand_equal_p (lr_arg, rl_arg, 0))
5211 result = combine_comparisons (code, lcode,
5212 swap_tree_comparison (rcode),
5213 truth_type, ll_arg, lr_arg);
/* From here on, treat the short-circuit forms like their
   non-short-circuit counterparts.  */
5219 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5220 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5222 /* If the RHS can be evaluated unconditionally and its operands are
5223 simple, it wins to evaluate the RHS unconditionally on machines
5224 with expensive branches. In this case, this isn't a comparison
5225 that can be merged. Avoid doing this if the RHS is a floating-point
5226 comparison since those can trap. */
5228 if (BRANCH_COST >= 2
5229 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5230 && simple_operand_p (rl_arg)
5231 && simple_operand_p (rr_arg))
5233 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5234 if (code == TRUTH_OR_EXPR
5235 && lcode == NE_EXPR && integer_zerop (lr_arg)
5236 && rcode == NE_EXPR && integer_zerop (rr_arg)
5237 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5238 return build2 (NE_EXPR, truth_type,
5239 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5241 build_int_cst (TREE_TYPE (ll_arg), 0));
5243 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5244 if (code == TRUTH_AND_EXPR
5245 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5246 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5247 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5248 return build2 (EQ_EXPR, truth_type,
5249 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5251 build_int_cst (TREE_TYPE (ll_arg), 0));
5253 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild when something above changed; otherwise return
   the original (elided `return NULL_TREE` presumably follows).  */
5255 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5256 return build2 (code, truth_type, lhs, rhs);
5261 /* See if the comparisons can be merged. Then get all the parameters for
5264 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5265 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four comparison operands into bit-field references:
   innermost object, bit size/position, mode, signedness, masks.  */
5269 ll_inner = decode_field_reference (ll_arg,
5270 &ll_bitsize, &ll_bitpos, &ll_mode,
5271 &ll_unsignedp, &volatilep, &ll_mask,
5273 lr_inner = decode_field_reference (lr_arg,
5274 &lr_bitsize, &lr_bitpos, &lr_mode,
5275 &lr_unsignedp, &volatilep, &lr_mask,
5277 rl_inner = decode_field_reference (rl_arg,
5278 &rl_bitsize, &rl_bitpos, &rl_mode,
5279 &rl_unsignedp, &volatilep, &rl_mask,
5281 rr_inner = decode_field_reference (rr_arg,
5282 &rr_bitsize, &rr_bitpos, &rr_mode,
5283 &rr_unsignedp, &volatilep, &rr_mask,
5286 /* It must be true that the inner operation on the lhs of each
5287 comparison must be the same if we are to be able to do anything.
5288 Then see if we have constants. If not, the same must be true for
5290 if (volatilep || ll_inner == 0 || rl_inner == 0
5291 || ! operand_equal_p (ll_inner, rl_inner, 0))
5294 if (TREE_CODE (lr_arg) == INTEGER_CST
5295 && TREE_CODE (rr_arg) == INTEGER_CST)
5296 l_const = lr_arg, r_const = rr_arg;
5297 else if (lr_inner == 0 || rr_inner == 0
5298 || ! operand_equal_p (lr_inner, rr_inner, 0))
5301 l_const = r_const = 0;
5303 /* If either comparison code is not correct for our logical operation,
5304 fail. However, we can convert a one-bit comparison against zero into
5305 the opposite comparison against that bit being set in the field. */
5307 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5308 if (lcode != wanted_code)
5310 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5312 /* Make the left operand unsigned, since we are only interested
5313 in the value of one bit. Otherwise we are doing the wrong
5322 /* This is analogous to the code for l_const above. */
5323 if (rcode != wanted_code)
5325 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5334 /* See if we can find a mode that contains both fields being compared on
5335 the left. If we can't, fail. Otherwise, update all constants and masks
5336 to be relative to a field of that size. */
5337 first_bit = MIN (ll_bitpos, rl_bitpos);
5338 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5339 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5340 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5342 if (lnmode == VOIDmode)
5345 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the start of the wide field to its own size (size is a
   power of two for a machine mode).  */
5346 lnbitpos = first_bit & ~ (lnbitsize - 1);
5347 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5348 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
/* On big-endian targets bit positions count from the other end of
   the wide field.  */
5350 if (BYTES_BIG_ENDIAN)
5352 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5353 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift each mask to its position inside the wide field.  */
5356 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5357 size_int (xll_bitpos), 0);
5358 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5359 size_int (xrl_bitpos), 0);
/* NOTE(review): presumably inside `if (l_const)` -- the guard line is
   elided from this excerpt.  */
5363 l_const = fold_convert (lntype, l_const);
5364 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5365 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits set outside the field's mask, the
   comparison can never succeed (or never fail, for NE).  */
5366 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5367 fold_build1 (BIT_NOT_EXPR,
5371 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5373 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
/* Same treatment for the right-hand comparison's constant.  */
5378 r_const = fold_convert (lntype, r_const);
5379 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5380 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5381 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5382 fold_build1 (BIT_NOT_EXPR,
5386 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5388 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5392 /* If the right sides are not constant, do the same for it. Also,
5393 disallow this optimization if a size or signedness mismatch occurs
5394 between the left and right sides. */
5397 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5398 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5399 /* Make sure the two fields on the right
5400 correspond to the left without being swapped. */
5401 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
/* Mirror the left-side mode search for the right-side fields.  */
5404 first_bit = MIN (lr_bitpos, rr_bitpos);
5405 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5406 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5407 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5409 if (rnmode == VOIDmode)
5412 rnbitsize = GET_MODE_BITSIZE (rnmode);
5413 rnbitpos = first_bit & ~ (rnbitsize - 1);
5414 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5415 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5417 if (BYTES_BIG_ENDIAN)
5419 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5420 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5423 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5424 size_int (xlr_bitpos), 0);
5425 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5426 size_int (xrr_bitpos), 0);
5428 /* Make a mask that corresponds to both fields being compared.
5429 Do this for both items being compared. If the operands are the
5430 same size and the bits being compared are in the same position
5431 then we can do this by masking both and comparing the masked
5433 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5434 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5435 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5437 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5438 ll_unsignedp || rl_unsignedp);
/* Skip the AND when the mask covers the entire wide field.  */
5439 if (! all_ones_mask_p (ll_mask, lnbitsize))
5440 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5442 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5443 lr_unsignedp || rr_unsignedp);
5444 if (! all_ones_mask_p (lr_mask, rnbitsize))
5445 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5447 return build2 (wanted_code, truth_type, lhs, rhs);
5450 /* There is still another way we can do something: If both pairs of
5451 fields being compared are adjacent, we may be able to make a wider
5452 field containing them both.
5454 Note that we still must mask the lhs/rhs expressions. Furthermore,
5455 the mask must be shifted to account for the shift done by
5456 make_bit_field_ref. */
5457 if ((ll_bitsize + ll_bitpos == rl_bitpos
5458 && lr_bitsize + lr_bitpos == rr_bitpos)
5459 || (ll_bitpos == rl_bitpos + rl_bitsize
5460 && lr_bitpos == rr_bitpos + rr_bitsize))
/* Build one wide bit-field reference spanning both adjacent fields
   on each side.  */
5464 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5465 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5466 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5467 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5469 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5470 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5471 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5472 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5474 /* Convert to the smaller type before masking out unwanted bits. */
5476 if (lntype != rntype)
5478 if (lnbitsize > rnbitsize)
5480 lhs = fold_convert (rntype, lhs);
5481 ll_mask = fold_convert (rntype, ll_mask);
5484 else if (lnbitsize < rnbitsize)
5486 rhs = fold_convert (lntype, rhs);
5487 lr_mask = fold_convert (lntype, lr_mask);
/* NOTE(review): `type` here is presumably set on an elided line
   (likely to the common narrower type chosen above) -- confirm.  */
5492 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5493 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5495 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5496 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5498 return build2 (wanted_code, truth_type, lhs, rhs);
5504 /* Handle the case of comparisons with constants. If there is something in
5505 common between the masks, those bits of the constants must be the same.
5506 If not, the condition is always false. Test for this to avoid generating
5507 incorrect code below. */
5508 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5509 if (! integer_zerop (result)
5510 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5511 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5513 if (wanted_code == NE_EXPR)
5515 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5516 return constant_boolean_node (true, truth_type);
5520 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5521 return constant_boolean_node (false, truth_type);
5525 /* Construct the expression we will return. First get the component
5526 reference we will make. Unless the mask is all ones the width of
5527 that field, perform the mask operation. Then compare with the
5529 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5530 ll_unsignedp || rl_unsignedp);
5532 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5533 if (! all_ones_mask_p (ll_mask, lnbitsize))
5534 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
/* Compare the merged field against the OR of the two constants.  */
5536 return build2 (wanted_code, truth_type, result,
5537 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5540 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): several original lines (return type, braces, some
   `case` labels and declarations such as ARG0/INNER/MINMAX_CONST) are
   elided from this excerpt; margin numbers are original-file lines.
   OP0 is the MIN/MAX expression, OP1 the constant compared against.  */
5544 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5547 enum tree_code op_code;
5548 tree comp_const = op1;
/* consts_equal / consts_lt record how the MIN/MAX's constant operand
   compares with COMP_CONST.  */
5550 int consts_equal, consts_lt;
5553 STRIP_SIGN_NOPS (arg0);
5555 op_code = TREE_CODE (arg0);
5556 minmax_const = TREE_OPERAND (arg0, 1);
5557 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5558 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5559 inner = TREE_OPERAND (arg0, 0);
5561 /* If something does not permit us to optimize, return the original tree. */
5562 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5563 || TREE_CODE (comp_const) != INTEGER_CST
5564 || TREE_OVERFLOW (comp_const)
5565 || TREE_CODE (minmax_const) != INTEGER_CST
5566 || TREE_OVERFLOW (minmax_const))
5569 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5570 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE are handled by inverting the comparison and recursing.  */
5574 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5576 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5579 return invert_truthvalue (tem);
/* NOTE(review): presumably the GE_EXPR arm (GE == EQ || GT) -- the
   `case` label and `return` lines are elided here.  */
5585 fold_build2 (TRUTH_ORIF_EXPR, type,
5586 optimize_minmax_comparison
5587 (EQ_EXPR, type, arg0, comp_const),
5588 optimize_minmax_comparison
5589 (GT_EXPR, type, arg0, comp_const));
/* EQ_EXPR arm: the comments use 0 for the MIN/MAX constant as a
   representative value.  */
5592 if (op_code == MAX_EXPR && consts_equal)
5593 /* MAX (X, 0) == 0 -> X <= 0 */
5594 return fold_build2 (LE_EXPR, type, inner, comp_const);
5596 else if (op_code == MAX_EXPR && consts_lt)
5597 /* MAX (X, 0) == 5 -> X == 5 */
5598 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5600 else if (op_code == MAX_EXPR)
5601 /* MAX (X, 0) == -1 -> false */
5602 return omit_one_operand (type, integer_zero_node, inner);
5604 else if (consts_equal)
5605 /* MIN (X, 0) == 0 -> X >= 0 */
5606 return fold_build2 (GE_EXPR, type, inner, comp_const);
/* NOTE(review): elided `else if (consts_lt)` guard presumably
   precedes this -- confirm against the full source.  */
5609 /* MIN (X, 0) == 5 -> false */
5610 return omit_one_operand (type, integer_zero_node, inner);
5613 /* MIN (X, 0) == -1 -> X == -1 */
5614 return fold_build2 (EQ_EXPR, type, inner, comp_const);
/* GT_EXPR arm.  */
5617 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5618 /* MAX (X, 0) > 0 -> X > 0
5619 MAX (X, 0) > 5 -> X > 5 */
5620 return fold_build2 (GT_EXPR, type, inner, comp_const);
5622 else if (op_code == MAX_EXPR)
5623 /* MAX (X, 0) > -1 -> true */
5624 return omit_one_operand (type, integer_one_node, inner);
5626 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5627 /* MIN (X, 0) > 0 -> false
5628 MIN (X, 0) > 5 -> false */
5629 return omit_one_operand (type, integer_zero_node, inner);
5632 /* MIN (X, 0) > -1 -> X > -1 */
5633 return fold_build2 (GT_EXPR, type, inner, comp_const);
5640 /* T is an integer expression that is being multiplied, divided, or taken a
5641 modulus (CODE says which and what kind of divide or modulus) by a
5642 constant C. See if we can eliminate that operation by folding it with
5643 other operations already in T. WIDE_TYPE, if non-null, is a type that
5644 should be used for the computation if wider than our type.
5646 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5647 (X * 2) + (Y * 4). We must, however, be assured that either the original
5648 expression would not overflow or that overflow is undefined for the type
5649 in the language in question.
5651 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5652 the machine has a multiply-accumulate insn or that this is part of an
5653 addressing calculation.
5655 If we return a non-null expression, it is an equivalent form of the
5656 original computation, but need not be in the original type.
5658 We set *STRICT_OVERFLOW_P to true if the return values depends on
5659 signed overflow being undefined. Otherwise we do not change
5660 *STRICT_OVERFLOW_P. */
/* Thin wrapper around extract_muldiv_1; see the comment above for the
   contract.  NOTE(review): the lines implementing the depth limit
   (a counter incremented before and decremented after the call, and
   the `ret` declaration/return) are elided from this excerpt.  */
5663 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5664 bool *strict_overflow_p)
5666 /* To avoid exponential search depth, refuse to allow recursion past
5667 three levels. Beyond that (1) it's highly unlikely that we'll find
5668 something interesting and (2) we've probably processed it before
5669 when we built the inner expression. */
5678 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5685 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5686 bool *strict_overflow_p)
5688 tree type = TREE_TYPE (t);
5689 enum tree_code tcode = TREE_CODE (t);
5690 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5691 > GET_MODE_SIZE (TYPE_MODE (type)))
5692 ? wide_type : type);
5694 int same_p = tcode == code;
5695 tree op0 = NULL_TREE, op1 = NULL_TREE;
5696 bool sub_strict_overflow_p;
5698 /* Don't deal with constants of zero here; they confuse the code below. */
5699 if (integer_zerop (c))
5702 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5703 op0 = TREE_OPERAND (t, 0);
5705 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5706 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5708 /* Note that we need not handle conditional operations here since fold
5709 already handles those cases. So just do arithmetic here. */
5713 /* For a constant, we can always simplify if we are a multiply
5714 or (for divide and modulus) if it is a multiple of our constant. */
5715 if (code == MULT_EXPR
5716 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5717 return const_binop (code, fold_convert (ctype, t),
5718 fold_convert (ctype, c), 0);
5721 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5722 /* If op0 is an expression ... */
5723 if ((COMPARISON_CLASS_P (op0)
5724 || UNARY_CLASS_P (op0)
5725 || BINARY_CLASS_P (op0)
5726 || VL_EXP_CLASS_P (op0)
5727 || EXPRESSION_CLASS_P (op0))
5728 /* ... and is unsigned, and its type is smaller than ctype,
5729 then we cannot pass through as widening. */
5730 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5731 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5732 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5733 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5734 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5735 /* ... or this is a truncation (t is narrower than op0),
5736 then we cannot pass through this narrowing. */
5737 || (GET_MODE_SIZE (TYPE_MODE (type))
5738 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5739 /* ... or signedness changes for division or modulus,
5740 then we cannot pass through this conversion. */
5741 || (code != MULT_EXPR
5742 && (TYPE_UNSIGNED (ctype)
5743 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5746 /* Pass the constant down and see if we can make a simplification. If
5747 we can, replace this expression with the inner simplification for
5748 possible later conversion to our or some other type. */
5749 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5750 && TREE_CODE (t2) == INTEGER_CST
5751 && !TREE_OVERFLOW (t2)
5752 && (0 != (t1 = extract_muldiv (op0, t2, code,
5754 ? ctype : NULL_TREE,
5755 strict_overflow_p))))
5760 /* If widening the type changes it from signed to unsigned, then we
5761 must avoid building ABS_EXPR itself as unsigned. */
5762 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5764 tree cstype = (*signed_type_for) (ctype);
5765 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5768 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5769 return fold_convert (ctype, t1);
5775 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5777 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5780 case MIN_EXPR: case MAX_EXPR:
5781 /* If widening the type changes the signedness, then we can't perform
5782 this optimization as that changes the result. */
5783 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5786 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5787 sub_strict_overflow_p = false;
5788 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5789 &sub_strict_overflow_p)) != 0
5790 && (t2 = extract_muldiv (op1, c, code, wide_type,
5791 &sub_strict_overflow_p)) != 0)
5793 if (tree_int_cst_sgn (c) < 0)
5794 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5795 if (sub_strict_overflow_p)
5796 *strict_overflow_p = true;
5797 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5798 fold_convert (ctype, t2));
5802 case LSHIFT_EXPR: case RSHIFT_EXPR:
5803 /* If the second operand is constant, this is a multiplication
5804 or floor division, by a power of two, so we can treat it that
5805 way unless the multiplier or divisor overflows. Signed
5806 left-shift overflow is implementation-defined rather than
5807 undefined in C90, so do not convert signed left shift into
5809 if (TREE_CODE (op1) == INTEGER_CST
5810 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5811 /* const_binop may not detect overflow correctly,
5812 so check for it explicitly here. */
5813 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5814 && TREE_INT_CST_HIGH (op1) == 0
5815 && 0 != (t1 = fold_convert (ctype,
5816 const_binop (LSHIFT_EXPR,
5819 && !TREE_OVERFLOW (t1))
5820 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5821 ? MULT_EXPR : FLOOR_DIV_EXPR,
5822 ctype, fold_convert (ctype, op0), t1),
5823 c, code, wide_type, strict_overflow_p);
5826 case PLUS_EXPR: case MINUS_EXPR:
5827 /* See if we can eliminate the operation on both sides. If we can, we
5828 can return a new PLUS or MINUS. If we can't, the only remaining
5829 cases where we can do anything are if the second operand is a
5831 sub_strict_overflow_p = false;
5832 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5833 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5834 if (t1 != 0 && t2 != 0
5835 && (code == MULT_EXPR
5836 /* If not multiplication, we can only do this if both operands
5837 are divisible by c. */
5838 || (multiple_of_p (ctype, op0, c)
5839 && multiple_of_p (ctype, op1, c))))
5841 if (sub_strict_overflow_p)
5842 *strict_overflow_p = true;
5843 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5844 fold_convert (ctype, t2));
5847 /* If this was a subtraction, negate OP1 and set it to be an addition.
5848 This simplifies the logic below. */
5849 if (tcode == MINUS_EXPR)
5850 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5852 if (TREE_CODE (op1) != INTEGER_CST)
5855 /* If either OP1 or C are negative, this optimization is not safe for
5856 some of the division and remainder types while for others we need
5857 to change the code. */
5858 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5860 if (code == CEIL_DIV_EXPR)
5861 code = FLOOR_DIV_EXPR;
5862 else if (code == FLOOR_DIV_EXPR)
5863 code = CEIL_DIV_EXPR;
5864 else if (code != MULT_EXPR
5865 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5869 /* If it's a multiply or a division/modulus operation of a multiple
5870 of our constant, do the operation and verify it doesn't overflow. */
5871 if (code == MULT_EXPR
5872 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5874 op1 = const_binop (code, fold_convert (ctype, op1),
5875 fold_convert (ctype, c), 0);
5876 /* We allow the constant to overflow with wrapping semantics. */
5878 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5884 /* If we have an unsigned type is not a sizetype, we cannot widen
5885 the operation since it will change the result if the original
5886 computation overflowed. */
5887 if (TYPE_UNSIGNED (ctype)
5888 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5892 /* If we were able to eliminate our operation from the first side,
5893 apply our operation to the second side and reform the PLUS. */
5894 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5895 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5897 /* The last case is if we are a multiply. In that case, we can
5898 apply the distributive law to commute the multiply and addition
5899 if the multiplication of the constants doesn't overflow. */
5900 if (code == MULT_EXPR)
5901 return fold_build2 (tcode, ctype,
5902 fold_build2 (code, ctype,
5903 fold_convert (ctype, op0),
5904 fold_convert (ctype, c)),
5910 /* We have a special case here if we are doing something like
5911 (C * 8) % 4 since we know that's zero. */
5912 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5913 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5914 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5915 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5916 return omit_one_operand (type, integer_zero_node, op0);
5918 /* ... fall through ... */
5920 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5921 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5922 /* If we can extract our operation from the LHS, do so and return a
5923 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5924 do something only if the second operand is a constant. */
5926 && (t1 = extract_muldiv (op0, c, code, wide_type,
5927 strict_overflow_p)) != 0)
5928 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5929 fold_convert (ctype, op1));
5930 else if (tcode == MULT_EXPR && code == MULT_EXPR
5931 && (t1 = extract_muldiv (op1, c, code, wide_type,
5932 strict_overflow_p)) != 0)
5933 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5934 fold_convert (ctype, t1));
5935 else if (TREE_CODE (op1) != INTEGER_CST)
5938 /* If these are the same operation types, we can associate them
5939 assuming no overflow. */
5941 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5942 fold_convert (ctype, c), 0))
5943 && !TREE_OVERFLOW (t1))
5944 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5946 /* If these operations "cancel" each other, we have the main
5947 optimizations of this pass, which occur when either constant is a
5948 multiple of the other, in which case we replace this with either an
5949 operation or CODE or TCODE.
5951 If we have an unsigned type that is not a sizetype, we cannot do
5952 this since it will change the result if the original computation
5954 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5955 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5956 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5957 || (tcode == MULT_EXPR
5958 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5959 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5961 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5963 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5964 *strict_overflow_p = true;
5965 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5966 fold_convert (ctype,
5967 const_binop (TRUNC_DIV_EXPR,
5970 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5972 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5973 *strict_overflow_p = true;
5974 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5975 fold_convert (ctype,
5976 const_binop (TRUNC_DIV_EXPR,
5989 /* Return a node which has the indicated constant VALUE (either 0 or
5990 1), and is of the indicated TYPE. */
5993 constant_boolean_node (int value, tree type)
5995 if (type == integer_type_node)
5996 return value ? integer_one_node : integer_zero_node;
5997 else if (type == boolean_type_node)
5998 return value ? boolean_true_node : boolean_false_node;
6000 return build_int_cst (type, value);
6004 /* Return true if expr looks like an ARRAY_REF and set base and
6005 offset to the appropriate trees. If there is no offset,
6006 offset is set to NULL_TREE. Base will be canonicalized to
6007 something you can get the element type from using
6008 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
6009 in bytes to the base in sizetype. */
6012 extract_array_ref (tree expr, tree *base, tree *offset)
6014 /* One canonical form is a PLUS_EXPR with the first
6015 argument being an ADDR_EXPR with a possible NOP_EXPR
6017 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
6019 tree op0 = TREE_OPERAND (expr, 0);
6020 tree inner_base, dummy1;
6021 /* Strip NOP_EXPRs here because the C frontends and/or
6022 folders present us (int *)&x.a p+ 4 possibly. */
6024 if (extract_array_ref (op0, &inner_base, &dummy1))
6027 *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
6028 if (dummy1 != NULL_TREE)
6029 *offset = fold_build2 (PLUS_EXPR, sizetype,
6034 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
6035 which we transform into an ADDR_EXPR with appropriate
6036 offset. For other arguments to the ADDR_EXPR we assume
6037 zero offset and as such do not care about the ADDR_EXPR
6038 type and strip possible nops from it. */
6039 else if (TREE_CODE (expr) == ADDR_EXPR)
6041 tree op0 = TREE_OPERAND (expr, 0);
6042 if (TREE_CODE (op0) == ARRAY_REF)
6044 tree idx = TREE_OPERAND (op0, 1);
6045 *base = TREE_OPERAND (op0, 0);
6046 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6047 array_ref_element_size (op0));
6048 *offset = fold_convert (sizetype, *offset);
6052 /* Handle array-to-pointer decay as &a. */
6053 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6054 *base = TREE_OPERAND (expr, 0);
6057 *offset = NULL_TREE;
6061 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6062 else if (SSA_VAR_P (expr)
6063 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6066 *offset = NULL_TREE;
6074 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6075 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6076 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6077 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6078 COND is the first argument to CODE; otherwise (as in the example
6079 given here), it is the second argument. TYPE is the type of the
6080 original expression. Return NULL_TREE if no simplification is
6084 fold_binary_op_with_conditional_arg (enum tree_code code,
6085 tree type, tree op0, tree op1,
6086 tree cond, tree arg, int cond_first_p)
6088 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6089 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6090 tree test, true_value, false_value;
6091 tree lhs = NULL_TREE;
6092 tree rhs = NULL_TREE;
6094 /* This transformation is only worthwhile if we don't have to wrap
6095 arg in a SAVE_EXPR, and the operation can be simplified on at least
6096 one of the branches once its pushed inside the COND_EXPR. */
6097 if (!TREE_CONSTANT (arg))
6100 if (TREE_CODE (cond) == COND_EXPR)
6102 test = TREE_OPERAND (cond, 0);
6103 true_value = TREE_OPERAND (cond, 1);
6104 false_value = TREE_OPERAND (cond, 2);
6105 /* If this operand throws an expression, then it does not make
6106 sense to try to perform a logical or arithmetic operation
6108 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6110 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6115 tree testtype = TREE_TYPE (cond);
6117 true_value = constant_boolean_node (true, testtype);
6118 false_value = constant_boolean_node (false, testtype);
6121 arg = fold_convert (arg_type, arg);
6124 true_value = fold_convert (cond_type, true_value);
6126 lhs = fold_build2 (code, type, true_value, arg);
6128 lhs = fold_build2 (code, type, arg, true_value);
6132 false_value = fold_convert (cond_type, false_value);
6134 rhs = fold_build2 (code, type, false_value, arg);
6136 rhs = fold_build2 (code, type, arg, false_value);
6139 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6140 return fold_convert (type, test);
6144 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6146 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6147 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6148 ADDEND is the same as X.
6150 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6151 and finite. The problematic cases are when X is zero, and its mode
6152 has signed zeros. In the case of rounding towards -infinity,
6153 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6154 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6157 fold_real_zero_addition_p (tree type, tree addend, int negate)
6159 if (!real_zerop (addend))
6162 /* Don't allow the fold with -fsignaling-nans. */
6163 if (HONOR_SNANS (TYPE_MODE (type)))
6166 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6167 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6170 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6171 if (TREE_CODE (addend) == REAL_CST
6172 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6175 /* The mode has signed zeros, and we have to honor their sign.
6176 In this situation, there is only one case we can return true for.
6177 X - 0 is the same as X unless rounding towards -infinity is
6179 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6182 /* Subroutine of fold() that checks comparisons of built-in math
6183 functions against real constants.
6185 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6186 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6187 is the type of the result and ARG0 and ARG1 are the operands of the
6188 comparison. ARG1 must be a TREE_REAL_CST.
6190 The function returns the constant folded tree if a simplification
6191 can be made, and NULL_TREE otherwise. */
6194 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6195 tree type, tree arg0, tree arg1)
6199 if (BUILTIN_SQRT_P (fcode))
6201 tree arg = CALL_EXPR_ARG (arg0, 0);
6202 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6204 c = TREE_REAL_CST (arg1);
6205 if (REAL_VALUE_NEGATIVE (c))
6207 /* sqrt(x) < y is always false, if y is negative. */
6208 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6209 return omit_one_operand (type, integer_zero_node, arg);
6211 /* sqrt(x) > y is always true, if y is negative and we
6212 don't care about NaNs, i.e. negative values of x. */
6213 if (code == NE_EXPR || !HONOR_NANS (mode))
6214 return omit_one_operand (type, integer_one_node, arg);
6216 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6217 return fold_build2 (GE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg), dconst0));
6220 else if (code == GT_EXPR || code == GE_EXPR)
6224 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6225 real_convert (&c2, mode, &c2);
6227 if (REAL_VALUE_ISINF (c2))
6229 /* sqrt(x) > y is x == +Inf, when y is very large. */
6230 if (HONOR_INFINITIES (mode))
6231 return fold_build2 (EQ_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg), c2));
6234 /* sqrt(x) > y is always false, when y is very large
6235 and we don't care about infinities. */
6236 return omit_one_operand (type, integer_zero_node, arg);
6239 /* sqrt(x) > c is the same as x > c*c. */
6240 return fold_build2 (code, type, arg,
6241 build_real (TREE_TYPE (arg), c2));
6243 else if (code == LT_EXPR || code == LE_EXPR)
6247 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6248 real_convert (&c2, mode, &c2);
6250 if (REAL_VALUE_ISINF (c2))
6252 /* sqrt(x) < y is always true, when y is a very large
6253 value and we don't care about NaNs or Infinities. */
6254 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6255 return omit_one_operand (type, integer_one_node, arg);
6257 /* sqrt(x) < y is x != +Inf when y is very large and we
6258 don't care about NaNs. */
6259 if (! HONOR_NANS (mode))
6260 return fold_build2 (NE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg), c2));
6263 /* sqrt(x) < y is x >= 0 when y is very large and we
6264 don't care about Infinities. */
6265 if (! HONOR_INFINITIES (mode))
6266 return fold_build2 (GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg), dconst0));
6269 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6270 if (lang_hooks.decls.global_bindings_p () != 0
6271 || CONTAINS_PLACEHOLDER_P (arg))
6274 arg = save_expr (arg);
6275 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6276 fold_build2 (GE_EXPR, type, arg,
6277 build_real (TREE_TYPE (arg),
6279 fold_build2 (NE_EXPR, type, arg,
6280 build_real (TREE_TYPE (arg),
6284 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6285 if (! HONOR_NANS (mode))
6286 return fold_build2 (code, type, arg,
6287 build_real (TREE_TYPE (arg), c2));
6289 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6290 if (lang_hooks.decls.global_bindings_p () == 0
6291 && ! CONTAINS_PLACEHOLDER_P (arg))
6293 arg = save_expr (arg);
6294 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6295 fold_build2 (GE_EXPR, type, arg,
6296 build_real (TREE_TYPE (arg),
6298 fold_build2 (code, type, arg,
6299 build_real (TREE_TYPE (arg),
6308 /* Subroutine of fold() that optimizes comparisons against Infinities,
6309 either +Inf or -Inf.
6311 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6312 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6313 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6315 The function returns the constant folded tree if a simplification
6316 can be made, and NULL_TREE otherwise. */
6319 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6321 enum machine_mode mode;
6322 REAL_VALUE_TYPE max;
6326 mode = TYPE_MODE (TREE_TYPE (arg0));
6328 /* For negative infinity swap the sense of the comparison. */
6329 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6331 code = swap_tree_comparison (code);
6336 /* x > +Inf is always false, if with ignore sNANs. */
6337 if (HONOR_SNANS (mode))
6339 return omit_one_operand (type, integer_zero_node, arg0);
6342 /* x <= +Inf is always true, if we don't case about NaNs. */
6343 if (! HONOR_NANS (mode))
6344 return omit_one_operand (type, integer_one_node, arg0);
6346 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6347 if (lang_hooks.decls.global_bindings_p () == 0
6348 && ! CONTAINS_PLACEHOLDER_P (arg0))
6350 arg0 = save_expr (arg0);
6351 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6357 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6358 real_maxval (&max, neg, mode);
6359 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6360 arg0, build_real (TREE_TYPE (arg0), max));
6363 /* x < +Inf is always equal to x <= DBL_MAX. */
6364 real_maxval (&max, neg, mode);
6365 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6366 arg0, build_real (TREE_TYPE (arg0), max));
6369 /* x != +Inf is always equal to !(x > DBL_MAX). */
6370 real_maxval (&max, neg, mode);
6371 if (! HONOR_NANS (mode))
6372 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6373 arg0, build_real (TREE_TYPE (arg0), max));
6375 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6376 arg0, build_real (TREE_TYPE (arg0), max));
6377 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6386 /* Subroutine of fold() that optimizes comparisons of a division by
6387 a nonzero integer constant against an integer constant, i.e.
6390 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6391 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6392 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6394 The function returns the constant folded tree if a simplification
6395 can be made, and NULL_TREE otherwise. */
6398 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6400 tree prod, tmp, hi, lo;
6401 tree arg00 = TREE_OPERAND (arg0, 0);
6402 tree arg01 = TREE_OPERAND (arg0, 1);
6403 unsigned HOST_WIDE_INT lpart;
6404 HOST_WIDE_INT hpart;
6405 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6409 /* We have to do this the hard way to detect unsigned overflow.
6410 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6411 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6412 TREE_INT_CST_HIGH (arg01),
6413 TREE_INT_CST_LOW (arg1),
6414 TREE_INT_CST_HIGH (arg1),
6415 &lpart, &hpart, unsigned_p);
6416 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6418 neg_overflow = false;
6422 tmp = int_const_binop (MINUS_EXPR, arg01,
6423 build_int_cst (TREE_TYPE (arg01), 1), 0);
6426 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6427 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6428 TREE_INT_CST_HIGH (prod),
6429 TREE_INT_CST_LOW (tmp),
6430 TREE_INT_CST_HIGH (tmp),
6431 &lpart, &hpart, unsigned_p);
6432 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6433 -1, overflow | TREE_OVERFLOW (prod));
6435 else if (tree_int_cst_sgn (arg01) >= 0)
6437 tmp = int_const_binop (MINUS_EXPR, arg01,
6438 build_int_cst (TREE_TYPE (arg01), 1), 0);
6439 switch (tree_int_cst_sgn (arg1))
6442 neg_overflow = true;
6443 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6448 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6453 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6463 /* A negative divisor reverses the relational operators. */
6464 code = swap_tree_comparison (code);
6466 tmp = int_const_binop (PLUS_EXPR, arg01,
6467 build_int_cst (TREE_TYPE (arg01), 1), 0);
6468 switch (tree_int_cst_sgn (arg1))
6471 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6476 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6481 neg_overflow = true;
6482 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6494 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6495 return omit_one_operand (type, integer_zero_node, arg00);
6496 if (TREE_OVERFLOW (hi))
6497 return fold_build2 (GE_EXPR, type, arg00, lo);
6498 if (TREE_OVERFLOW (lo))
6499 return fold_build2 (LE_EXPR, type, arg00, hi);
6500 return build_range_check (type, arg00, 1, lo, hi);
6503 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6504 return omit_one_operand (type, integer_one_node, arg00);
6505 if (TREE_OVERFLOW (hi))
6506 return fold_build2 (LT_EXPR, type, arg00, lo);
6507 if (TREE_OVERFLOW (lo))
6508 return fold_build2 (GT_EXPR, type, arg00, hi);
6509 return build_range_check (type, arg00, 0, lo, hi);
6512 if (TREE_OVERFLOW (lo))
6514 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6515 return omit_one_operand (type, tmp, arg00);
6517 return fold_build2 (LT_EXPR, type, arg00, lo);
6520 if (TREE_OVERFLOW (hi))
6522 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6523 return omit_one_operand (type, tmp, arg00);
6525 return fold_build2 (LE_EXPR, type, arg00, hi);
6528 if (TREE_OVERFLOW (hi))
6530 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6531 return omit_one_operand (type, tmp, arg00);
6533 return fold_build2 (GT_EXPR, type, arg00, hi);
6536 if (TREE_OVERFLOW (lo))
6538 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6539 return omit_one_operand (type, tmp, arg00);
6541 return fold_build2 (GE_EXPR, type, arg00, lo);
6551 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6552 equality/inequality test, then return a simplified form of the test
6553 using a sign testing. Otherwise return NULL. TYPE is the desired
6557 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6560 /* If this is testing a single bit, we can optimize the test. */
6561 if ((code == NE_EXPR || code == EQ_EXPR)
6562 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6563 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6565 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6566 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6567 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6569 if (arg00 != NULL_TREE
6570 /* This is only a win if casting to a signed type is cheap,
6571 i.e. when arg00's type is not a partial mode. */
6572 && TYPE_PRECISION (TREE_TYPE (arg00))
6573 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6575 tree stype = signed_type_for (TREE_TYPE (arg00));
6576 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6577 result_type, fold_convert (stype, arg00),
6578 build_int_cst (stype, 0));
6585 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6586 equality/inequality test, then return a simplified form of
6587 the test using shifts and logical operations. Otherwise return
6588 NULL. TYPE is the desired result type. */
6591 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6594 /* If this is testing a single bit, we can optimize the test. */
6595 if ((code == NE_EXPR || code == EQ_EXPR)
6596 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6597 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6599 tree inner = TREE_OPERAND (arg0, 0);
6600 tree type = TREE_TYPE (arg0);
6601 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6602 enum machine_mode operand_mode = TYPE_MODE (type);
6604 tree signed_type, unsigned_type, intermediate_type;
6607 /* First, see if we can fold the single bit test into a sign-bit
6609 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6614 /* Otherwise we have (A & C) != 0 where C is a single bit,
6615 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6616 Similarly for (A & C) == 0. */
6618 /* If INNER is a right shift of a constant and it plus BITNUM does
6619 not overflow, adjust BITNUM and INNER. */
6620 if (TREE_CODE (inner) == RSHIFT_EXPR
6621 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6622 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6623 && bitnum < TYPE_PRECISION (type)
6624 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6625 bitnum - TYPE_PRECISION (type)))
6627 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6628 inner = TREE_OPERAND (inner, 0);
6631 /* If we are going to be able to omit the AND below, we must do our
6632 operations as unsigned. If we must use the AND, we have a choice.
6633 Normally unsigned is faster, but for some machines signed is. */
6634 #ifdef LOAD_EXTEND_OP
6635 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6636 && !flag_syntax_only) ? 0 : 1;
6641 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6642 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6643 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6644 inner = fold_convert (intermediate_type, inner);
6647 inner = build2 (RSHIFT_EXPR, intermediate_type,
6648 inner, size_int (bitnum));
6650 one = build_int_cst (intermediate_type, 1);
6652 if (code == EQ_EXPR)
6653 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6655 /* Put the AND last so it can combine with more things. */
6656 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6658 /* Make sure to return the proper type. */
6659 inner = fold_convert (result_type, inner);
6666 /* Check whether we are allowed to reorder operands arg0 and arg1,
6667 such that the evaluation of arg1 occurs before arg0. */
6670 reorder_operands_p (tree arg0, tree arg1)
6672 if (! flag_evaluation_order)
6674 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6676 return ! TREE_SIDE_EFFECTS (arg0)
6677 && ! TREE_SIDE_EFFECTS (arg1);
6680 /* Test whether it is preferable two swap two operands, ARG0 and
6681 ARG1, for example because ARG0 is an integer constant and ARG1
6682 isn't. If REORDER is true, only recommend swapping if we can
6683 evaluate the operands in reverse order. */
6686 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6688 STRIP_SIGN_NOPS (arg0);
6689 STRIP_SIGN_NOPS (arg1);
6691 if (TREE_CODE (arg1) == INTEGER_CST)
6693 if (TREE_CODE (arg0) == INTEGER_CST)
6696 if (TREE_CODE (arg1) == REAL_CST)
6698 if (TREE_CODE (arg0) == REAL_CST)
6701 if (TREE_CODE (arg1) == COMPLEX_CST)
6703 if (TREE_CODE (arg0) == COMPLEX_CST)
6706 if (TREE_CONSTANT (arg1))
6708 if (TREE_CONSTANT (arg0))
6714 if (reorder && flag_evaluation_order
6715 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6718 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6719 for commutative and comparison operators. Ensuring a canonical
6720 form allows the optimizers to find additional redundancies without
6721 having to explicitly check for both orderings. */
6722 if (TREE_CODE (arg0) == SSA_NAME
6723 && TREE_CODE (arg1) == SSA_NAME
6724 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6727 /* Put SSA_NAMEs last. */
6728 if (TREE_CODE (arg1) == SSA_NAME)
6730 if (TREE_CODE (arg0) == SSA_NAME)
6733 /* Put variables last. */
6742 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6743 ARG0 is extended to a wider type. */
6746 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6748 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6750 tree shorter_type, outer_type;
6754 if (arg0_unw == arg0)
6756 shorter_type = TREE_TYPE (arg0_unw);
6758 #ifdef HAVE_canonicalize_funcptr_for_compare
6759 /* Disable this optimization if we're casting a function pointer
6760 type on targets that require function pointer canonicalization. */
6761 if (HAVE_canonicalize_funcptr_for_compare
6762 && TREE_CODE (shorter_type) == POINTER_TYPE
6763 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6767 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6770 arg1_unw = get_unwidened (arg1, shorter_type);
6772 /* If possible, express the comparison in the shorter mode. */
6773 if ((code == EQ_EXPR || code == NE_EXPR
6774 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6775 && (TREE_TYPE (arg1_unw) == shorter_type
6776 || (TREE_CODE (arg1_unw) == INTEGER_CST
6777 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6778 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6779 && int_fits_type_p (arg1_unw, shorter_type))))
6780 return fold_build2 (code, type, arg0_unw,
6781 fold_convert (shorter_type, arg1_unw));
6783 if (TREE_CODE (arg1_unw) != INTEGER_CST
6784 || TREE_CODE (shorter_type) != INTEGER_TYPE
6785 || !int_fits_type_p (arg1_unw, shorter_type))
6788 /* If we are comparing with the integer that does not fit into the range
6789 of the shorter type, the result is known. */
6790 outer_type = TREE_TYPE (arg1_unw);
6791 min = lower_bound_in_type (outer_type, shorter_type);
6792 max = upper_bound_in_type (outer_type, shorter_type);
6794 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6796 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6803 return omit_one_operand (type, integer_zero_node, arg0);
6808 return omit_one_operand (type, integer_one_node, arg0);
6814 return omit_one_operand (type, integer_one_node, arg0);
6816 return omit_one_operand (type, integer_zero_node, arg0);
6821 return omit_one_operand (type, integer_zero_node, arg0);
6823 return omit_one_operand (type, integer_one_node, arg0);
6832 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6833 ARG0 just the signedness is changed. */
6836 fold_sign_changed_comparison (enum tree_code code, tree type,
6837 tree arg0, tree arg1)
6840 tree inner_type, outer_type;
6842 if (TREE_CODE (arg0) != NOP_EXPR
6843 && TREE_CODE (arg0) != CONVERT_EXPR)
6846 outer_type = TREE_TYPE (arg0);
6847 arg0_inner = TREE_OPERAND (arg0, 0);
6848 inner_type = TREE_TYPE (arg0_inner);
6850 #ifdef HAVE_canonicalize_funcptr_for_compare
6851 /* Disable this optimization if we're casting a function pointer
6852 type on targets that require function pointer canonicalization. */
6853 if (HAVE_canonicalize_funcptr_for_compare
6854 && TREE_CODE (inner_type) == POINTER_TYPE
6855 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6859 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6862 if (TREE_CODE (arg1) != INTEGER_CST
6863 && !((TREE_CODE (arg1) == NOP_EXPR
6864 || TREE_CODE (arg1) == CONVERT_EXPR)
6865 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6868 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6873 if (TREE_CODE (arg1) == INTEGER_CST)
6874 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6875 TREE_INT_CST_HIGH (arg1), 0,
6876 TREE_OVERFLOW (arg1));
6878 arg1 = fold_convert (inner_type, arg1);
6880 return fold_build2 (code, type, arg0_inner, arg1);
6883 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6884 step of the array. Reconstructs s and delta in the case of s * delta
6885 being an integer constant (and thus already folded).
6886 ADDR is the address. MULT is the multiplicative expression.
6887 If the function succeeds, the new address expression is returned. Otherwise
6888 NULL_TREE is returned. */
6891 try_move_mult_to_index (tree addr, tree op1)
6893 tree s, delta, step;
6894 tree ref = TREE_OPERAND (addr, 0), pref;
6899 /* Strip the nops that might be added when converting op1 to sizetype. */
6902 /* Canonicalize op1 into a possibly non-constant delta
6903 and an INTEGER_CST s. */
6904 if (TREE_CODE (op1) == MULT_EXPR)
6906 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6911 if (TREE_CODE (arg0) == INTEGER_CST)
6916 else if (TREE_CODE (arg1) == INTEGER_CST)
6924 else if (TREE_CODE (op1) == INTEGER_CST)
6931 /* Simulate we are delta * 1. */
6933 s = integer_one_node;
6936 for (;; ref = TREE_OPERAND (ref, 0))
6938 if (TREE_CODE (ref) == ARRAY_REF)
6940 /* Remember if this was a multi-dimensional array. */
6941 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6944 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6948 step = array_ref_element_size (ref);
6949 if (TREE_CODE (step) != INTEGER_CST)
6954 if (! tree_int_cst_equal (step, s))
6959 /* Try if delta is a multiple of step. */
6960 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6966 /* Only fold here if we can verify we do not overflow one
6967 dimension of a multi-dimensional array. */
6972 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6973 || !INTEGRAL_TYPE_P (itype)
6974 || !TYPE_MAX_VALUE (itype)
6975 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6978 tmp = fold_binary (PLUS_EXPR, itype,
6979 fold_convert (itype,
6980 TREE_OPERAND (ref, 1)),
6981 fold_convert (itype, delta));
6983 || TREE_CODE (tmp) != INTEGER_CST
6984 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6993 if (!handled_component_p (ref))
6997 /* We found the suitable array reference. So copy everything up to it,
6998 and replace the index. */
7000 pref = TREE_OPERAND (addr, 0);
7001 ret = copy_node (pref);
7006 pref = TREE_OPERAND (pref, 0);
7007 TREE_OPERAND (pos, 0) = copy_node (pref);
7008 pos = TREE_OPERAND (pos, 0);
7011 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7012 fold_convert (itype,
7013 TREE_OPERAND (pos, 1)),
7014 fold_convert (itype, delta));
7016 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7020 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7021 means A >= Y && A != MAX, but in this case we know that
7022 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7025 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7027 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7029 if (TREE_CODE (bound) == LT_EXPR)
7030 a = TREE_OPERAND (bound, 0);
7031 else if (TREE_CODE (bound) == GT_EXPR)
7032 a = TREE_OPERAND (bound, 1);
7036 typea = TREE_TYPE (a);
7037 if (!INTEGRAL_TYPE_P (typea)
7038 && !POINTER_TYPE_P (typea))
7041 if (TREE_CODE (ineq) == LT_EXPR)
7043 a1 = TREE_OPERAND (ineq, 1);
7044 y = TREE_OPERAND (ineq, 0);
7046 else if (TREE_CODE (ineq) == GT_EXPR)
7048 a1 = TREE_OPERAND (ineq, 0);
7049 y = TREE_OPERAND (ineq, 1);
7054 if (TREE_TYPE (a1) != typea)
7057 if (POINTER_TYPE_P (typea))
7059 /* Convert the pointer types into integer before taking the difference. */
7060 tree ta = fold_convert (ssizetype, a);
7061 tree ta1 = fold_convert (ssizetype, a1);
7062 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7065 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7067 if (!diff || !integer_onep (diff))
7070 return fold_build2 (GE_EXPR, type, a, y);
7073 /* Fold a sum or difference of at least one multiplication.
7074 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): this excerpt elides original lines (the embedded line
   numbers skip), so braces, else-arms and some statements of this
   function are not visible here.  Comments below describe only what the
   visible lines establish.  */
7077 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7079 tree arg00, arg01, arg10, arg11;
7080 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7082 /* (A * C) +- (B * C) -> (A+-B) * C.
7083 (A * C) +- A -> A * (C+-1).
7084 We are most concerned about the case where C is a constant,
7085 but other combinations show up during loop reduction. Since
7086 it is not difficult, try all four possibilities. */
/* Decompose arg0 into a factor pair; a non-MULT operand is treated as
   itself times one (the elided else-branch presumably sets arg00).  */
7088 if (TREE_CODE (arg0) == MULT_EXPR)
7090 arg00 = TREE_OPERAND (arg0, 0);
7091 arg01 = TREE_OPERAND (arg0, 1);
7096 arg01 = build_one_cst (type);
/* Same decomposition for arg1.  */
7098 if (TREE_CODE (arg1) == MULT_EXPR)
7100 arg10 = TREE_OPERAND (arg1, 0);
7101 arg11 = TREE_OPERAND (arg1, 1);
7106 arg11 = build_one_cst (type);
/* Try all four factor pairings for a common multiplicand SAME.  */
7110 if (operand_equal_p (arg01, arg11, 0))
7111 same = arg01, alt0 = arg00, alt1 = arg10;
7112 else if (operand_equal_p (arg00, arg10, 0))
7113 same = arg00, alt0 = arg01, alt1 = arg11;
7114 else if (operand_equal_p (arg00, arg11, 0))
7115 same = arg00, alt0 = arg01, alt1 = arg10;
7116 else if (operand_equal_p (arg01, arg10, 0))
7117 same = arg01, alt0 = arg00, alt1 = arg11;
7119 /* No identical multiplicands; see if we can find a common
7120 power-of-two factor in non-power-of-two multiplies. This
7121 can help in multi-dimensional array access. */
7122 else if (host_integerp (arg01, 0)
7123 && host_integerp (arg11, 0))
7125 HOST_WIDE_INT int01, int11, tmp;
7128 int01 = TREE_INT_CST_LOW (arg01);
7129 int11 = TREE_INT_CST_LOW (arg11);
7131 /* Move min of absolute values to int11. */
7132 if ((int01 >= 0 ? int01 : -int01)
7133 < (int11 >= 0 ? int11 : -int11))
7135 tmp = int01, int01 = int11, int11 = tmp;
7136 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* Factor out int11 when it is a power of two dividing int01.  */
7143 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7145 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7146 build_int_cst (TREE_TYPE (arg00),
/* NOTE(review): 'maybe_same' is used here but its declaration sits in
   elided lines — confirm against the full source.  */
7151 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rebuild as (alt0 +- alt1) * same.  */
7156 return fold_build2 (MULT_EXPR, type,
7157 fold_build2 (code, type,
7158 fold_convert (type, alt0),
7159 fold_convert (type, alt1)),
7160 fold_convert (type, same));
7165 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7166 specified by EXPR into the buffer PTR of length LEN bytes.
7167 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt elides lines (numbering skips); the failure
   return and loop braces are among the elided text.  */
7171 native_encode_int (tree expr, unsigned char *ptr, int len)
7173 tree type = TREE_TYPE (expr);
7174 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7175 int byte, offset, word, words;
7176 unsigned char value;
/* Refuse to encode if the constant does not fit in the buffer.  */
7178 if (total_bytes > len)
7180 words = total_bytes / UNITS_PER_WORD;
/* Emit one target byte per iteration, in target byte order.  */
7182 for (byte = 0; byte < total_bytes; byte++)
7184 int bitpos = byte * BITS_PER_UNIT;
/* Low word of the constant supplies the first HOST_BITS_PER_WIDE_INT
   bits; the high word supplies the rest.  */
7185 if (bitpos < HOST_BITS_PER_WIDE_INT)
7186 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7188 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7189 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map logical byte index to the target's word- and byte-endian
   buffer position.  */
7191 if (total_bytes > UNITS_PER_WORD)
7193 word = byte / UNITS_PER_WORD;
7194 if (WORDS_BIG_ENDIAN)
7195 word = (words - 1) - word;
7196 offset = word * UNITS_PER_WORD;
7197 if (BYTES_BIG_ENDIAN)
7198 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7200 offset += byte % UNITS_PER_WORD;
7203 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7204 ptr[offset] = value;
7210 /* Subroutine of native_encode_expr. Encode the REAL_CST
7211 specified by EXPR into the buffer PTR of length LEN bytes.
7212 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt elides lines; the declaration of 'tmp' (the
   long[] target-image array filled by real_to_target) is among the
   elided text — confirm against the full source.  */
7216 native_encode_real (tree expr, unsigned char *ptr, int len)
7218 tree type = TREE_TYPE (expr);
7219 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7220 int byte, offset, word, words, bitpos;
7221 unsigned char value;
7223 /* There are always 32 bits in each long, no matter the size of
7224 the hosts long. We handle floating point representations with
/* Bail out if the value does not fit in the caller's buffer.  */
7228 if (total_bytes > len)
7230 words = 32 / UNITS_PER_WORD;
/* Convert the internal real to the target's byte image.  */
7232 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7234 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7235 bitpos += BITS_PER_UNIT)
/* Work in 32-bit (4-byte) groups: 'byte' is the position within the
   current group, tmp[bitpos / 32] the group's value.  */
7237 byte = (bitpos / BITS_PER_UNIT) & 3;
7238 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Endian-correct placement within each 32-bit group.  */
7240 if (UNITS_PER_WORD < 4)
7242 word = byte / UNITS_PER_WORD;
7243 if (WORDS_BIG_ENDIAN)
7244 word = (words - 1) - word;
7245 offset = word * UNITS_PER_WORD;
7246 if (BYTES_BIG_ENDIAN)
7247 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7249 offset += byte % UNITS_PER_WORD;
7252 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7253 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7258 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7259 specified by EXPR into the buffer PTR of length LEN bytes.
7260 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt elides lines, including the declarations of
   'part', 'rsize', 'isize' and the zero-return error checks after each
   recursive encode — confirm against the full source.  */
7264 native_encode_complex (tree expr, unsigned char *ptr, int len)
/* Encode real part at the start of the buffer ...  */
7269 part = TREE_REALPART (expr);
7270 rsize = native_encode_expr (part, ptr, len);
/* ... then the imaginary part immediately after it.  */
7273 part = TREE_IMAGPART (expr);
7274 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7277 return rsize + isize;
7281 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7282 specified by EXPR into the buffer PTR of length LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt elides lines, including offset initialization,
   the per-iteration offset advance, and the final return — confirm
   against the full source.  */
7287 native_encode_vector (tree expr, unsigned char *ptr, int len)
7289 int i, size, offset, count;
7290 tree itype, elem, elements;
/* Walk the TREE_LIST of vector elements; every element of a vector
   has the same element type and hence the same encoded size.  */
7293 elements = TREE_VECTOR_CST_ELTS (expr);
7294 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7295 itype = TREE_TYPE (TREE_TYPE (expr));
7296 size = GET_MODE_SIZE (TYPE_MODE (itype));
7297 for (i = 0; i < count; i++)
7301 elem = TREE_VALUE (elements);
7302 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly its mode size.  */
7309 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
/* Trailing elements past the explicit list are zero-filled
   (subject to the buffer length check).  */
7314 if (offset + size > len)
7316 memset (ptr+offset, 0, size);
7324 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7325 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7326 buffer PTR of length LEN bytes. Return the number of bytes
7327 placed in the buffer, or zero upon failure. */
/* NOTE(review): excerpt elides the case labels and the default
   (failure) arm of the switch — only the dispatch calls are visible.  */
7330 native_encode_expr (tree expr, unsigned char *ptr, int len)
7332 switch (TREE_CODE (expr))
7335 return native_encode_int (expr, ptr, len);
7338 return native_encode_real (expr, ptr, len);
7341 return native_encode_complex (expr, ptr, len);
7344 return native_encode_vector (expr, ptr, len);
7352 /* Subroutine of native_interpret_expr. Interpret the contents of
7353 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7354 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt elides lines (NULL_TREE returns after the two
   guard conditions are among the elided text).  */
7357 native_interpret_int (tree type, unsigned char *ptr, int len)
7359 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7360 int byte, offset, word, words;
7361 unsigned char value;
/* NOTE(review): "unsigned int HOST_WIDE_INT" is not a valid C
   declarator — likely an extraction artifact for
   "unsigned HOST_WIDE_INT"; verify against the original file.  */
7362 unsigned int HOST_WIDE_INT lo = 0;
7363 HOST_WIDE_INT hi = 0;
/* Fail for short buffers or types wider than the 2-word (lo/hi)
   internal representation.  */
7365 if (total_bytes > len)
7367 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7369 words = total_bytes / UNITS_PER_WORD;
/* Inverse of native_encode_int: read bytes in target order and
   accumulate them into the lo/hi pair.  */
7371 for (byte = 0; byte < total_bytes; byte++)
7373 int bitpos = byte * BITS_PER_UNIT;
7374 if (total_bytes > UNITS_PER_WORD)
7376 word = byte / UNITS_PER_WORD;
7377 if (WORDS_BIG_ENDIAN)
7378 word = (words - 1) - word;
7379 offset = word * UNITS_PER_WORD;
7380 if (BYTES_BIG_ENDIAN)
7381 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7383 offset += byte % UNITS_PER_WORD;
7386 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7387 value = ptr[offset];
7389 if (bitpos < HOST_BITS_PER_WIDE_INT)
7390 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7392 hi |= (unsigned HOST_WIDE_INT) value
7393 << (bitpos - HOST_BITS_PER_WIDE_INT);
/* build_int_cst_wide_type sign-/zero-extends per TYPE.  */
7396 return build_int_cst_wide_type (type, lo, hi);
7400 /* Subroutine of native_interpret_expr. Interpret the contents of
7401 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7402 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt elides lines, including the declarations of
   'tmp' (long[] image buffer) and 'r' (REAL_VALUE_TYPE), and the
   NULL_TREE failure return — confirm against the full source.  */
7405 native_interpret_real (tree type, unsigned char *ptr, int len)
7407 enum machine_mode mode = TYPE_MODE (type);
7408 int total_bytes = GET_MODE_SIZE (mode);
7409 int byte, offset, word, words, bitpos;
7410 unsigned char value;
7411 /* There are always 32 bits in each long, no matter the size of
7412 the hosts long. We handle floating point representations with
7417 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* The 24-byte cap bounds the tmp[] image buffer.  */
7418 if (total_bytes > len || total_bytes > 24)
7420 words = 32 / UNITS_PER_WORD;
7422 memset (tmp, 0, sizeof (tmp));
/* Inverse of native_encode_real: gather target-ordered bytes into
   32-bit groups of tmp[].  */
7423 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7424 bitpos += BITS_PER_UNIT)
7426 byte = (bitpos / BITS_PER_UNIT) & 3;
7427 if (UNITS_PER_WORD < 4)
7429 word = byte / UNITS_PER_WORD;
7430 if (WORDS_BIG_ENDIAN)
7431 word = (words - 1) - word;
7432 offset = word * UNITS_PER_WORD;
7433 if (BYTES_BIG_ENDIAN)
7434 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7436 offset += byte % UNITS_PER_WORD;
7439 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7440 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7442 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Decode the target image into an internal real and wrap it.  */
7445 real_from_target (&r, tmp, mode);
7446 return build_real (type, r);
7450 /* Subroutine of native_interpret_expr. Interpret the contents of
7451 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7452 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt elides lines, including the 'size' declaration,
   the len check, and NULL_TREE returns when either part fails to
   interpret — confirm against the full source.  */
7455 native_interpret_complex (tree type, unsigned char *ptr, int len)
7457 tree etype, rpart, ipart;
/* Real part occupies the first element-sized slice, imaginary part
   the second.  */
7460 etype = TREE_TYPE (type);
7461 size = GET_MODE_SIZE (TYPE_MODE (etype));
7464 rpart = native_interpret_expr (etype, ptr, size);
7467 ipart = native_interpret_expr (etype, ptr+size, size);
7470 return build_complex (type, rpart, ipart);
7474 /* Subroutine of native_interpret_expr. Interpret the contents of
7475 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7476 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt elides lines, including the 'i', 'size',
   'count' declarations, the NULL_TREE return on the length check and on
   a failed element interpret — confirm against the full source.  */
7479 native_interpret_vector (tree type, unsigned char *ptr, int len)
7481 tree etype, elem, elements;
7484 etype = TREE_TYPE (type);
7485 size = GET_MODE_SIZE (TYPE_MODE (etype));
7486 count = TYPE_VECTOR_SUBPARTS (type);
7487 if (size * count > len)
7490 elements = NULL_TREE;
/* Build the element TREE_LIST back-to-front so consing preserves
   the original element order.  */
7491 for (i = count - 1; i >= 0; i--)
7493 elem = native_interpret_expr (etype, ptr+(i*size), size);
7496 elements = tree_cons (NULL_TREE, elem, elements);
7498 return build_vector (type, elements);
7502 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7503 the buffer PTR of length LEN as a constant of type TYPE. For
7504 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7505 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7506 return NULL_TREE. */
/* NOTE(review): excerpt elides the case labels and the default
   (NULL_TREE) arm of the switch — only the dispatch calls are
   visible.  */
7509 native_interpret_expr (tree type, unsigned char *ptr, int len)
7511 switch (TREE_CODE (type))
7516 return native_interpret_int (type, ptr, len);
7519 return native_interpret_real (type, ptr, len);
7522 return native_interpret_complex (type, ptr, len);
7525 return native_interpret_vector (type, ptr, len);
7533 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7534 TYPE at compile-time. If we're unable to perform the conversion
7535 return NULL_TREE. */
/* NOTE(review): excerpt elides lines, including the 'len' declaration
   and the NULL_TREE returns on the sanity check and a failed encode —
   confirm against the full source.  */
7538 fold_view_convert_expr (tree type, tree expr)
7540 /* We support up to 512-bit values (for V8DFmode). */
7541 unsigned char buffer[64];
7544 /* Check that the host and target are sane. */
7545 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to target bytes, then reinterpret those
   bytes as a constant of TYPE.  */
7548 len = native_encode_expr (expr, buffer, sizeof (buffer));
7552 return native_interpret_expr (type, buffer, len);
7556 /* Fold a unary expression of code CODE and type TYPE with operand
7557 OP0. Return the folded expression if folding is successful.
7558 Otherwise, return NULL_TREE. */
/* NOTE(review): this excerpt elides many original lines (the embedded
   line numbers skip), including the declarations of 'tem' and 'arg0',
   several braces, else-arms and — importantly — most of the switch's
   'case' labels.  Inline notes below flag where a case label has
   evidently been elided; confirm each against the full source.  */
7561 fold_unary (enum tree_code code, tree type, tree op0)
7565 enum tree_code_class kind = TREE_CODE_CLASS (code);
7567 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7568 && TREE_CODE_LENGTH (code) == 1);
/* For conversions and ABS, keep sign-changing NOPs visible.  */
7573 if (code == NOP_EXPR || code == CONVERT_EXPR
7574 || code == FLOAT_EXPR || code == ABS_EXPR)
7576 /* Don't use STRIP_NOPS, because signedness of argument type
7578 STRIP_SIGN_NOPS (arg0);
7582 /* Strip any conversions that don't change the mode. This
7583 is safe for every expression, except for a comparison
7584 expression because its signedness is derived from its
7587 Note that this is done as an internal manipulation within
7588 the constant folder, in order to find the simplest
7589 representation of the arguments so that their form can be
7590 studied. In any cases, the appropriate type conversions
7591 should be put back in the tree that will get out of the
/* Distribute any unary operation over COMPOUND_EXPR and COND_EXPR.  */
7597 if (TREE_CODE_CLASS (code) == tcc_unary)
7599 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7600 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7601 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7602 else if (TREE_CODE (arg0) == COND_EXPR)
7604 tree arg01 = TREE_OPERAND (arg0, 1);
7605 tree arg02 = TREE_OPERAND (arg0, 2);
7606 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7607 arg01 = fold_build1 (code, type, arg01);
7608 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7609 arg02 = fold_build1 (code, type, arg02);
7610 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7613 /* If this was a conversion, and all we did was to move into
7614 inside the COND_EXPR, bring it back out. But leave it if
7615 it is a conversion from integer to integer and the
7616 result precision is no wider than a word since such a
7617 conversion is cheap and may be optimized away by combine,
7618 while it couldn't if it were outside the COND_EXPR. Then return
7619 so we don't get into an infinite recursion loop taking the
7620 conversion out and then back in. */
7622 if ((code == NOP_EXPR || code == CONVERT_EXPR
7623 || code == NON_LVALUE_EXPR)
7624 && TREE_CODE (tem) == COND_EXPR
7625 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7626 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7627 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7628 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7629 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7630 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7631 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7633 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7634 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7635 || flag_syntax_only))
7636 tem = build1 (code, type,
7638 TREE_TYPE (TREE_OPERAND
7639 (TREE_OPERAND (tem, 1), 0)),
7640 TREE_OPERAND (tem, 0),
7641 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7642 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
/* A comparison operand: either relabel its type directly (boolean)
   or rewrite as COND_EXPR selecting op applied to 1/0.  */
7645 else if (COMPARISON_CLASS_P (arg0))
7647 if (TREE_CODE (type) == BOOLEAN_TYPE)
7649 arg0 = copy_node (arg0);
7650 TREE_TYPE (arg0) = type;
7653 else if (TREE_CODE (type) != INTEGER_TYPE)
7654 return fold_build3 (COND_EXPR, type, arg0,
7655 fold_build1 (code, type,
7657 fold_build1 (code, type,
7658 integer_zero_node));
/* NOTE(review): the elided lines before this presumably held the
   switch header and the NOP_EXPR/CONVERT_EXPR/FLOAT_EXPR case
   labels that share this conversion-folding arm.  */
7667 case FIX_TRUNC_EXPR:
7668 if (TREE_TYPE (op0) == type)
7671 /* If we have (type) (a CMP b) and type is an integral type, return
7672 new expression involving the new type. */
7673 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7674 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7675 TREE_OPERAND (op0, 1));
7677 /* Handle cases of two conversions in a row. */
7678 if (TREE_CODE (op0) == NOP_EXPR
7679 || TREE_CODE (op0) == CONVERT_EXPR)
7681 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7682 tree inter_type = TREE_TYPE (op0);
7683 int inside_int = INTEGRAL_TYPE_P (inside_type);
7684 int inside_ptr = POINTER_TYPE_P (inside_type);
7685 int inside_float = FLOAT_TYPE_P (inside_type);
7686 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7687 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7688 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7689 int inter_int = INTEGRAL_TYPE_P (inter_type);
7690 int inter_ptr = POINTER_TYPE_P (inter_type);
7691 int inter_float = FLOAT_TYPE_P (inter_type);
7692 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7693 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7694 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7695 int final_int = INTEGRAL_TYPE_P (type);
7696 int final_ptr = POINTER_TYPE_P (type);
7697 int final_float = FLOAT_TYPE_P (type);
7698 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7699 unsigned int final_prec = TYPE_PRECISION (type);
7700 int final_unsignedp = TYPE_UNSIGNED (type);
7702 /* In addition to the cases of two conversions in a row
7703 handled below, if we are converting something to its own
7704 type via an object of identical or wider precision, neither
7705 conversion is needed. */
7706 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7707 && (((inter_int || inter_ptr) && final_int)
7708 || (inter_float && final_float))
7709 && inter_prec >= final_prec)
7710 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7712 /* Likewise, if the intermediate and final types are either both
7713 float or both integer, we don't need the middle conversion if
7714 it is wider than the final type and doesn't change the signedness
7715 (for integers). Avoid this if the final type is a pointer
7716 since then we sometimes need the inner conversion. Likewise if
7717 the outer has a precision not equal to the size of its mode. */
7718 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7719 || (inter_float && inside_float)
7720 || (inter_vec && inside_vec))
7721 && inter_prec >= inside_prec
7722 && (inter_float || inter_vec
7723 || inter_unsignedp == inside_unsignedp)
7724 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7725 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7727 && (! final_vec || inter_prec == inside_prec))
7728 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7730 /* If we have a sign-extension of a zero-extended value, we can
7731 replace that by a single zero-extension. */
7732 if (inside_int && inter_int && final_int
7733 && inside_prec < inter_prec && inter_prec < final_prec
7734 && inside_unsignedp && !inter_unsignedp)
7735 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7737 /* Two conversions in a row are not needed unless:
7738 - some conversion is floating-point (overstrict for now), or
7739 - some conversion is a vector (overstrict for now), or
7740 - the intermediate type is narrower than both initial and
7742 - the intermediate type and innermost type differ in signedness,
7743 and the outermost type is wider than the intermediate, or
7744 - the initial type is a pointer type and the precisions of the
7745 intermediate and final types differ, or
7746 - the final type is a pointer type and the precisions of the
7747 initial and intermediate types differ.
7748 - the final type is a pointer type and the initial type not
7749 - the initial type is a pointer to an array and the final type
7751 if (! inside_float && ! inter_float && ! final_float
7752 && ! inside_vec && ! inter_vec && ! final_vec
7753 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7754 && ! (inside_int && inter_int
7755 && inter_unsignedp != inside_unsignedp
7756 && inter_prec < final_prec)
7757 && ((inter_unsignedp && inter_prec > inside_prec)
7758 == (final_unsignedp && final_prec > inter_prec))
7759 && ! (inside_ptr && inter_prec != final_prec)
7760 && ! (final_ptr && inside_prec != inter_prec)
7761 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7762 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7763 && final_ptr == inside_ptr
7765 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7766 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7767 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7770 /* Handle (T *)&A.B.C for A being of type T and B and C
7771 living at offset zero. This occurs frequently in
7772 C++ upcasting and then accessing the base. */
7773 if (TREE_CODE (op0) == ADDR_EXPR
7774 && POINTER_TYPE_P (type)
7775 && handled_component_p (TREE_OPERAND (op0, 0)))
7777 HOST_WIDE_INT bitsize, bitpos;
7779 enum machine_mode mode;
7780 int unsignedp, volatilep;
7781 tree base = TREE_OPERAND (op0, 0);
7782 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7783 &mode, &unsignedp, &volatilep, false);
7784 /* If the reference was to a (constant) zero offset, we can use
7785 the address of the base if it has the same base type
7786 as the result type. */
7787 if (! offset && bitpos == 0
7788 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7789 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7790 return fold_convert (type, build_fold_addr_expr (base));
/* A constant assignment inside a conversion: hoist the conversion to
   the assigned constant and sequence it after the store.  */
7793 if ((TREE_CODE (op0) == MODIFY_EXPR
7794 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7795 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7796 /* Detect assigning a bitfield. */
7797 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7799 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7801 /* Don't leave an assignment inside a conversion
7802 unless assigning a bitfield. */
7803 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7804 /* First do the assignment, then return converted constant. */
7805 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7806 TREE_NO_WARNING (tem) = 1;
7807 TREE_USED (tem) = 1;
7811 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7812 constants (if x has signed type, the sign bit cannot be set
7813 in c). This folds extension into the BIT_AND_EXPR. */
7814 if (INTEGRAL_TYPE_P (type)
7815 && TREE_CODE (type) != BOOLEAN_TYPE
7816 && TREE_CODE (op0) == BIT_AND_EXPR
7817 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
/* NOTE(review): 'and' and 'change' are used below but declared in
   elided lines — confirm against the full source.  */
7820 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7823 if (TYPE_UNSIGNED (TREE_TYPE (and))
7824 || (TYPE_PRECISION (type)
7825 <= TYPE_PRECISION (TREE_TYPE (and))))
7827 else if (TYPE_PRECISION (TREE_TYPE (and1))
7828 <= HOST_BITS_PER_WIDE_INT
7829 && host_integerp (and1, 1))
7831 unsigned HOST_WIDE_INT cst;
7833 cst = tree_low_cst (and1, 1);
/* Mask keeps the sign bit and everything above it: the transform is
   valid only when the constant's sign bit region is clear.  */
7834 cst &= (HOST_WIDE_INT) -1
7835 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7836 change = (cst == 0);
7837 #ifdef LOAD_EXTEND_OP
7839 && !flag_syntax_only
7840 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7843 tree uns = unsigned_type_for (TREE_TYPE (and0));
7844 and0 = fold_convert (uns, and0);
7845 and1 = fold_convert (uns, and1);
7851 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7852 TREE_INT_CST_HIGH (and1), 0,
7853 TREE_OVERFLOW (and1));
7854 return fold_build2 (BIT_AND_EXPR, type,
7855 fold_convert (type, and0), tem);
7859 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7860 when one of the new casts will fold away. Conservatively we assume
7861 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7862 if (POINTER_TYPE_P (type)
7863 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7864 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7865 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7866 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7868 tree arg00 = TREE_OPERAND (arg0, 0);
7869 tree arg01 = TREE_OPERAND (arg0, 1);
7871 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7872 fold_convert (sizetype, arg01));
7875 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7876 of the same precision, and X is an integer type not narrower than
7877 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7878 if (INTEGRAL_TYPE_P (type)
7879 && TREE_CODE (op0) == BIT_NOT_EXPR
7880 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7881 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7882 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7883 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7885 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7886 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7887 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7888 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Last resort for the conversion cases: constant-fold directly.  */
7891 tem = fold_convert_const (code, type, op0);
7892 return tem ? tem : NULL_TREE;
7894 case VIEW_CONVERT_EXPR:
7895 if (TREE_TYPE (op0) == type)
/* Nested VIEW_CONVERTs collapse to the innermost operand.  */
7897 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7898 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7899 return fold_view_convert_expr (type, op0);
/* NOTE(review): elided lines here likely held "case NEGATE_EXPR:" —
   confirm against the full source.  */
7902 tem = fold_negate_expr (arg0);
7904 return fold_convert (type, tem);
/* NOTE(review): elided lines here likely held "case ABS_EXPR:".  */
7908 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7909 return fold_abs_const (arg0, type);
7910 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7911 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7912 /* Convert fabs((double)float) into (double)fabsf(float). */
7913 else if (TREE_CODE (arg0) == NOP_EXPR
7914 && TREE_CODE (type) == REAL_TYPE)
7916 tree targ0 = strip_float_extensions (arg0);
7918 return fold_convert (type, fold_build1 (ABS_EXPR,
7922 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7923 else if (TREE_CODE (arg0) == ABS_EXPR)
7925 else if (tree_expr_nonnegative_p (arg0))
7928 /* Strip sign ops from argument. */
7929 if (TREE_CODE (type) == REAL_TYPE)
7931 tem = fold_strip_sign_ops (arg0);
7933 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* NOTE(review): elided lines here likely held "case CONJ_EXPR:".  */
7938 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7939 return fold_convert (type, arg0);
7940 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7942 tree itype = TREE_TYPE (type);
7943 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7944 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7945 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7947 if (TREE_CODE (arg0) == COMPLEX_CST)
7949 tree itype = TREE_TYPE (type);
7950 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7951 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7952 return build_complex (type, rpart, negate_expr (ipart));
7954 if (TREE_CODE (arg0) == CONJ_EXPR)
7955 return fold_convert (type, TREE_OPERAND (arg0, 0));
/* NOTE(review): elided lines here likely held "case BIT_NOT_EXPR:".  */
7959 if (TREE_CODE (arg0) == INTEGER_CST)
7960 return fold_not_const (arg0, type);
7961 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7962 return TREE_OPERAND (arg0, 0);
7963 /* Convert ~ (-A) to A - 1. */
7964 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7965 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7966 build_int_cst (type, 1));
7967 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7968 else if (INTEGRAL_TYPE_P (type)
7969 && ((TREE_CODE (arg0) == MINUS_EXPR
7970 && integer_onep (TREE_OPERAND (arg0, 1)))
7971 || (TREE_CODE (arg0) == PLUS_EXPR
7972 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7973 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7974 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7975 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7976 && (tem = fold_unary (BIT_NOT_EXPR, type,
7978 TREE_OPERAND (arg0, 0)))))
7979 return fold_build2 (BIT_XOR_EXPR, type, tem,
7980 fold_convert (type, TREE_OPERAND (arg0, 1)));
7981 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7982 && (tem = fold_unary (BIT_NOT_EXPR, type,
7984 TREE_OPERAND (arg0, 1)))))
7985 return fold_build2 (BIT_XOR_EXPR, type,
7986 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7990 case TRUTH_NOT_EXPR:
7991 /* The argument to invert_truthvalue must have Boolean type. */
7992 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7993 arg0 = fold_convert (boolean_type_node, arg0);
7995 /* Note that the operand of this must be an int
7996 and its values must be 0 or 1.
7997 ("true" is a fixed value perhaps depending on the language,
7998 but we don't handle values other than 1 correctly yet.) */
7999 tem = fold_truth_not_expr (arg0);
8002 return fold_convert (type, tem);
/* NOTE(review): elided lines here likely held "case REALPART_EXPR:". */
8005 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8006 return fold_convert (type, arg0);
8007 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8008 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8009 TREE_OPERAND (arg0, 1));
8010 if (TREE_CODE (arg0) == COMPLEX_CST)
8011 return fold_convert (type, TREE_REALPART (arg0));
8012 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8014 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8015 tem = fold_build2 (TREE_CODE (arg0), itype,
8016 fold_build1 (REALPART_EXPR, itype,
8017 TREE_OPERAND (arg0, 0)),
8018 fold_build1 (REALPART_EXPR, itype,
8019 TREE_OPERAND (arg0, 1)));
8020 return fold_convert (type, tem);
8022 if (TREE_CODE (arg0) == CONJ_EXPR)
8024 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8025 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8026 return fold_convert (type, tem);
/* REALPART (cexpi (x)) folds to cos (x).  */
8028 if (TREE_CODE (arg0) == CALL_EXPR)
8030 tree fn = get_callee_fndecl (arg0);
8031 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8032 switch (DECL_FUNCTION_CODE (fn))
8034 CASE_FLT_FN (BUILT_IN_CEXPI):
8035 fn = mathfn_built_in (type, BUILT_IN_COS);
8037 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
/* NOTE(review): elided lines here likely held "case IMAGPART_EXPR:". */
8047 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8048 return fold_convert (type, integer_zero_node);
8049 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8050 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8051 TREE_OPERAND (arg0, 0));
8052 if (TREE_CODE (arg0) == COMPLEX_CST)
8053 return fold_convert (type, TREE_IMAGPART (arg0));
8054 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8056 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8057 tem = fold_build2 (TREE_CODE (arg0), itype,
8058 fold_build1 (IMAGPART_EXPR, itype,
8059 TREE_OPERAND (arg0, 0)),
8060 fold_build1 (IMAGPART_EXPR, itype,
8061 TREE_OPERAND (arg0, 1)));
8062 return fold_convert (type, tem);
8064 if (TREE_CODE (arg0) == CONJ_EXPR)
8066 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8067 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8068 return fold_convert (type, negate_expr (tem));
/* IMAGPART (cexpi (x)) folds to sin (x).  */
8070 if (TREE_CODE (arg0) == CALL_EXPR)
8072 tree fn = get_callee_fndecl (arg0);
8073 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8074 switch (DECL_FUNCTION_CODE (fn))
8076 CASE_FLT_FN (BUILT_IN_CEXPI):
8077 fn = mathfn_built_in (type, BUILT_IN_SIN);
8079 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8090 } /* switch (code) */
8093 /* Fold a binary expression of code CODE and type TYPE with operands
8094 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8095 Return the folded expression if folding is successful. Otherwise,
8096 return NULL_TREE. */
/* NOTE(review): excerpt elides lines, including the NULL_TREE return
   for a non-MIN/MAX code and the final NULL_TREE fall-through.  */
8099 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8101 enum tree_code compl_code;
/* compl_code is the dual of CODE; all four identities below are of the
   form  CODE (COMPL (a, b), x)  or  CODE (x, COMPL (a, b)).  */
8103 if (code == MIN_EXPR)
8104 compl_code = MAX_EXPR;
8105 else if (code == MAX_EXPR)
8106 compl_code = MIN_EXPR;
8110 /* MIN (MAX (a, b), b) == b. */
8111 if (TREE_CODE (op0) == compl_code
8112 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8113 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8115 /* MIN (MAX (b, a), b) == b. */
8116 if (TREE_CODE (op0) == compl_code
8117 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8118 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8119 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8121 /* MIN (a, MAX (a, b)) == a. */
8122 if (TREE_CODE (op1) == compl_code
8123 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8124 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8125 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8127 /* MIN (a, MAX (b, a)) == a. */
8128 if (TREE_CODE (op1) == compl_code
8129 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8130 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8131 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8136 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8137 by changing CODE to reduce the magnitude of constants involved in
8138 ARG0 of the comparison.
8139 Returns a canonicalized comparison tree if a simplification was
8140 possible, otherwise returns NULL_TREE.
8141 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8142 valid if signed overflow is undefined. */
/* NOTE(review): excerpt elides lines — among them the declarations of
   'sgn0' and 'swap', the NULL_TREE early returns, and the statements
   adjusting 'code' in each transform arm.  */
8145 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8146 tree arg0, tree arg1,
8147 bool *strict_overflow_p)
8149 enum tree_code code0 = TREE_CODE (arg0);
8150 tree t, cst0 = NULL_TREE;
8154 /* Match A +- CST code arg1 and CST code arg1. */
8155 if (!(((code0 == MINUS_EXPR
8156 || code0 == PLUS_EXPR)
8157 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8158 || code0 == INTEGER_CST))
8161 /* Identify the constant in arg0 and its sign. */
8162 if (code0 == INTEGER_CST)
8165 cst0 = TREE_OPERAND (arg0, 1);
8166 sgn0 = tree_int_cst_sgn (cst0);
8168 /* Overflowed constants and zero will cause problems. */
8169 if (integer_zerop (cst0)
8170 || TREE_OVERFLOW (cst0))
8173 /* See if we can reduce the magnitude of the constant in
8174 arg0 by changing the comparison code. */
8175 if (code0 == INTEGER_CST)
8177 /* CST <= arg1 -> CST-1 < arg1. */
8178 if (code == LE_EXPR && sgn0 == 1)
8180 /* -CST < arg1 -> -CST-1 <= arg1. */
8181 else if (code == LT_EXPR && sgn0 == -1)
8183 /* CST > arg1 -> CST-1 >= arg1. */
8184 else if (code == GT_EXPR && sgn0 == 1)
8186 /* -CST >= arg1 -> -CST-1 > arg1. */
8187 else if (code == GE_EXPR && sgn0 == -1)
8191 /* arg1 code' CST' might be more canonical. */
8196 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8198 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8200 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8201 else if (code == GT_EXPR
8202 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8204 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8205 else if (code == LE_EXPR
8206 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8208 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8209 else if (code == GE_EXPR
8210 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* The A +- CST transforms rely on signed overflow being undefined.  */
8214 *strict_overflow_p = true;
8217 /* Now build the constant reduced in magnitude. */
8218 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8219 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8220 if (code0 != INTEGER_CST)
8221 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8223 /* If swapping might yield to a more canonical form, do so. */
8225 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8227 return fold_build2 (code, type, t, arg1);
8230 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8231 overflow further. Try to decrease the magnitude of constants involved
8232 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8233 and put sole constants at the second argument position.
8234 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): the original line numbers on the left are discontinuous;
   the elided lines carry the declaration of `t', braces and the early and
   final return statements of this function.  */
8237 maybe_canonicalize_comparison (enum tree_code code, tree type,
8238 tree arg0, tree arg1)
8241 bool strict_overflow_p;
/* Warning text shared by both canonicalization attempts below.  */
8242 const char * const warnmsg = G_("assuming signed overflow does not occur "
8243 "when reducing constant in comparison")
8245 /* In principle pointers also have undefined overflow behavior,
8246 but that causes problems elsewhere. */
8247 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8248 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8251 /* Try canonicalization by simplifying arg0. */
8252 strict_overflow_p = false;
8253 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8254 &strict_overflow_p);
8257 if (strict_overflow_p)
8258 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8262 /* Try canonicalization by simplifying arg1 using the swapped
   comparison.  */
8264 code = swap_tree_comparison (code);
8265 strict_overflow_p = false;
8266 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8267 &strict_overflow_p);
8268 if (t && strict_overflow_p)
8269 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8273 /* Subroutine of fold_binary. This routine performs all of the
8274 transformations that are common to the equality/inequality
8275 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8276 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8277 fold_binary should call fold_binary. Fold a comparison with
8278 tree code CODE and type TYPE with operands OP0 and OP1. Return
8279 the folded comparison or NULL_TREE. */
/* NOTE(review): the original line numbers on the left are discontinuous;
   elided lines carry some declarations, braces, switch labels and early
   returns, so the control flow below must be read together with the full
   source file.  */
8282 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8284 tree arg0, arg1, tem;
/* Strip sign-preserving conversions so the folders below see the
   underlying operands; OP0/OP1 keep the original trees.  */
8289 STRIP_SIGN_NOPS (arg0);
8290 STRIP_SIGN_NOPS (arg1);
/* First try folding a comparison of two constants outright.  */
8292 tem = fold_relational_const (code, type, arg0, arg1);
8293 if (tem != NULL_TREE)
8296 /* If one arg is a real or integer constant, put it last. */
8297 if (tree_swap_operands_p (arg0, arg1, true))
8298 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8300 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8301 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8302 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8303 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8304 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8305 && (TREE_CODE (arg1) == INTEGER_CST
8306 && !TREE_OVERFLOW (arg1)))
8308 tree const1 = TREE_OPERAND (arg0, 1);
/* NOTE(review): the declarations of const2, lhs and lhs_add fall on
   elided lines.  */
8310 tree variable = TREE_OPERAND (arg0, 0);
8313 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8315 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8316 TREE_TYPE (arg1), const2, const1);
8318 /* If the constant operation overflowed this can be
8319 simplified as a comparison against INT_MAX/INT_MIN. */
8320 if (TREE_CODE (lhs) == INTEGER_CST
8321 && TREE_OVERFLOW (lhs))
8323 int const1_sgn = tree_int_cst_sgn (const1);
8324 enum tree_code code2 = code;
8326 /* Get the sign of the constant on the lhs if the
8327 operation were VARIABLE + CONST1. */
8328 if (TREE_CODE (arg0) == MINUS_EXPR)
8329 const1_sgn = -const1_sgn;
8331 /* The sign of the constant determines if we overflowed
8332 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8333 Canonicalize to the INT_MIN overflow by swapping the comparison
   code.  */
8335 if (const1_sgn == -1)
8336 code2 = swap_tree_comparison (code);
8338 /* We now can look at the canonicalized case
8339 VARIABLE + 1 CODE2 INT_MIN
8340 and decide on the result. */
8341 if (code2 == LT_EXPR
8343 || code2 == EQ_EXPR)
8344 return omit_one_operand (type, boolean_false_node, variable);
8345 else if (code2 == NE_EXPR
8347 || code2 == GT_EXPR)
8348 return omit_one_operand (type, boolean_true_node, variable);
/* Only rewrite when the combined constant did not overflow.  */
8351 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8352 && (TREE_CODE (lhs) != INTEGER_CST
8353 || !TREE_OVERFLOW (lhs)))
8355 fold_overflow_warning (("assuming signed overflow does not occur "
8356 "when changing X +- C1 cmp C2 to "
8358 WARN_STRICT_OVERFLOW_COMPARISON);
8359 return fold_build2 (code, type, variable, lhs);
8363 /* For comparisons of pointers we can decompose it to a compile time
8364 comparison of the base objects and the offsets into the object.
8365 This requires at least one operand being an ADDR_EXPR to do more
8366 than the operand_equal_p test below. */
8367 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8368 && (TREE_CODE (arg0) == ADDR_EXPR
8369 || TREE_CODE (arg1) == ADDR_EXPR))
8371 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8372 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8373 enum machine_mode mode;
8374 int volatilep, unsignedp;
8375 bool indirect_base0 = false;
8377 /* Get base and offset for the access. Strip ADDR_EXPR for
8378 get_inner_reference, but put it back by stripping INDIRECT_REF
8379 off the base object if possible. */
8381 if (TREE_CODE (arg0) == ADDR_EXPR)
8383 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8384 &bitsize, &bitpos0, &offset0, &mode,
8385 &unsignedp, &volatilep, false);
8386 if (TREE_CODE (base0) == INDIRECT_REF)
8387 base0 = TREE_OPERAND (base0, 0);
8389 indirect_base0 = true;
8393 if (TREE_CODE (arg1) == ADDR_EXPR)
8395 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8396 &bitsize, &bitpos1, &offset1, &mode,
8397 &unsignedp, &volatilep, false);
8398 /* We have to make sure to have an indirect/non-indirect base1
8399 just the same as we did for base0. */
8400 if (TREE_CODE (base1) == INDIRECT_REF
8402 base1 = TREE_OPERAND (base1, 0);
8403 else if (!indirect_base0)
8406 else if (indirect_base0)
8409 /* If we have equivalent bases we might be able to simplify. */
8411 && operand_equal_p (base0, base1, 0))
8413 /* We can fold this expression to a constant if the non-constant
8414 offset parts are equal. */
8415 if (offset0 == offset1
8416 || (offset0 && offset1
8417 && operand_equal_p (offset0, offset1, 0)))
/* NOTE(review): the switch over CODE selecting among the constant
   results below is partly elided; each return compares the constant
   bit positions at compile time.  */
8422 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8424 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8426 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8428 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8430 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8432 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8436 /* We can simplify the comparison to a comparison of the variable
8437 offset parts if the constant offset parts are equal.
8438 Be careful to use signed size type here because otherwise we
8439 mess with array offsets in the wrong way. This is possible
8440 because pointer arithmetic is restricted to remain within an
8441 object and overflow on pointer differences is undefined as of
8442 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8443 else if (bitpos0 == bitpos1)
8445 tree signed_size_type_node;
8446 signed_size_type_node = signed_type_for (size_type_node);
8448 /* By converting to signed size type we cover middle-end pointer
8449 arithmetic which operates on unsigned pointer types of size
8450 type size and ARRAY_REF offsets which are properly sign or
8451 zero extended from their type in case it is narrower than
   the size type.  */
8453 if (offset0 == NULL_TREE)
8454 offset0 = build_int_cst (signed_size_type_node, 0);
8456 offset0 = fold_convert (signed_size_type_node, offset0);
8457 if (offset1 == NULL_TREE)
8458 offset1 = build_int_cst (signed_size_type_node, 0);
8460 offset1 = fold_convert (signed_size_type_node, offset1);
8462 return fold_build2 (code, type, offset0, offset1);
8467 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8468 same object, then we can fold this to a comparison of the two offsets in
8469 signed size type. This is possible because pointer arithmetic is
8470 restricted to remain within an object and overflow on pointer differences
8471 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8473 We check flag_wrapv directly because pointer types are unsigned,
8474 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8475 normally what we want to avoid certain odd overflow cases, but
   not here.  */
8477 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8479 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8481 tree base0, offset0, base1, offset1;
8483 if (extract_array_ref (arg0, &base0, &offset0)
8484 && extract_array_ref (arg1, &base1, &offset1)
8485 && operand_equal_p (base0, base1, 0))
8487 tree signed_size_type_node;
8488 signed_size_type_node = signed_type_for (size_type_node);
8490 /* By converting to signed size type we cover middle-end pointer
8491 arithmetic which operates on unsigned pointer types of size
8492 type size and ARRAY_REF offsets which are properly sign or
8493 zero extended from their type in case it is narrower than
   the size type.  */
8495 if (offset0 == NULL_TREE)
8496 offset0 = build_int_cst (signed_size_type_node, 0);
8498 offset0 = fold_convert (signed_size_type_node, offset0);
8499 if (offset1 == NULL_TREE)
8500 offset1 = build_int_cst (signed_size_type_node, 0);
8502 offset1 = fold_convert (signed_size_type_node, offset1);
8504 return fold_build2 (code, type, offset0, offset1);
8508 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8509 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8510 the resulting offset is smaller in absolute value than the
   original one.  */
8512 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8513 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8514 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8515 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8516 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8517 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8518 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8520 tree const1 = TREE_OPERAND (arg0, 1);
8521 tree const2 = TREE_OPERAND (arg1, 1);
8522 tree variable1 = TREE_OPERAND (arg0, 0);
8523 tree variable2 = TREE_OPERAND (arg1, 0);
8525 const char * const warnmsg = G_("assuming signed overflow does not "
8526 "occur when combining constants around "
8529 /* Put the constant on the side where it doesn't overflow and is
8530 of lower absolute value than before. */
8531 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8532 ? MINUS_EXPR : PLUS_EXPR,
8534 if (!TREE_OVERFLOW (cst)
8535 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8537 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8538 return fold_build2 (code, type,
8540 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* Otherwise try moving the combined constant to the other side.  */
8544 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8545 ? MINUS_EXPR : PLUS_EXPR,
8547 if (!TREE_OVERFLOW (cst)
8548 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8550 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8551 return fold_build2 (code, type,
8552 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8558 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8559 signed arithmetic case. That form is created by the compiler
8560 often enough for folding it to be of value. One example is in
8561 computing loop trip counts after Operator Strength Reduction. */
8562 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8563 && TREE_CODE (arg0) == MULT_EXPR
8564 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8565 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8566 && integer_zerop (arg1))
8568 tree const1 = TREE_OPERAND (arg0, 1);
8569 tree const2 = arg1; /* zero */
8570 tree variable1 = TREE_OPERAND (arg0, 0);
8571 enum tree_code cmp_code = code;
/* X * 0 CMP 0 would have been folded earlier as a constant.  */
8573 gcc_assert (!integer_zerop (const1));
8575 fold_overflow_warning (("assuming signed overflow does not occur when "
8576 "eliminating multiplication in comparison "
8578 WARN_STRICT_OVERFLOW_COMPARISON);
8580 /* If const1 is negative we swap the sense of the comparison. */
8581 if (tree_int_cst_sgn (const1) < 0)
8582 cmp_code = swap_tree_comparison (cmp_code);
8584 return fold_build2 (cmp_code, type, variable1, const2);
8587 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8591 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8593 tree targ0 = strip_float_extensions (arg0);
8594 tree targ1 = strip_float_extensions (arg1);
/* NEWTYPE is the wider of the two stripped operand types.  */
8595 tree newtype = TREE_TYPE (targ0);
8597 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8598 newtype = TREE_TYPE (targ1);
8600 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8601 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8602 return fold_build2 (code, type, fold_convert (newtype, targ0),
8603 fold_convert (newtype, targ1));
8605 /* (-a) CMP (-b) -> b CMP a */
8606 if (TREE_CODE (arg0) == NEGATE_EXPR
8607 && TREE_CODE (arg1) == NEGATE_EXPR)
8608 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8609 TREE_OPERAND (arg0, 0));
8611 if (TREE_CODE (arg1) == REAL_CST)
8613 REAL_VALUE_TYPE cst;
8614 cst = TREE_REAL_CST (arg1);
8616 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8617 if (TREE_CODE (arg0) == NEGATE_EXPR)
8618 return fold_build2 (swap_tree_comparison (code), type,
8619 TREE_OPERAND (arg0, 0),
8620 build_real (TREE_TYPE (arg1),
8621 REAL_VALUE_NEGATE (cst)));
8623 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8624 /* a CMP (-0) -> a CMP 0 */
8625 if (REAL_VALUE_MINUS_ZERO (cst))
8626 return fold_build2 (code, type, arg0,
8627 build_real (TREE_TYPE (arg1), dconst0));
8629 /* x != NaN is always true, other ops are always false. */
8630 if (REAL_VALUE_ISNAN (cst)
8631 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8633 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8634 return omit_one_operand (type, tem, arg0);
8637 /* Fold comparisons against infinity. */
8638 if (REAL_VALUE_ISINF (cst))
8640 tem = fold_inf_compare (code, type, arg0, arg1);
8641 if (tem != NULL_TREE)
8646 /* If this is a comparison of a real constant with a PLUS_EXPR
8647 or a MINUS_EXPR of a real constant, we can convert it into a
8648 comparison with a revised real constant as long as no overflow
8649 occurs when unsafe_math_optimizations are enabled. */
8650 if (flag_unsafe_math_optimizations
8651 && TREE_CODE (arg1) == REAL_CST
8652 && (TREE_CODE (arg0) == PLUS_EXPR
8653 || TREE_CODE (arg0) == MINUS_EXPR)
8654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8655 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8656 ? MINUS_EXPR : PLUS_EXPR,
8657 arg1, TREE_OPERAND (arg0, 1), 0))
8658 && !TREE_OVERFLOW (tem))
8659 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8661 /* Likewise, we can simplify a comparison of a real constant with
8662 a MINUS_EXPR whose first operand is also a real constant, i.e.
8663 (c1 - x) < c2 becomes x > c1-c2. */
8664 if (flag_unsafe_math_optimizations
8665 && TREE_CODE (arg1) == REAL_CST
8666 && TREE_CODE (arg0) == MINUS_EXPR
8667 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8668 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8670 && !TREE_OVERFLOW (tem))
8671 return fold_build2 (swap_tree_comparison (code), type,
8672 TREE_OPERAND (arg0, 1), tem);
8674 /* Fold comparisons against built-in math functions. */
8675 if (TREE_CODE (arg1) == REAL_CST
8676 && flag_unsafe_math_optimizations
8677 && ! flag_errno_math)
8679 enum built_in_function fcode = builtin_mathfn_code (arg0);
8681 if (fcode != END_BUILTINS)
8683 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8684 if (tem != NULL_TREE)
8690 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8691 && (TREE_CODE (arg0) == NOP_EXPR
8692 || TREE_CODE (arg0) == CONVERT_EXPR))
8694 /* If we are widening one operand of an integer comparison,
8695 see if the other operand is similarly being widened. Perhaps we
8696 can do the comparison in the narrower type. */
8697 tem = fold_widened_comparison (code, type, arg0, arg1);
8701 /* Or if we are changing signedness. */
8702 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8707 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8708 constant, we can simplify it. */
8709 if (TREE_CODE (arg1) == INTEGER_CST
8710 && (TREE_CODE (arg0) == MIN_EXPR
8711 || TREE_CODE (arg0) == MAX_EXPR)
8712 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8714 tem = optimize_minmax_comparison (code, type, op0, op1);
8719 /* Simplify comparison of something with itself. (For IEEE
8720 floating-point, we can only do some of these simplifications.) */
8721 if (operand_equal_p (arg0, arg1, 0))
/* NOTE(review): the switch over CODE that follows is partly elided;
   only some case bodies are visible.  */
8726 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8727 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8728 return constant_boolean_node (1, type);
8733 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8734 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8735 return constant_boolean_node (1, type);
8736 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8739 /* For NE, we can only do this simplification if integer
8740 or we don't honor IEEE floating point NaNs. */
8741 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8742 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8744 /* ... fall through ... */
8747 return constant_boolean_node (0, type);
8753 /* If we are comparing an expression that just has comparisons
8754 of two integer values, arithmetic expressions of those comparisons,
8755 and constants, we can simplify it. There are only three cases
8756 to check: the two values can either be equal, the first can be
8757 greater, or the second can be greater. Fold the expression for
8758 those three values. Since each value must be 0 or 1, we have
8759 eight possibilities, each of which corresponds to the constant 0
8760 or 1 or one of the six possible comparisons.
8762 This handles common cases like (a > b) == 0 but also handles
8763 expressions like ((x > y) - (y > x)) > 0, which supposedly
8764 occur in macroized code. */
8766 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8768 tree cval1 = 0, cval2 = 0;
8771 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8772 /* Don't handle degenerate cases here; they should already
8773 have been handled anyway. */
8774 && cval1 != 0 && cval2 != 0
8775 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8776 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8777 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8778 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8779 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8780 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8781 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8783 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8784 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8786 /* We can't just pass T to eval_subst in case cval1 or cval2
8787 was the same as ARG1. */
/* NOTE(review): the declarations of high_result, equal_result and
   low_result, and parts of the eval_subst calls, fall on elided lines.  */
8790 = fold_build2 (code, type,
8791 eval_subst (arg0, cval1, maxval,
8795 = fold_build2 (code, type,
8796 eval_subst (arg0, cval1, maxval,
8800 = fold_build2 (code, type,
8801 eval_subst (arg0, cval1, minval,
8805 /* All three of these results should be 0 or 1. Confirm they are.
8806 Then use those values to select the proper code to use. */
8808 if (TREE_CODE (high_result) == INTEGER_CST
8809 && TREE_CODE (equal_result) == INTEGER_CST
8810 && TREE_CODE (low_result) == INTEGER_CST)
8812 /* Make a 3-bit mask with the high-order bit being the
8813 value for `>', the next for '=', and the low for '<'. */
8814 switch ((integer_onep (high_result) * 4)
8815 + (integer_onep (equal_result) * 2)
8816 + integer_onep (low_result))
/* NOTE(review): most case labels of this switch are elided.  */
8820 return omit_one_operand (type, integer_zero_node, arg0);
8841 return omit_one_operand (type, integer_one_node, arg0);
8845 return save_expr (build2 (code, type, cval1, cval2));
8846 return fold_build2 (code, type, cval1, cval2);
8851 /* Fold a comparison of the address of COMPONENT_REFs with the same
8852 type and component to a comparison of the address of the base
8853 object. In short, &x->a OP &y->a to x OP y and
8854 &x->a OP &y.a to x OP &y */
8855 if (TREE_CODE (arg0) == ADDR_EXPR
8856 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8857 && TREE_CODE (arg1) == ADDR_EXPR
8858 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8860 tree cref0 = TREE_OPERAND (arg0, 0);
8861 tree cref1 = TREE_OPERAND (arg1, 0);
/* Same FIELD_DECL on both sides -> compare the base addresses.  */
8862 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8864 tree op0 = TREE_OPERAND (cref0, 0);
8865 tree op1 = TREE_OPERAND (cref1, 0);
8866 return fold_build2 (code, type,
8867 build_fold_addr_expr (op0),
8868 build_fold_addr_expr (op1));
8872 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8873 into a single range test. */
8874 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8875 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8876 && TREE_CODE (arg1) == INTEGER_CST
8877 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8878 && !integer_zerop (TREE_OPERAND (arg0, 1))
8879 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8880 && !TREE_OVERFLOW (arg1))
8882 tem = fold_div_compare (code, type, arg0, arg1);
8883 if (tem != NULL_TREE)
8887 /* Fold ~X op ~Y as Y op X. */
8888 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8889 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8891 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8892 return fold_build2 (code, type,
8893 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8894 TREE_OPERAND (arg0, 0));
8897 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8898 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8899 && TREE_CODE (arg1) == INTEGER_CST)
8901 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8902 return fold_build2 (swap_tree_comparison (code), type,
8903 TREE_OPERAND (arg0, 0),
8904 fold_build1 (BIT_NOT_EXPR, cmp_type,
8905 fold_convert (cmp_type, arg1)));
8912 /* Subroutine of fold_binary. Optimize complex multiplications of the
8913 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8914 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): the original line numbers on the left are discontinuous;
   braces and the final `else' of the extraction chain below are elided.  */
8917 fold_mult_zconjz (tree type, tree expr)
/* ITYPE is the component (element) type of the complex type TYPE.  */
8919 tree itype = TREE_TYPE (type);
8920 tree rpart, ipart, tem;
/* Extract the real and imaginary parts of EXPR: directly from a
   COMPLEX_EXPR or COMPLEX_CST, otherwise via REALPART/IMAGPART.  */
8922 if (TREE_CODE (expr) == COMPLEX_EXPR)
8924 rpart = TREE_OPERAND (expr, 0);
8925 ipart = TREE_OPERAND (expr, 1);
8927 else if (TREE_CODE (expr) == COMPLEX_CST)
8929 rpart = TREE_REALPART (expr);
8930 ipart = TREE_IMAGPART (expr);
/* General case: EXPR is referenced twice, so evaluate it only once.  */
8934 expr = save_expr (expr);
8935 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8936 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is multiplied with itself below; avoid re-evaluation.  */
8939 rpart = save_expr (rpart);
8940 ipart = save_expr (ipart);
/* Build rpart*rpart + ipart*ipart with a zero imaginary component.  */
8941 tem = fold_build2 (PLUS_EXPR, itype,
8942 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8943 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8944 return fold_build2 (COMPLEX_EXPR, type, tem,
8945 fold_convert (itype, integer_zero_node));
8949 /* Fold a binary expression of code CODE and type TYPE with operands
8950 OP0 and OP1. Return the folded expression if folding is
8951 successful. Otherwise, return NULL_TREE. */
8954 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8956 enum tree_code_class kind = TREE_CODE_CLASS (code);
8957 tree arg0, arg1, tem;
8958 tree t1 = NULL_TREE;
8959 bool strict_overflow_p;
8961 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8962 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8963 && TREE_CODE_LENGTH (code) == 2
8965 && op1 != NULL_TREE);
8970 /* Strip any conversions that don't change the mode. This is
8971 safe for every expression, except for a comparison expression
8972 because its signedness is derived from its operands. So, in
8973 the latter case, only strip conversions that don't change the
8976 Note that this is done as an internal manipulation within the
8977 constant folder, in order to find the simplest representation
8978 of the arguments so that their form can be studied. In any
8979 cases, the appropriate type conversions should be put back in
8980 the tree that will get out of the constant folder. */
8982 if (kind == tcc_comparison)
8984 STRIP_SIGN_NOPS (arg0);
8985 STRIP_SIGN_NOPS (arg1);
8993 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8994 constant but we can't do arithmetic on them. */
8995 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8996 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8997 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8998 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9000 if (kind == tcc_binary)
9001 tem = const_binop (code, arg0, arg1, 0);
9002 else if (kind == tcc_comparison)
9003 tem = fold_relational_const (code, type, arg0, arg1);
9007 if (tem != NULL_TREE)
9009 if (TREE_TYPE (tem) != type)
9010 tem = fold_convert (type, tem);
9015 /* If this is a commutative operation, and ARG0 is a constant, move it
9016 to ARG1 to reduce the number of tests below. */
9017 if (commutative_tree_code (code)
9018 && tree_swap_operands_p (arg0, arg1, true))
9019 return fold_build2 (code, type, op1, op0);
9021 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9023 First check for cases where an arithmetic operation is applied to a
9024 compound, conditional, or comparison operation. Push the arithmetic
9025 operation inside the compound or conditional to see if any folding
9026 can then be done. Convert comparison to conditional for this purpose.
9027 This also optimizes non-constant cases that used to be done in
9030 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9031 one of the operands is a comparison and the other is a comparison, a
9032 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9033 code below would make the expression more complex. Change it to a
9034 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9035 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9037 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9038 || code == EQ_EXPR || code == NE_EXPR)
9039 && ((truth_value_p (TREE_CODE (arg0))
9040 && (truth_value_p (TREE_CODE (arg1))
9041 || (TREE_CODE (arg1) == BIT_AND_EXPR
9042 && integer_onep (TREE_OPERAND (arg1, 1)))))
9043 || (truth_value_p (TREE_CODE (arg1))
9044 && (truth_value_p (TREE_CODE (arg0))
9045 || (TREE_CODE (arg0) == BIT_AND_EXPR
9046 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9048 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9049 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9052 fold_convert (boolean_type_node, arg0),
9053 fold_convert (boolean_type_node, arg1));
9055 if (code == EQ_EXPR)
9056 tem = invert_truthvalue (tem);
9058 return fold_convert (type, tem);
9061 if (TREE_CODE_CLASS (code) == tcc_binary
9062 || TREE_CODE_CLASS (code) == tcc_comparison)
9064 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9065 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9066 fold_build2 (code, type,
9067 TREE_OPERAND (arg0, 1), op1));
9068 if (TREE_CODE (arg1) == COMPOUND_EXPR
9069 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9070 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9071 fold_build2 (code, type,
9072 op0, TREE_OPERAND (arg1, 1)));
9074 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9076 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9078 /*cond_first_p=*/1);
9079 if (tem != NULL_TREE)
9083 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9085 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9087 /*cond_first_p=*/0);
9088 if (tem != NULL_TREE)
9095 case POINTER_PLUS_EXPR:
9096 /* 0 +p index -> (type)index */
9097 if (integer_zerop (arg0))
9098 return non_lvalue (fold_convert (type, arg1));
9100 /* PTR +p 0 -> PTR */
9101 if (integer_zerop (arg1))
9102 return non_lvalue (fold_convert (type, arg0));
9104 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9105 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9106 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9107 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9108 fold_convert (sizetype, arg1),
9109 fold_convert (sizetype, arg0)));
9111 /* index +p PTR -> PTR +p index */
9112 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9113 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9114 return fold_build2 (POINTER_PLUS_EXPR, type,
9115 fold_convert (type, arg1), fold_convert (sizetype, arg0));
9117 /* (PTR +p B) +p A -> PTR +p (B + A) */
9118 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9121 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9122 tree arg00 = TREE_OPERAND (arg0, 0);
9123 inner = fold_build2 (PLUS_EXPR, sizetype, arg01, fold_convert (sizetype, arg1));
9124 return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
9127 /* PTR_CST +p CST -> CST1 */
9128 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9129 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9131 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9132 of the array. Loop optimizer sometimes produce this type of
9134 if (TREE_CODE (arg0) == ADDR_EXPR)
9136 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9138 return fold_convert (type, tem);
9143 /* PTR + INT -> (INT)(PTR p+ INT) */
9144 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9145 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9146 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9149 fold_convert (sizetype, arg1)));
9150 /* INT + PTR -> (INT)(PTR p+ INT) */
9151 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9152 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9153 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9156 fold_convert (sizetype, arg0)));
9157 /* A + (-B) -> A - B */
9158 if (TREE_CODE (arg1) == NEGATE_EXPR)
9159 return fold_build2 (MINUS_EXPR, type,
9160 fold_convert (type, arg0),
9161 fold_convert (type, TREE_OPERAND (arg1, 0)));
9162 /* (-A) + B -> B - A */
9163 if (TREE_CODE (arg0) == NEGATE_EXPR
9164 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9165 return fold_build2 (MINUS_EXPR, type,
9166 fold_convert (type, arg1),
9167 fold_convert (type, TREE_OPERAND (arg0, 0)));
9168 /* Convert ~A + 1 to -A. */
9169 if (INTEGRAL_TYPE_P (type)
9170 && TREE_CODE (arg0) == BIT_NOT_EXPR
9171 && integer_onep (arg1))
9172 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9174 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9176 if ((TREE_CODE (arg0) == MULT_EXPR
9177 || TREE_CODE (arg1) == MULT_EXPR)
9178 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9180 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9185 if (! FLOAT_TYPE_P (type))
9187 if (integer_zerop (arg1))
9188 return non_lvalue (fold_convert (type, arg0));
9191 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9192 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9193 && !TYPE_OVERFLOW_TRAPS (type))
9195 t1 = build_int_cst_type (type, -1);
9196 return omit_one_operand (type, t1, arg1);
9200 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9201 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9202 && !TYPE_OVERFLOW_TRAPS (type))
9204 t1 = build_int_cst_type (type, -1);
9205 return omit_one_operand (type, t1, arg0);
9208 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9209 with a constant, and the two constants have no bits in common,
9210 we should treat this as a BIT_IOR_EXPR since this may produce more
9212 if (TREE_CODE (arg0) == BIT_AND_EXPR
9213 && TREE_CODE (arg1) == BIT_AND_EXPR
9214 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9215 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9216 && integer_zerop (const_binop (BIT_AND_EXPR,
9217 TREE_OPERAND (arg0, 1),
9218 TREE_OPERAND (arg1, 1), 0)))
9220 code = BIT_IOR_EXPR;
9224 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9225 (plus (plus (mult) (mult)) (foo)) so that we can
9226 take advantage of the factoring cases below. */
9227 if (((TREE_CODE (arg0) == PLUS_EXPR
9228 || TREE_CODE (arg0) == MINUS_EXPR)
9229 && TREE_CODE (arg1) == MULT_EXPR)
9230 || ((TREE_CODE (arg1) == PLUS_EXPR
9231 || TREE_CODE (arg1) == MINUS_EXPR)
9232 && TREE_CODE (arg0) == MULT_EXPR))
9234 tree parg0, parg1, parg, marg;
9235 enum tree_code pcode;
9237 if (TREE_CODE (arg1) == MULT_EXPR)
9238 parg = arg0, marg = arg1;
9240 parg = arg1, marg = arg0;
9241 pcode = TREE_CODE (parg);
9242 parg0 = TREE_OPERAND (parg, 0);
9243 parg1 = TREE_OPERAND (parg, 1);
9247 if (TREE_CODE (parg0) == MULT_EXPR
9248 && TREE_CODE (parg1) != MULT_EXPR)
9249 return fold_build2 (pcode, type,
9250 fold_build2 (PLUS_EXPR, type,
9251 fold_convert (type, parg0),
9252 fold_convert (type, marg)),
9253 fold_convert (type, parg1));
9254 if (TREE_CODE (parg0) != MULT_EXPR
9255 && TREE_CODE (parg1) == MULT_EXPR)
9256 return fold_build2 (PLUS_EXPR, type,
9257 fold_convert (type, parg0),
9258 fold_build2 (pcode, type,
9259 fold_convert (type, marg),
9266 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9267 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9268 return non_lvalue (fold_convert (type, arg0));
9270 /* Likewise if the operands are reversed. */
9271 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9272 return non_lvalue (fold_convert (type, arg1));
9274 /* Convert X + -C into X - C. */
9275 if (TREE_CODE (arg1) == REAL_CST
9276 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9278 tem = fold_negate_const (arg1, type);
9279 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9280 return fold_build2 (MINUS_EXPR, type,
9281 fold_convert (type, arg0),
9282 fold_convert (type, tem));
9285 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9286 to __complex__ ( x, y ). This is not the same for SNaNs or
9287 if signed zeros are involved. */
9288 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9289 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9290 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9292 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9293 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9294 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9295 bool arg0rz = false, arg0iz = false;
9296 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9297 || (arg0i && (arg0iz = real_zerop (arg0i))))
9299 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9300 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9301 if (arg0rz && arg1i && real_zerop (arg1i))
9303 tree rp = arg1r ? arg1r
9304 : build1 (REALPART_EXPR, rtype, arg1);
9305 tree ip = arg0i ? arg0i
9306 : build1 (IMAGPART_EXPR, rtype, arg0);
9307 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9309 else if (arg0iz && arg1r && real_zerop (arg1r))
9311 tree rp = arg0r ? arg0r
9312 : build1 (REALPART_EXPR, rtype, arg0);
9313 tree ip = arg1i ? arg1i
9314 : build1 (IMAGPART_EXPR, rtype, arg1);
9315 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9320 if (flag_unsafe_math_optimizations
9321 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9322 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9323 && (tem = distribute_real_division (code, type, arg0, arg1)))
9326 /* Convert x+x into x*2.0. */
9327 if (operand_equal_p (arg0, arg1, 0)
9328 && SCALAR_FLOAT_TYPE_P (type))
9329 return fold_build2 (MULT_EXPR, type, arg0,
9330 build_real (type, dconst2));
9332 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9333 if (flag_unsafe_math_optimizations
9334 && TREE_CODE (arg1) == PLUS_EXPR
9335 && TREE_CODE (arg0) != MULT_EXPR)
9337 tree tree10 = TREE_OPERAND (arg1, 0);
9338 tree tree11 = TREE_OPERAND (arg1, 1);
9339 if (TREE_CODE (tree11) == MULT_EXPR
9340 && TREE_CODE (tree10) == MULT_EXPR)
9343 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9344 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9347 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9348 if (flag_unsafe_math_optimizations
9349 && TREE_CODE (arg0) == PLUS_EXPR
9350 && TREE_CODE (arg1) != MULT_EXPR)
9352 tree tree00 = TREE_OPERAND (arg0, 0);
9353 tree tree01 = TREE_OPERAND (arg0, 1);
9354 if (TREE_CODE (tree01) == MULT_EXPR
9355 && TREE_CODE (tree00) == MULT_EXPR)
9358 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9359 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9365 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9366 is a rotate of A by C1 bits. */
9367 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9368 is a rotate of A by B bits. */
9370 enum tree_code code0, code1;
9371 code0 = TREE_CODE (arg0);
9372 code1 = TREE_CODE (arg1);
9373 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9374 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9375 && operand_equal_p (TREE_OPERAND (arg0, 0),
9376 TREE_OPERAND (arg1, 0), 0)
9377 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9379 tree tree01, tree11;
9380 enum tree_code code01, code11;
9382 tree01 = TREE_OPERAND (arg0, 1);
9383 tree11 = TREE_OPERAND (arg1, 1);
9384 STRIP_NOPS (tree01);
9385 STRIP_NOPS (tree11);
9386 code01 = TREE_CODE (tree01);
9387 code11 = TREE_CODE (tree11);
9388 if (code01 == INTEGER_CST
9389 && code11 == INTEGER_CST
9390 && TREE_INT_CST_HIGH (tree01) == 0
9391 && TREE_INT_CST_HIGH (tree11) == 0
9392 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9393 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9394 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9395 code0 == LSHIFT_EXPR ? tree01 : tree11);
9396 else if (code11 == MINUS_EXPR)
9398 tree tree110, tree111;
9399 tree110 = TREE_OPERAND (tree11, 0);
9400 tree111 = TREE_OPERAND (tree11, 1);
9401 STRIP_NOPS (tree110);
9402 STRIP_NOPS (tree111);
9403 if (TREE_CODE (tree110) == INTEGER_CST
9404 && 0 == compare_tree_int (tree110,
9406 (TREE_TYPE (TREE_OPERAND
9408 && operand_equal_p (tree01, tree111, 0))
9409 return build2 ((code0 == LSHIFT_EXPR
9412 type, TREE_OPERAND (arg0, 0), tree01);
9414 else if (code01 == MINUS_EXPR)
9416 tree tree010, tree011;
9417 tree010 = TREE_OPERAND (tree01, 0);
9418 tree011 = TREE_OPERAND (tree01, 1);
9419 STRIP_NOPS (tree010);
9420 STRIP_NOPS (tree011);
9421 if (TREE_CODE (tree010) == INTEGER_CST
9422 && 0 == compare_tree_int (tree010,
9424 (TREE_TYPE (TREE_OPERAND
9426 && operand_equal_p (tree11, tree011, 0))
9427 return build2 ((code0 != LSHIFT_EXPR
9430 type, TREE_OPERAND (arg0, 0), tree11);
9436 /* In most languages, can't associate operations on floats through
9437 parentheses. Rather than remember where the parentheses were, we
9438 don't associate floats at all, unless the user has specified
9439 -funsafe-math-optimizations. */
9441 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9443 tree var0, con0, lit0, minus_lit0;
9444 tree var1, con1, lit1, minus_lit1;
9447 /* Split both trees into variables, constants, and literals. Then
9448 associate each group together, the constants with literals,
9449 then the result with variables. This increases the chances of
9450 literals being recombined later and of generating relocatable
9451 expressions for the sum of a constant and literal. */
9452 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9453 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9454 code == MINUS_EXPR);
9456 /* With undefined overflow we can only associate constants
9457 with one variable. */
9458 if ((POINTER_TYPE_P (type)
9459 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9465 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9466 tmp0 = TREE_OPERAND (tmp0, 0);
9467 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9468 tmp1 = TREE_OPERAND (tmp1, 0);
9469 /* The only case we can still associate with two variables
9470 is if they are the same, modulo negation. */
9471 if (!operand_equal_p (tmp0, tmp1, 0))
9475 /* Only do something if we found more than two objects. Otherwise,
9476 nothing has changed and we risk infinite recursion. */
9478 && (2 < ((var0 != 0) + (var1 != 0)
9479 + (con0 != 0) + (con1 != 0)
9480 + (lit0 != 0) + (lit1 != 0)
9481 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9483 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9484 if (code == MINUS_EXPR)
9487 var0 = associate_trees (var0, var1, code, type);
9488 con0 = associate_trees (con0, con1, code, type);
9489 lit0 = associate_trees (lit0, lit1, code, type);
9490 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9492 /* Preserve the MINUS_EXPR if the negative part of the literal is
9493 greater than the positive part. Otherwise, the multiplicative
9494 folding code (i.e extract_muldiv) may be fooled in case
9495 unsigned constants are subtracted, like in the following
9496 example: ((X*2 + 4) - 8U)/2. */
9497 if (minus_lit0 && lit0)
9499 if (TREE_CODE (lit0) == INTEGER_CST
9500 && TREE_CODE (minus_lit0) == INTEGER_CST
9501 && tree_int_cst_lt (lit0, minus_lit0))
9503 minus_lit0 = associate_trees (minus_lit0, lit0,
9509 lit0 = associate_trees (lit0, minus_lit0,
9517 return fold_convert (type,
9518 associate_trees (var0, minus_lit0,
9522 con0 = associate_trees (con0, minus_lit0,
9524 return fold_convert (type,
9525 associate_trees (var0, con0,
9530 con0 = associate_trees (con0, lit0, code, type);
9531 return fold_convert (type, associate_trees (var0, con0,
9539 /* Pointer simplifications for subtraction, simple reassociations. */
9540 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9542 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9543 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9544 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9546 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9547 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9548 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9549 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9550 return fold_build2 (PLUS_EXPR, type,
9551 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9552 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9554 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9555 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9557 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9558 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9559 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9561 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9564 /* A - (-B) -> A + B */
9565 if (TREE_CODE (arg1) == NEGATE_EXPR)
9566 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9567 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9568 if (TREE_CODE (arg0) == NEGATE_EXPR
9569 && (FLOAT_TYPE_P (type)
9570 || INTEGRAL_TYPE_P (type))
9571 && negate_expr_p (arg1)
9572 && reorder_operands_p (arg0, arg1))
9573 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9574 TREE_OPERAND (arg0, 0));
9575 /* Convert -A - 1 to ~A. */
9576 if (INTEGRAL_TYPE_P (type)
9577 && TREE_CODE (arg0) == NEGATE_EXPR
9578 && integer_onep (arg1)
9579 && !TYPE_OVERFLOW_TRAPS (type))
9580 return fold_build1 (BIT_NOT_EXPR, type,
9581 fold_convert (type, TREE_OPERAND (arg0, 0)));
9583 /* Convert -1 - A to ~A. */
9584 if (INTEGRAL_TYPE_P (type)
9585 && integer_all_onesp (arg0))
9586 return fold_build1 (BIT_NOT_EXPR, type, op1);
9588 if (! FLOAT_TYPE_P (type))
9590 if (integer_zerop (arg0))
9591 return negate_expr (fold_convert (type, arg1));
9592 if (integer_zerop (arg1))
9593 return non_lvalue (fold_convert (type, arg0));
9595 /* Fold A - (A & B) into ~B & A. */
9596 if (!TREE_SIDE_EFFECTS (arg0)
9597 && TREE_CODE (arg1) == BIT_AND_EXPR)
9599 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9600 return fold_build2 (BIT_AND_EXPR, type,
9601 fold_build1 (BIT_NOT_EXPR, type,
9602 TREE_OPERAND (arg1, 0)),
9604 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9605 return fold_build2 (BIT_AND_EXPR, type,
9606 fold_build1 (BIT_NOT_EXPR, type,
9607 TREE_OPERAND (arg1, 1)),
9611 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9612 any power of 2 minus 1. */
9613 if (TREE_CODE (arg0) == BIT_AND_EXPR
9614 && TREE_CODE (arg1) == BIT_AND_EXPR
9615 && operand_equal_p (TREE_OPERAND (arg0, 0),
9616 TREE_OPERAND (arg1, 0), 0))
9618 tree mask0 = TREE_OPERAND (arg0, 1);
9619 tree mask1 = TREE_OPERAND (arg1, 1);
9620 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9622 if (operand_equal_p (tem, mask1, 0))
9624 tem = fold_build2 (BIT_XOR_EXPR, type,
9625 TREE_OPERAND (arg0, 0), mask1);
9626 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9631 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9632 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9633 return non_lvalue (fold_convert (type, arg0));
9635 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9636 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9637 (-ARG1 + ARG0) reduces to -ARG1. */
9638 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9639 return negate_expr (fold_convert (type, arg1));
9641 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9642 __complex__ ( x, -y ). This is not the same for SNaNs or if
9643 signed zeros are involved. */
9644 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9645 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9646 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9648 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9649 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9650 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9651 bool arg0rz = false, arg0iz = false;
9652 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9653 || (arg0i && (arg0iz = real_zerop (arg0i))))
9655 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9656 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9657 if (arg0rz && arg1i && real_zerop (arg1i))
9659 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9661 : build1 (REALPART_EXPR, rtype, arg1));
9662 tree ip = arg0i ? arg0i
9663 : build1 (IMAGPART_EXPR, rtype, arg0);
9664 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9666 else if (arg0iz && arg1r && real_zerop (arg1r))
9668 tree rp = arg0r ? arg0r
9669 : build1 (REALPART_EXPR, rtype, arg0);
9670 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9672 : build1 (IMAGPART_EXPR, rtype, arg1));
9673 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9678 /* Fold &x - &x. This can happen from &x.foo - &x.
9679 This is unsafe for certain floats even in non-IEEE formats.
9680 In IEEE, it is unsafe because it does wrong for NaNs.
9681 Also note that operand_equal_p is always false if an operand
9684 if ((! FLOAT_TYPE_P (type)
9685 || (flag_unsafe_math_optimizations
9686 && !HONOR_NANS (TYPE_MODE (type))
9687 && !HONOR_INFINITIES (TYPE_MODE (type))))
9688 && operand_equal_p (arg0, arg1, 0))
9689 return fold_convert (type, integer_zero_node);
9691 /* A - B -> A + (-B) if B is easily negatable. */
9692 if (negate_expr_p (arg1)
9693 && ((FLOAT_TYPE_P (type)
9694 /* Avoid this transformation if B is a positive REAL_CST. */
9695 && (TREE_CODE (arg1) != REAL_CST
9696 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9697 || INTEGRAL_TYPE_P (type)))
9698 return fold_build2 (PLUS_EXPR, type,
9699 fold_convert (type, arg0),
9700 fold_convert (type, negate_expr (arg1)));
9702 /* Try folding difference of addresses. */
9706 if ((TREE_CODE (arg0) == ADDR_EXPR
9707 || TREE_CODE (arg1) == ADDR_EXPR)
9708 && ptr_difference_const (arg0, arg1, &diff))
9709 return build_int_cst_type (type, diff);
9712 /* Fold &a[i] - &a[j] to i-j. */
9713 if (TREE_CODE (arg0) == ADDR_EXPR
9714 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9715 && TREE_CODE (arg1) == ADDR_EXPR
9716 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9718 tree aref0 = TREE_OPERAND (arg0, 0);
9719 tree aref1 = TREE_OPERAND (arg1, 0);
9720 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9721 TREE_OPERAND (aref1, 0), 0))
9723 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9724 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9725 tree esz = array_ref_element_size (aref0);
9726 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9727 return fold_build2 (MULT_EXPR, type, diff,
9728 fold_convert (type, esz));
9733 if (flag_unsafe_math_optimizations
9734 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9735 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9736 && (tem = distribute_real_division (code, type, arg0, arg1)))
9739 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9741 if ((TREE_CODE (arg0) == MULT_EXPR
9742 || TREE_CODE (arg1) == MULT_EXPR)
9743 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9745 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9753 /* (-A) * (-B) -> A * B */
9754 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9755 return fold_build2 (MULT_EXPR, type,
9756 fold_convert (type, TREE_OPERAND (arg0, 0)),
9757 fold_convert (type, negate_expr (arg1)));
9758 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9759 return fold_build2 (MULT_EXPR, type,
9760 fold_convert (type, negate_expr (arg0)),
9761 fold_convert (type, TREE_OPERAND (arg1, 0)));
9763 if (! FLOAT_TYPE_P (type))
9765 if (integer_zerop (arg1))
9766 return omit_one_operand (type, arg1, arg0);
9767 if (integer_onep (arg1))
9768 return non_lvalue (fold_convert (type, arg0));
9769 /* Transform x * -1 into -x. */
9770 if (integer_all_onesp (arg1))
9771 return fold_convert (type, negate_expr (arg0));
9772 /* Transform x * -C into -x * C if x is easily negatable. */
9773 if (TREE_CODE (arg1) == INTEGER_CST
9774 && tree_int_cst_sgn (arg1) == -1
9775 && negate_expr_p (arg0)
9776 && (tem = negate_expr (arg1)) != arg1
9777 && !TREE_OVERFLOW (tem))
9778 return fold_build2 (MULT_EXPR, type,
9779 negate_expr (arg0), tem);
9781 /* (a * (1 << b)) is (a << b) */
9782 if (TREE_CODE (arg1) == LSHIFT_EXPR
9783 && integer_onep (TREE_OPERAND (arg1, 0)))
9784 return fold_build2 (LSHIFT_EXPR, type, arg0,
9785 TREE_OPERAND (arg1, 1));
9786 if (TREE_CODE (arg0) == LSHIFT_EXPR
9787 && integer_onep (TREE_OPERAND (arg0, 0)))
9788 return fold_build2 (LSHIFT_EXPR, type, arg1,
9789 TREE_OPERAND (arg0, 1));
9791 strict_overflow_p = false;
9792 if (TREE_CODE (arg1) == INTEGER_CST
9793 && 0 != (tem = extract_muldiv (op0,
9794 fold_convert (type, arg1),
9796 &strict_overflow_p)))
9798 if (strict_overflow_p)
9799 fold_overflow_warning (("assuming signed overflow does not "
9800 "occur when simplifying "
9802 WARN_STRICT_OVERFLOW_MISC);
9803 return fold_convert (type, tem);
9806 /* Optimize z * conj(z) for integer complex numbers. */
9807 if (TREE_CODE (arg0) == CONJ_EXPR
9808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9809 return fold_mult_zconjz (type, arg1);
9810 if (TREE_CODE (arg1) == CONJ_EXPR
9811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9812 return fold_mult_zconjz (type, arg0);
9816 /* Maybe fold x * 0 to 0. The expressions aren't the same
9817 when x is NaN, since x * 0 is also NaN. Nor are they the
9818 same in modes with signed zeros, since multiplying a
9819 negative value by 0 gives -0, not +0. */
9820 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9821 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9822 && real_zerop (arg1))
9823 return omit_one_operand (type, arg1, arg0);
9824 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9825 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9826 && real_onep (arg1))
9827 return non_lvalue (fold_convert (type, arg0));
9829 /* Transform x * -1.0 into -x. */
9830 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9831 && real_minus_onep (arg1))
9832 return fold_convert (type, negate_expr (arg0));
9834 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9835 if (flag_unsafe_math_optimizations
9836 && TREE_CODE (arg0) == RDIV_EXPR
9837 && TREE_CODE (arg1) == REAL_CST
9838 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9840 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9843 return fold_build2 (RDIV_EXPR, type, tem,
9844 TREE_OPERAND (arg0, 1));
9847 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9848 if (operand_equal_p (arg0, arg1, 0))
9850 tree tem = fold_strip_sign_ops (arg0);
9851 if (tem != NULL_TREE)
9853 tem = fold_convert (type, tem);
9854 return fold_build2 (MULT_EXPR, type, tem, tem);
9858 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9859 This is not the same for NaNs or if signed zeros are
9861 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9862 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9863 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9864 && TREE_CODE (arg1) == COMPLEX_CST
9865 && real_zerop (TREE_REALPART (arg1)))
9867 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9868 if (real_onep (TREE_IMAGPART (arg1)))
9869 return fold_build2 (COMPLEX_EXPR, type,
9870 negate_expr (fold_build1 (IMAGPART_EXPR,
9872 fold_build1 (REALPART_EXPR, rtype, arg0));
9873 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9874 return fold_build2 (COMPLEX_EXPR, type,
9875 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9876 negate_expr (fold_build1 (REALPART_EXPR,
9880 /* Optimize z * conj(z) for floating point complex numbers.
9881 Guarded by flag_unsafe_math_optimizations as non-finite
9882 imaginary components don't produce scalar results. */
9883 if (flag_unsafe_math_optimizations
9884 && TREE_CODE (arg0) == CONJ_EXPR
9885 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9886 return fold_mult_zconjz (type, arg1);
9887 if (flag_unsafe_math_optimizations
9888 && TREE_CODE (arg1) == CONJ_EXPR
9889 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9890 return fold_mult_zconjz (type, arg0);
9892 if (flag_unsafe_math_optimizations)
9894 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9895 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9897 /* Optimizations of root(...)*root(...). */
9898 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9901 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9902 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9904 /* Optimize sqrt(x)*sqrt(x) as x. */
9905 if (BUILTIN_SQRT_P (fcode0)
9906 && operand_equal_p (arg00, arg10, 0)
9907 && ! HONOR_SNANS (TYPE_MODE (type)))
9910 /* Optimize root(x)*root(y) as root(x*y). */
9911 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9912 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9913 return build_call_expr (rootfn, 1, arg);
9916 /* Optimize expN(x)*expN(y) as expN(x+y). */
9917 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9919 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9920 tree arg = fold_build2 (PLUS_EXPR, type,
9921 CALL_EXPR_ARG (arg0, 0),
9922 CALL_EXPR_ARG (arg1, 0));
9923 return build_call_expr (expfn, 1, arg);
9926 /* Optimizations of pow(...)*pow(...). */
9927 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9928 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9929 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9931 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9932 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9933 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9934 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9936 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9937 if (operand_equal_p (arg01, arg11, 0))
9939 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9940 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9941 return build_call_expr (powfn, 2, arg, arg01);
9944 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9945 if (operand_equal_p (arg00, arg10, 0))
9947 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9948 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9949 return build_call_expr (powfn, 2, arg00, arg);
9953 /* Optimize tan(x)*cos(x) as sin(x). */
9954 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9955 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9956 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9957 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9958 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9959 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9960 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9961 CALL_EXPR_ARG (arg1, 0), 0))
9963 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9965 if (sinfn != NULL_TREE)
9966 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9969 /* Optimize x*pow(x,c) as pow(x,c+1). */
9970 if (fcode1 == BUILT_IN_POW
9971 || fcode1 == BUILT_IN_POWF
9972 || fcode1 == BUILT_IN_POWL)
9974 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9975 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9976 if (TREE_CODE (arg11) == REAL_CST
9977 && !TREE_OVERFLOW (arg11)
9978 && operand_equal_p (arg0, arg10, 0))
9980 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9984 c = TREE_REAL_CST (arg11);
9985 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9986 arg = build_real (type, c);
9987 return build_call_expr (powfn, 2, arg0, arg);
9991 /* Optimize pow(x,c)*x as pow(x,c+1). */
9992 if (fcode0 == BUILT_IN_POW
9993 || fcode0 == BUILT_IN_POWF
9994 || fcode0 == BUILT_IN_POWL)
9996 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9997 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9998 if (TREE_CODE (arg01) == REAL_CST
9999 && !TREE_OVERFLOW (arg01)
10000 && operand_equal_p (arg1, arg00, 0))
10002 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10006 c = TREE_REAL_CST (arg01);
10007 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10008 arg = build_real (type, c);
10009 return build_call_expr (powfn, 2, arg1, arg);
10013 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10014 if (! optimize_size
10015 && operand_equal_p (arg0, arg1, 0))
10017 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10021 tree arg = build_real (type, dconst2);
10022 return build_call_expr (powfn, 2, arg0, arg);
10031 if (integer_all_onesp (arg1))
10032 return omit_one_operand (type, arg1, arg0);
10033 if (integer_zerop (arg1))
10034 return non_lvalue (fold_convert (type, arg0));
10035 if (operand_equal_p (arg0, arg1, 0))
10036 return non_lvalue (fold_convert (type, arg0));
10038 /* ~X | X is -1. */
10039 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10040 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10042 t1 = build_int_cst_type (type, -1);
10043 return omit_one_operand (type, t1, arg1);
10046 /* X | ~X is -1. */
10047 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10048 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10050 t1 = build_int_cst_type (type, -1);
10051 return omit_one_operand (type, t1, arg0);
10054 /* Canonicalize (X & C1) | C2. */
10055 if (TREE_CODE (arg0) == BIT_AND_EXPR
10056 && TREE_CODE (arg1) == INTEGER_CST
10057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10059 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10060 int width = TYPE_PRECISION (type);
10061 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10062 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10063 hi2 = TREE_INT_CST_HIGH (arg1);
10064 lo2 = TREE_INT_CST_LOW (arg1);
10066 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10067 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10068 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10070 if (width > HOST_BITS_PER_WIDE_INT)
10072 mhi = (unsigned HOST_WIDE_INT) -1
10073 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10079 mlo = (unsigned HOST_WIDE_INT) -1
10080 >> (HOST_BITS_PER_WIDE_INT - width);
10083 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10084 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10085 return fold_build2 (BIT_IOR_EXPR, type,
10086 TREE_OPERAND (arg0, 0), arg1);
10088 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10091 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10092 return fold_build2 (BIT_IOR_EXPR, type,
10093 fold_build2 (BIT_AND_EXPR, type,
10094 TREE_OPERAND (arg0, 0),
10095 build_int_cst_wide (type,
10101 /* (X & Y) | Y is (X, Y). */
10102 if (TREE_CODE (arg0) == BIT_AND_EXPR
10103 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10104 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10105 /* (X & Y) | X is (Y, X). */
10106 if (TREE_CODE (arg0) == BIT_AND_EXPR
10107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10108 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10109 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10110 /* X | (X & Y) is (Y, X). */
10111 if (TREE_CODE (arg1) == BIT_AND_EXPR
10112 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10113 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10114 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10115 /* X | (Y & X) is (Y, X). */
10116 if (TREE_CODE (arg1) == BIT_AND_EXPR
10117 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10118 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10119 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10121 t1 = distribute_bit_expr (code, type, arg0, arg1);
10122 if (t1 != NULL_TREE)
10125 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10127 This results in more efficient code for machines without a NAND
10128 instruction. Combine will canonicalize to the first form
10129 which will allow use of NAND instructions provided by the
10130 backend if they exist. */
10131 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10132 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10134 return fold_build1 (BIT_NOT_EXPR, type,
10135 build2 (BIT_AND_EXPR, type,
10136 TREE_OPERAND (arg0, 0),
10137 TREE_OPERAND (arg1, 0)));
10140 /* See if this can be simplified into a rotate first. If that
10141 is unsuccessful continue in the association code. */
10145 if (integer_zerop (arg1))
10146 return non_lvalue (fold_convert (type, arg0));
10147 if (integer_all_onesp (arg1))
10148 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10149 if (operand_equal_p (arg0, arg1, 0))
10150 return omit_one_operand (type, integer_zero_node, arg0);
10152 /* ~X ^ X is -1. */
10153 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10156 t1 = build_int_cst_type (type, -1);
10157 return omit_one_operand (type, t1, arg1);
10160 /* X ^ ~X is -1. */
10161 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10162 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10164 t1 = build_int_cst_type (type, -1);
10165 return omit_one_operand (type, t1, arg0);
10168 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10169 with a constant, and the two constants have no bits in common,
10170 we should treat this as a BIT_IOR_EXPR since this may produce more
10171 simplifications. */
10172 if (TREE_CODE (arg0) == BIT_AND_EXPR
10173 && TREE_CODE (arg1) == BIT_AND_EXPR
10174 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10175 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10176 && integer_zerop (const_binop (BIT_AND_EXPR,
10177 TREE_OPERAND (arg0, 1),
10178 TREE_OPERAND (arg1, 1), 0)))
10180 code = BIT_IOR_EXPR;
10184 /* (X | Y) ^ X -> Y & ~ X*/
10185 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10186 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10188 tree t2 = TREE_OPERAND (arg0, 1);
10189 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10191 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10192 fold_convert (type, t1));
10196 /* (Y | X) ^ X -> Y & ~ X*/
10197 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10198 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10200 tree t2 = TREE_OPERAND (arg0, 0);
10201 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10203 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10204 fold_convert (type, t1));
10208 /* X ^ (X | Y) -> Y & ~ X*/
10209 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10210 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10212 tree t2 = TREE_OPERAND (arg1, 1);
10213 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10215 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10216 fold_convert (type, t1));
10220 /* X ^ (Y | X) -> Y & ~ X*/
10221 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10222 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10224 tree t2 = TREE_OPERAND (arg1, 0);
10225 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10227 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10228 fold_convert (type, t1));
10232 /* Convert ~X ^ ~Y to X ^ Y. */
10233 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10234 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10235 return fold_build2 (code, type,
10236 fold_convert (type, TREE_OPERAND (arg0, 0)),
10237 fold_convert (type, TREE_OPERAND (arg1, 0)));
10239 /* Convert ~X ^ C to X ^ ~C. */
10240 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10241 && TREE_CODE (arg1) == INTEGER_CST)
10242 return fold_build2 (code, type,
10243 fold_convert (type, TREE_OPERAND (arg0, 0)),
10244 fold_build1 (BIT_NOT_EXPR, type, arg1));
10246 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10247 if (TREE_CODE (arg0) == BIT_AND_EXPR
10248 && integer_onep (TREE_OPERAND (arg0, 1))
10249 && integer_onep (arg1))
10250 return fold_build2 (EQ_EXPR, type, arg0,
10251 build_int_cst (TREE_TYPE (arg0), 0));
10253 /* Fold (X & Y) ^ Y as ~X & Y. */
10254 if (TREE_CODE (arg0) == BIT_AND_EXPR
10255 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10257 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10258 return fold_build2 (BIT_AND_EXPR, type,
10259 fold_build1 (BIT_NOT_EXPR, type, tem),
10260 fold_convert (type, arg1));
10262 /* Fold (X & Y) ^ X as ~Y & X. */
10263 if (TREE_CODE (arg0) == BIT_AND_EXPR
10264 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10265 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10267 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10268 return fold_build2 (BIT_AND_EXPR, type,
10269 fold_build1 (BIT_NOT_EXPR, type, tem),
10270 fold_convert (type, arg1));
10272 /* Fold X ^ (X & Y) as X & ~Y. */
10273 if (TREE_CODE (arg1) == BIT_AND_EXPR
10274 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10276 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10277 return fold_build2 (BIT_AND_EXPR, type,
10278 fold_convert (type, arg0),
10279 fold_build1 (BIT_NOT_EXPR, type, tem));
10281 /* Fold X ^ (Y & X) as ~Y & X. */
10282 if (TREE_CODE (arg1) == BIT_AND_EXPR
10283 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10284 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10286 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10287 return fold_build2 (BIT_AND_EXPR, type,
10288 fold_build1 (BIT_NOT_EXPR, type, tem),
10289 fold_convert (type, arg0));
10292 /* See if this can be simplified into a rotate first. If that
10293 is unsuccessful continue in the association code. */
10297 if (integer_all_onesp (arg1))
10298 return non_lvalue (fold_convert (type, arg0));
10299 if (integer_zerop (arg1))
10300 return omit_one_operand (type, arg1, arg0);
10301 if (operand_equal_p (arg0, arg1, 0))
10302 return non_lvalue (fold_convert (type, arg0));
10304 /* ~X & X is always zero. */
10305 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10307 return omit_one_operand (type, integer_zero_node, arg1);
10309 /* X & ~X is always zero. */
10310 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10311 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10312 return omit_one_operand (type, integer_zero_node, arg0);
10314 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10315 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10316 && TREE_CODE (arg1) == INTEGER_CST
10317 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10318 return fold_build2 (BIT_IOR_EXPR, type,
10319 fold_build2 (BIT_AND_EXPR, type,
10320 TREE_OPERAND (arg0, 0), arg1),
10321 fold_build2 (BIT_AND_EXPR, type,
10322 TREE_OPERAND (arg0, 1), arg1));
10324 /* (X | Y) & Y is (X, Y). */
10325 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10326 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10327 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10328 /* (X | Y) & X is (Y, X). */
10329 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10330 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10331 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10332 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10333 /* X & (X | Y) is (Y, X). */
10334 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10336 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10337 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10338 /* X & (Y | X) is (Y, X). */
10339 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10340 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10341 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10342 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10344 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10345 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10346 && integer_onep (TREE_OPERAND (arg0, 1))
10347 && integer_onep (arg1))
10349 tem = TREE_OPERAND (arg0, 0);
10350 return fold_build2 (EQ_EXPR, type,
10351 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10352 build_int_cst (TREE_TYPE (tem), 1)),
10353 build_int_cst (TREE_TYPE (tem), 0));
10355 /* Fold ~X & 1 as (X & 1) == 0. */
10356 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10357 && integer_onep (arg1))
10359 tem = TREE_OPERAND (arg0, 0);
10360 return fold_build2 (EQ_EXPR, type,
10361 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10362 build_int_cst (TREE_TYPE (tem), 1)),
10363 build_int_cst (TREE_TYPE (tem), 0));
10366 /* Fold (X ^ Y) & Y as ~X & Y. */
10367 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10368 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10370 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10371 return fold_build2 (BIT_AND_EXPR, type,
10372 fold_build1 (BIT_NOT_EXPR, type, tem),
10373 fold_convert (type, arg1));
10375 /* Fold (X ^ Y) & X as ~Y & X. */
10376 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10377 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10378 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10380 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10381 return fold_build2 (BIT_AND_EXPR, type,
10382 fold_build1 (BIT_NOT_EXPR, type, tem),
10383 fold_convert (type, arg1));
10385 /* Fold X & (X ^ Y) as X & ~Y. */
10386 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10389 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10390 return fold_build2 (BIT_AND_EXPR, type,
10391 fold_convert (type, arg0),
10392 fold_build1 (BIT_NOT_EXPR, type, tem));
10394 /* Fold X & (Y ^ X) as ~Y & X. */
10395 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10396 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10397 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10399 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10400 return fold_build2 (BIT_AND_EXPR, type,
10401 fold_build1 (BIT_NOT_EXPR, type, tem),
10402 fold_convert (type, arg0));
10405 t1 = distribute_bit_expr (code, type, arg0, arg1);
10406 if (t1 != NULL_TREE)
10408 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10409 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10410 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10413 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10415 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10416 && (~TREE_INT_CST_LOW (arg1)
10417 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10418 return fold_convert (type, TREE_OPERAND (arg0, 0));
10421 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10423 This results in more efficient code for machines without a NOR
10424 instruction. Combine will canonicalize to the first form
10425 which will allow use of NOR instructions provided by the
10426 backend if they exist. */
10427 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10428 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10430 return fold_build1 (BIT_NOT_EXPR, type,
10431 build2 (BIT_IOR_EXPR, type,
10432 TREE_OPERAND (arg0, 0),
10433 TREE_OPERAND (arg1, 0)));
10439 /* Don't touch a floating-point divide by zero unless the mode
10440 of the constant can represent infinity. */
10441 if (TREE_CODE (arg1) == REAL_CST
10442 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10443 && real_zerop (arg1))
10446 /* Optimize A / A to 1.0 if we don't care about
10447 NaNs or Infinities. Skip the transformation
10448 for non-real operands. */
10449 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10450 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10451 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10452 && operand_equal_p (arg0, arg1, 0))
10454 tree r = build_real (TREE_TYPE (arg0), dconst1);
10456 return omit_two_operands (type, r, arg0, arg1);
10459 /* The complex version of the above A / A optimization. */
10460 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10461 && operand_equal_p (arg0, arg1, 0))
10463 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10464 if (! HONOR_NANS (TYPE_MODE (elem_type))
10465 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10467 tree r = build_real (elem_type, dconst1);
10468 /* omit_two_operands will call fold_convert for us. */
10469 return omit_two_operands (type, r, arg0, arg1);
10473 /* (-A) / (-B) -> A / B */
10474 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10475 return fold_build2 (RDIV_EXPR, type,
10476 TREE_OPERAND (arg0, 0),
10477 negate_expr (arg1));
10478 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10479 return fold_build2 (RDIV_EXPR, type,
10480 negate_expr (arg0),
10481 TREE_OPERAND (arg1, 0));
10483 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10484 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10485 && real_onep (arg1))
10486 return non_lvalue (fold_convert (type, arg0));
10488 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10489 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10490 && real_minus_onep (arg1))
10491 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10493 /* If ARG1 is a constant, we can convert this to a multiply by the
10494 reciprocal. This does not have the same rounding properties,
10495 so only do this if -funsafe-math-optimizations. We can actually
10496 always safely do it if ARG1 is a power of two, but it's hard to
10497 tell if it is or not in a portable manner. */
10498 if (TREE_CODE (arg1) == REAL_CST)
10500 if (flag_unsafe_math_optimizations
10501 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10503 return fold_build2 (MULT_EXPR, type, arg0, tem);
10504 /* Find the reciprocal if optimizing and the result is exact. */
10508 r = TREE_REAL_CST (arg1);
10509 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10511 tem = build_real (type, r);
10512 return fold_build2 (MULT_EXPR, type,
10513 fold_convert (type, arg0), tem);
10517 /* Convert A/B/C to A/(B*C). */
10518 if (flag_unsafe_math_optimizations
10519 && TREE_CODE (arg0) == RDIV_EXPR)
10520 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10521 fold_build2 (MULT_EXPR, type,
10522 TREE_OPERAND (arg0, 1), arg1));
10524 /* Convert A/(B/C) to (A/B)*C. */
10525 if (flag_unsafe_math_optimizations
10526 && TREE_CODE (arg1) == RDIV_EXPR)
10527 return fold_build2 (MULT_EXPR, type,
10528 fold_build2 (RDIV_EXPR, type, arg0,
10529 TREE_OPERAND (arg1, 0)),
10530 TREE_OPERAND (arg1, 1));
10532 /* Convert C1/(X*C2) into (C1/C2)/X. */
10533 if (flag_unsafe_math_optimizations
10534 && TREE_CODE (arg1) == MULT_EXPR
10535 && TREE_CODE (arg0) == REAL_CST
10536 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10538 tree tem = const_binop (RDIV_EXPR, arg0,
10539 TREE_OPERAND (arg1, 1), 0);
10541 return fold_build2 (RDIV_EXPR, type, tem,
10542 TREE_OPERAND (arg1, 0));
10545 if (flag_unsafe_math_optimizations)
10547 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10548 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10550 /* Optimize sin(x)/cos(x) as tan(x). */
10551 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10552 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10553 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10554 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10555 CALL_EXPR_ARG (arg1, 0), 0))
10557 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10559 if (tanfn != NULL_TREE)
10560 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10563 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10564 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10565 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10566 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10567 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10568 CALL_EXPR_ARG (arg1, 0), 0))
10570 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10572 if (tanfn != NULL_TREE)
10574 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10575 return fold_build2 (RDIV_EXPR, type,
10576 build_real (type, dconst1), tmp);
10580 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10581 NaNs or Infinities. */
10582 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10583 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10584 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10586 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10587 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10589 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10590 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10591 && operand_equal_p (arg00, arg01, 0))
10593 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10595 if (cosfn != NULL_TREE)
10596 return build_call_expr (cosfn, 1, arg00);
10600 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10601 NaNs or Infinities. */
10602 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10603 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10604 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10606 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10607 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10609 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10610 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10611 && operand_equal_p (arg00, arg01, 0))
10613 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10615 if (cosfn != NULL_TREE)
10617 tree tmp = build_call_expr (cosfn, 1, arg00);
10618 return fold_build2 (RDIV_EXPR, type,
10619 build_real (type, dconst1),
10625 /* Optimize pow(x,c)/x as pow(x,c-1). */
10626 if (fcode0 == BUILT_IN_POW
10627 || fcode0 == BUILT_IN_POWF
10628 || fcode0 == BUILT_IN_POWL)
10630 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10631 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10632 if (TREE_CODE (arg01) == REAL_CST
10633 && !TREE_OVERFLOW (arg01)
10634 && operand_equal_p (arg1, arg00, 0))
10636 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10640 c = TREE_REAL_CST (arg01);
10641 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10642 arg = build_real (type, c);
10643 return build_call_expr (powfn, 2, arg1, arg);
10647 /* Optimize a/root(b/c) into a*root(c/b). */
10648 if (BUILTIN_ROOT_P (fcode1))
10650 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10652 if (TREE_CODE (rootarg) == RDIV_EXPR)
10654 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10655 tree b = TREE_OPERAND (rootarg, 0);
10656 tree c = TREE_OPERAND (rootarg, 1);
10658 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
10660 tmp = build_call_expr (rootfn, 1, tmp);
10661 return fold_build2 (MULT_EXPR, type, arg0, tmp);
10665 /* Optimize x/expN(y) into x*expN(-y). */
10666 if (BUILTIN_EXPONENT_P (fcode1))
10668 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10669 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10670 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10671 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10674 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10675 if (fcode1 == BUILT_IN_POW
10676 || fcode1 == BUILT_IN_POWF
10677 || fcode1 == BUILT_IN_POWL)
10679 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10680 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10681 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10682 tree neg11 = fold_convert (type, negate_expr (arg11));
10683 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10684 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10689 case TRUNC_DIV_EXPR:
10690 case FLOOR_DIV_EXPR:
10691 /* Simplify A / (B << N) where A and B are positive and B is
10692 a power of 2, to A >> (N + log2(B)). */
10693 strict_overflow_p = false;
10694 if (TREE_CODE (arg1) == LSHIFT_EXPR
10695 && (TYPE_UNSIGNED (type)
10696 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10698 tree sval = TREE_OPERAND (arg1, 0);
10699 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10701 tree sh_cnt = TREE_OPERAND (arg1, 1);
10702 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10704 if (strict_overflow_p)
10705 fold_overflow_warning (("assuming signed overflow does not "
10706 "occur when simplifying A / (B << N)"),
10707 WARN_STRICT_OVERFLOW_MISC);
10709 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10710 sh_cnt, build_int_cst (NULL_TREE, pow2));
10711 return fold_build2 (RSHIFT_EXPR, type,
10712 fold_convert (type, arg0), sh_cnt);
10717 case ROUND_DIV_EXPR:
10718 case CEIL_DIV_EXPR:
10719 case EXACT_DIV_EXPR:
10720 if (integer_onep (arg1))
10721 return non_lvalue (fold_convert (type, arg0));
10722 if (integer_zerop (arg1))
10724 /* X / -1 is -X. */
10725 if (!TYPE_UNSIGNED (type)
10726 && TREE_CODE (arg1) == INTEGER_CST
10727 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10728 && TREE_INT_CST_HIGH (arg1) == -1)
10729 return fold_convert (type, negate_expr (arg0));
10731 /* Convert -A / -B to A / B when the type is signed and overflow is
10733 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10734 && TREE_CODE (arg0) == NEGATE_EXPR
10735 && negate_expr_p (arg1))
10737 if (INTEGRAL_TYPE_P (type))
10738 fold_overflow_warning (("assuming signed overflow does not occur "
10739 "when distributing negation across "
10741 WARN_STRICT_OVERFLOW_MISC);
10742 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10743 negate_expr (arg1));
10745 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10746 && TREE_CODE (arg1) == NEGATE_EXPR
10747 && negate_expr_p (arg0))
10749 if (INTEGRAL_TYPE_P (type))
10750 fold_overflow_warning (("assuming signed overflow does not occur "
10751 "when distributing negation across "
10753 WARN_STRICT_OVERFLOW_MISC);
10754 return fold_build2 (code, type, negate_expr (arg0),
10755 TREE_OPERAND (arg1, 0));
10758 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10759 operation, EXACT_DIV_EXPR.
10761 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10762 At one time others generated faster code, it's not clear if they do
10763 after the last round to changes to the DIV code in expmed.c. */
10764 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10765 && multiple_of_p (type, arg0, arg1))
10766 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10768 strict_overflow_p = false;
10769 if (TREE_CODE (arg1) == INTEGER_CST
10770 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10771 &strict_overflow_p)))
10773 if (strict_overflow_p)
10774 fold_overflow_warning (("assuming signed overflow does not occur "
10775 "when simplifying division"),
10776 WARN_STRICT_OVERFLOW_MISC);
10777 return fold_convert (type, tem);
10782 case CEIL_MOD_EXPR:
10783 case FLOOR_MOD_EXPR:
10784 case ROUND_MOD_EXPR:
10785 case TRUNC_MOD_EXPR:
10786 /* X % 1 is always zero, but be sure to preserve any side
10788 if (integer_onep (arg1))
10789 return omit_one_operand (type, integer_zero_node, arg0);
10791 /* X % 0, return X % 0 unchanged so that we can get the
10792 proper warnings and errors. */
10793 if (integer_zerop (arg1))
10796 /* 0 % X is always zero, but be sure to preserve any side
10797 effects in X. Place this after checking for X == 0. */
10798 if (integer_zerop (arg0))
10799 return omit_one_operand (type, integer_zero_node, arg1);
10801 /* X % -1 is zero. */
10802 if (!TYPE_UNSIGNED (type)
10803 && TREE_CODE (arg1) == INTEGER_CST
10804 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10805 && TREE_INT_CST_HIGH (arg1) == -1)
10806 return omit_one_operand (type, integer_zero_node, arg0);
10808 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10809 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10810 strict_overflow_p = false;
10811 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10812 && (TYPE_UNSIGNED (type)
10813 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10816 /* Also optimize A % (C << N) where C is a power of 2,
10817 to A & ((C << N) - 1). */
10818 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10819 c = TREE_OPERAND (arg1, 0);
10821 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10823 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10824 build_int_cst (TREE_TYPE (arg1), 1));
10825 if (strict_overflow_p)
10826 fold_overflow_warning (("assuming signed overflow does not "
10827 "occur when simplifying "
10828 "X % (power of two)"),
10829 WARN_STRICT_OVERFLOW_MISC);
10830 return fold_build2 (BIT_AND_EXPR, type,
10831 fold_convert (type, arg0),
10832 fold_convert (type, mask));
10836 /* X % -C is the same as X % C. */
10837 if (code == TRUNC_MOD_EXPR
10838 && !TYPE_UNSIGNED (type)
10839 && TREE_CODE (arg1) == INTEGER_CST
10840 && !TREE_OVERFLOW (arg1)
10841 && TREE_INT_CST_HIGH (arg1) < 0
10842 && !TYPE_OVERFLOW_TRAPS (type)
10843 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10844 && !sign_bit_p (arg1, arg1))
10845 return fold_build2 (code, type, fold_convert (type, arg0),
10846 fold_convert (type, negate_expr (arg1)));
10848 /* X % -Y is the same as X % Y. */
10849 if (code == TRUNC_MOD_EXPR
10850 && !TYPE_UNSIGNED (type)
10851 && TREE_CODE (arg1) == NEGATE_EXPR
10852 && !TYPE_OVERFLOW_TRAPS (type))
10853 return fold_build2 (code, type, fold_convert (type, arg0),
10854 fold_convert (type, TREE_OPERAND (arg1, 0)));
10856 if (TREE_CODE (arg1) == INTEGER_CST
10857 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10858 &strict_overflow_p)))
10860 if (strict_overflow_p)
10861 fold_overflow_warning (("assuming signed overflow does not occur "
10862 "when simplifying modulos"),
10863 WARN_STRICT_OVERFLOW_MISC);
10864 return fold_convert (type, tem);
10871 if (integer_all_onesp (arg0))
10872 return omit_one_operand (type, arg0, arg1);
10876 /* Optimize -1 >> x for arithmetic right shifts. */
10877 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10878 return omit_one_operand (type, arg0, arg1);
10879 /* ... fall through ... */
10883 if (integer_zerop (arg1))
10884 return non_lvalue (fold_convert (type, arg0));
10885 if (integer_zerop (arg0))
10886 return omit_one_operand (type, arg0, arg1);
10888 /* Since negative shift count is not well-defined,
10889 don't try to compute it in the compiler. */
10890 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10893 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10894 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10895 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10896 && host_integerp (TREE_OPERAND (arg0, 1), false)
10897 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10899 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10900 + TREE_INT_CST_LOW (arg1));
10902 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10903 being well defined. */
10904 if (low >= TYPE_PRECISION (type))
10906 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10907 low = low % TYPE_PRECISION (type);
10908 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10909 return build_int_cst (type, 0);
10911 low = TYPE_PRECISION (type) - 1;
10914 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10915 build_int_cst (type, low));
10918 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10919 into x & ((unsigned)-1 >> c) for unsigned types. */
10920 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10921 || (TYPE_UNSIGNED (type)
10922 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10923 && host_integerp (arg1, false)
10924 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10925 && host_integerp (TREE_OPERAND (arg0, 1), false)
10926 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10928 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10929 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10935 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10937 lshift = build_int_cst (type, -1);
10938 lshift = int_const_binop (code, lshift, arg1, 0);
10940 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10944 /* Rewrite an LROTATE_EXPR by a constant into an
10945 RROTATE_EXPR by a new constant. */
10946 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10948 tree tem = build_int_cst (TREE_TYPE (arg1),
10949 GET_MODE_BITSIZE (TYPE_MODE (type)));
10950 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10951 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10954 /* If we have a rotate of a bit operation with the rotate count and
10955 the second operand of the bit operation both constant,
10956 permute the two operations. */
10957 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10958 && (TREE_CODE (arg0) == BIT_AND_EXPR
10959 || TREE_CODE (arg0) == BIT_IOR_EXPR
10960 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10961 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10962 return fold_build2 (TREE_CODE (arg0), type,
10963 fold_build2 (code, type,
10964 TREE_OPERAND (arg0, 0), arg1),
10965 fold_build2 (code, type,
10966 TREE_OPERAND (arg0, 1), arg1));
10968 /* Two consecutive rotates adding up to the width of the mode can
10970 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10971 && TREE_CODE (arg0) == RROTATE_EXPR
10972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10973 && TREE_INT_CST_HIGH (arg1) == 0
10974 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10975 && ((TREE_INT_CST_LOW (arg1)
10976 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10977 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10978 return TREE_OPERAND (arg0, 0);
10983 if (operand_equal_p (arg0, arg1, 0))
10984 return omit_one_operand (type, arg0, arg1);
10985 if (INTEGRAL_TYPE_P (type)
10986 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10987 return omit_one_operand (type, arg1, arg0);
10988 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10994 if (operand_equal_p (arg0, arg1, 0))
10995 return omit_one_operand (type, arg0, arg1);
10996 if (INTEGRAL_TYPE_P (type)
10997 && TYPE_MAX_VALUE (type)
10998 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10999 return omit_one_operand (type, arg1, arg0);
11000 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11005 case TRUTH_ANDIF_EXPR:
11006 /* Note that the operands of this must be ints
11007 and their values must be 0 or 1.
11008 ("true" is a fixed value perhaps depending on the language.) */
11009 /* If first arg is constant zero, return it. */
11010 if (integer_zerop (arg0))
11011 return fold_convert (type, arg0);
11012 case TRUTH_AND_EXPR:
11013 /* If either arg is constant true, drop it. */
11014 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11015 return non_lvalue (fold_convert (type, arg1));
11016 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11017 /* Preserve sequence points. */
11018 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11019 return non_lvalue (fold_convert (type, arg0));
11020 /* If second arg is constant zero, result is zero, but first arg
11021 must be evaluated. */
11022 if (integer_zerop (arg1))
11023 return omit_one_operand (type, arg1, arg0);
11024 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11025 case will be handled here. */
11026 if (integer_zerop (arg0))
11027 return omit_one_operand (type, arg0, arg1);
11029 /* !X && X is always false. */
11030 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11031 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11032 return omit_one_operand (type, integer_zero_node, arg1);
11033 /* X && !X is always false. */
11034 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11036 return omit_one_operand (type, integer_zero_node, arg0);
11038 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11039 means A >= Y && A != MAX, but in this case we know that
11042 if (!TREE_SIDE_EFFECTS (arg0)
11043 && !TREE_SIDE_EFFECTS (arg1))
11045 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11046 if (tem && !operand_equal_p (tem, arg0, 0))
11047 return fold_build2 (code, type, tem, arg1);
11049 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11050 if (tem && !operand_equal_p (tem, arg1, 0))
11051 return fold_build2 (code, type, arg0, tem);
11055 /* We only do these simplifications if we are optimizing. */
11059 /* Check for things like (A || B) && (A || C). We can convert this
11060 to A || (B && C). Note that either operator can be any of the four
11061 truth and/or operations and the transformation will still be
11062 valid. Also note that we only care about order for the
11063 ANDIF and ORIF operators. If B contains side effects, this
11064 might change the truth-value of A. */
11065 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11066 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11067 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11068 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11069 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11070 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11072 tree a00 = TREE_OPERAND (arg0, 0);
11073 tree a01 = TREE_OPERAND (arg0, 1);
11074 tree a10 = TREE_OPERAND (arg1, 0);
11075 tree a11 = TREE_OPERAND (arg1, 1);
11076 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11077 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11078 && (code == TRUTH_AND_EXPR
11079 || code == TRUTH_OR_EXPR));
11081 if (operand_equal_p (a00, a10, 0))
11082 return fold_build2 (TREE_CODE (arg0), type, a00,
11083 fold_build2 (code, type, a01, a11));
11084 else if (commutative && operand_equal_p (a00, a11, 0))
11085 return fold_build2 (TREE_CODE (arg0), type, a00,
11086 fold_build2 (code, type, a01, a10));
11087 else if (commutative && operand_equal_p (a01, a10, 0))
11088 return fold_build2 (TREE_CODE (arg0), type, a01,
11089 fold_build2 (code, type, a00, a11));
11091 /* This case is tricky because we must either have commutative
11092 operators or else A10 must not have side-effects. */
11094 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11095 && operand_equal_p (a01, a11, 0))
11096 return fold_build2 (TREE_CODE (arg0), type,
11097 fold_build2 (code, type, a00, a10),
11101 /* See if we can build a range comparison. */
11102 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11105 /* Check for the possibility of merging component references. If our
11106 lhs is another similar operation, try to merge its rhs with our
11107 rhs. Then try to merge our lhs and rhs. */
11108 if (TREE_CODE (arg0) == code
11109 && 0 != (tem = fold_truthop (code, type,
11110 TREE_OPERAND (arg0, 1), arg1)))
11111 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11113 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11118 case TRUTH_ORIF_EXPR:
11119 /* Note that the operands of this must be ints
11120 and their values must be 0 or true.
11121 ("true" is a fixed value perhaps depending on the language.) */
11122 /* If first arg is constant true, return it. */
11123 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11124 return fold_convert (type, arg0);
11125 case TRUTH_OR_EXPR:
11126 /* If either arg is constant zero, drop it. */
11127 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11128 return non_lvalue (fold_convert (type, arg1));
11129 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11130 /* Preserve sequence points. */
11131 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11132 return non_lvalue (fold_convert (type, arg0));
11133 /* If second arg is constant true, result is true, but we must
11134 evaluate first arg. */
11135 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11136 return omit_one_operand (type, arg1, arg0);
11137 /* Likewise for first arg, but note this only occurs here for
11139 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11140 return omit_one_operand (type, arg0, arg1);
11142 /* !X || X is always true. */
11143 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11144 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11145 return omit_one_operand (type, integer_one_node, arg1);
11146 /* X || !X is always true. */
11147 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11148 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11149 return omit_one_operand (type, integer_one_node, arg0);
11153 case TRUTH_XOR_EXPR:
11154 /* If the second arg is constant zero, drop it. */
11155 if (integer_zerop (arg1))
11156 return non_lvalue (fold_convert (type, arg0));
11157 /* If the second arg is constant true, this is a logical inversion. */
11158 if (integer_onep (arg1))
11160 /* Only call invert_truthvalue if operand is a truth value. */
11161 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11162 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11164 tem = invert_truthvalue (arg0);
11165 return non_lvalue (fold_convert (type, tem));
11167 /* Identical arguments cancel to zero. */
11168 if (operand_equal_p (arg0, arg1, 0))
11169 return omit_one_operand (type, integer_zero_node, arg0);
11171 /* !X ^ X is always true. */
11172 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11173 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11174 return omit_one_operand (type, integer_one_node, arg1);
11176 /* X ^ !X is always true. */
11177 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11178 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11179 return omit_one_operand (type, integer_one_node, arg0);
11185 tem = fold_comparison (code, type, op0, op1);
11186 if (tem != NULL_TREE)
11189 /* bool_var != 0 becomes bool_var. */
11190 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11191 && code == NE_EXPR)
11192 return non_lvalue (fold_convert (type, arg0));
11194 /* bool_var == 1 becomes bool_var. */
11195 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11196 && code == EQ_EXPR)
11197 return non_lvalue (fold_convert (type, arg0));
11199 /* bool_var != 1 becomes !bool_var. */
11200 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11201 && code == NE_EXPR)
11202 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11204 /* bool_var == 0 becomes !bool_var. */
11205 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11206 && code == EQ_EXPR)
11207 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11209 /* If this is an equality comparison of the address of two non-weak,
11210 unaliased symbols neither of which are extern (since we do not
11211 have access to attributes for externs), then we know the result. */
11212 if (TREE_CODE (arg0) == ADDR_EXPR
11213 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11214 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11215 && ! lookup_attribute ("alias",
11216 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11217 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11218 && TREE_CODE (arg1) == ADDR_EXPR
11219 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11220 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11221 && ! lookup_attribute ("alias",
11222 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11223 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11225 /* We know that we're looking at the address of two
11226 non-weak, unaliased, static _DECL nodes.
11228 It is both wasteful and incorrect to call operand_equal_p
11229 to compare the two ADDR_EXPR nodes. It is wasteful in that
11230 all we need to do is test pointer equality for the arguments
11231 to the two ADDR_EXPR nodes. It is incorrect to use
11232 operand_equal_p as that function is NOT equivalent to a
11233 C equality test. It can in fact return false for two
11234 objects which would test as equal using the C equality
11236 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11237 return constant_boolean_node (equal
11238 ? code == EQ_EXPR : code != EQ_EXPR,
11242 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11243 a MINUS_EXPR of a constant, we can convert it into a comparison with
11244 a revised constant as long as no overflow occurs. */
11245 if (TREE_CODE (arg1) == INTEGER_CST
11246 && (TREE_CODE (arg0) == PLUS_EXPR
11247 || TREE_CODE (arg0) == MINUS_EXPR)
11248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11249 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11250 ? MINUS_EXPR : PLUS_EXPR,
11251 fold_convert (TREE_TYPE (arg0), arg1),
11252 TREE_OPERAND (arg0, 1), 0))
11253 && !TREE_OVERFLOW (tem))
11254 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11256 /* Similarly for a NEGATE_EXPR. */
11257 if (TREE_CODE (arg0) == NEGATE_EXPR
11258 && TREE_CODE (arg1) == INTEGER_CST
11259 && 0 != (tem = negate_expr (arg1))
11260 && TREE_CODE (tem) == INTEGER_CST
11261 && !TREE_OVERFLOW (tem))
11262 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11264 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11265 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11266 && TREE_CODE (arg1) == INTEGER_CST
11267 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11268 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11269 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11270 fold_convert (TREE_TYPE (arg0), arg1),
11271 TREE_OPERAND (arg0, 1)));
11273 /* Transform comparisons of the form X +- C CMP X. */
11274 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11275 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11277 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11278 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11280 tree cst = TREE_OPERAND (arg0, 1);
11282 if (code == EQ_EXPR
11283 && !integer_zerop (cst))
11284 return omit_two_operands (type, boolean_false_node,
11285 TREE_OPERAND (arg0, 0), arg1);
11287 return omit_two_operands (type, boolean_true_node,
11288 TREE_OPERAND (arg0, 0), arg1);
11291 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11292 for !=. Don't do this for ordered comparisons due to overflow. */
11293 if (TREE_CODE (arg0) == MINUS_EXPR
11294 && integer_zerop (arg1))
11295 return fold_build2 (code, type,
11296 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11298 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11299 if (TREE_CODE (arg0) == ABS_EXPR
11300 && (integer_zerop (arg1) || real_zerop (arg1)))
11301 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11303 /* If this is an EQ or NE comparison with zero and ARG0 is
11304 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11305 two operations, but the latter can be done in one less insn
11306 on machines that have only two-operand insns or on which a
11307 constant cannot be the first operand. */
11308 if (TREE_CODE (arg0) == BIT_AND_EXPR
11309 && integer_zerop (arg1))
11311 tree arg00 = TREE_OPERAND (arg0, 0);
11312 tree arg01 = TREE_OPERAND (arg0, 1);
11313 if (TREE_CODE (arg00) == LSHIFT_EXPR
11314 && integer_onep (TREE_OPERAND (arg00, 0)))
11316 fold_build2 (code, type,
11317 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11318 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11319 arg01, TREE_OPERAND (arg00, 1)),
11320 fold_convert (TREE_TYPE (arg0),
11321 integer_one_node)),
11323 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11324 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11326 fold_build2 (code, type,
11327 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11328 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11329 arg00, TREE_OPERAND (arg01, 1)),
11330 fold_convert (TREE_TYPE (arg0),
11331 integer_one_node)),
11335 /* If this is an NE or EQ comparison of zero against the result of a
11336 signed MOD operation whose second operand is a power of 2, make
11337 the MOD operation unsigned since it is simpler and equivalent. */
11338 if (integer_zerop (arg1)
11339 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11340 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11341 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11342 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11343 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11344 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11346 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11347 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11348 fold_convert (newtype,
11349 TREE_OPERAND (arg0, 0)),
11350 fold_convert (newtype,
11351 TREE_OPERAND (arg0, 1)));
11353 return fold_build2 (code, type, newmod,
11354 fold_convert (newtype, arg1));
11357 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11358 C1 is a valid shift constant, and C2 is a power of two, i.e.
11360 if (TREE_CODE (arg0) == BIT_AND_EXPR
11361 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11362 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11364 && integer_pow2p (TREE_OPERAND (arg0, 1))
11365 && integer_zerop (arg1))
11367 tree itype = TREE_TYPE (arg0);
11368 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11369 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11371 /* Check for a valid shift count. */
11372 if (TREE_INT_CST_HIGH (arg001) == 0
11373 && TREE_INT_CST_LOW (arg001) < prec)
11375 tree arg01 = TREE_OPERAND (arg0, 1);
11376 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11377 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11378 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11379 can be rewritten as (X & (C2 << C1)) != 0. */
11380 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11382 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11383 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11384 return fold_build2 (code, type, tem, arg1);
11386 /* Otherwise, for signed (arithmetic) shifts,
11387 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11388 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11389 else if (!TYPE_UNSIGNED (itype))
11390 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11391 arg000, build_int_cst (itype, 0));
11392 /* Otherwise, of unsigned (logical) shifts,
11393 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11394 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11396 return omit_one_operand (type,
11397 code == EQ_EXPR ? integer_one_node
11398 : integer_zero_node,
11403 /* If this is an NE comparison of zero with an AND of one, remove the
11404 comparison since the AND will give the correct value. */
11405 if (code == NE_EXPR
11406 && integer_zerop (arg1)
11407 && TREE_CODE (arg0) == BIT_AND_EXPR
11408 && integer_onep (TREE_OPERAND (arg0, 1)))
11409 return fold_convert (type, arg0);
11411 /* If we have (A & C) == C where C is a power of 2, convert this into
11412 (A & C) != 0. Similarly for NE_EXPR. */
11413 if (TREE_CODE (arg0) == BIT_AND_EXPR
11414 && integer_pow2p (TREE_OPERAND (arg0, 1))
11415 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11416 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11417 arg0, fold_convert (TREE_TYPE (arg0),
11418 integer_zero_node));
11420 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11421 bit, then fold the expression into A < 0 or A >= 0. */
11422 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11426 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11427 Similarly for NE_EXPR. */
11428 if (TREE_CODE (arg0) == BIT_AND_EXPR
11429 && TREE_CODE (arg1) == INTEGER_CST
11430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11432 tree notc = fold_build1 (BIT_NOT_EXPR,
11433 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11434 TREE_OPERAND (arg0, 1));
11435 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11437 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11438 if (integer_nonzerop (dandnotc))
11439 return omit_one_operand (type, rslt, arg0);
11442 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11443 Similarly for NE_EXPR. */
11444 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11445 && TREE_CODE (arg1) == INTEGER_CST
11446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11448 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11449 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11450 TREE_OPERAND (arg0, 1), notd);
11451 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11452 if (integer_nonzerop (candnotd))
11453 return omit_one_operand (type, rslt, arg0);
11456 /* If this is a comparison of a field, we may be able to simplify it. */
11457 if ((TREE_CODE (arg0) == COMPONENT_REF
11458 || TREE_CODE (arg0) == BIT_FIELD_REF)
11459 /* Handle the constant case even without -O
11460 to make sure the warnings are given. */
11461 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11463 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11468 /* Optimize comparisons of strlen vs zero to a compare of the
11469 first character of the string vs zero. To wit,
11470 strlen(ptr) == 0 => *ptr == 0
11471 strlen(ptr) != 0 => *ptr != 0
11472 Other cases should reduce to one of these two (or a constant)
11473 due to the return value of strlen being unsigned. */
11474 if (TREE_CODE (arg0) == CALL_EXPR
11475 && integer_zerop (arg1))
11477 tree fndecl = get_callee_fndecl (arg0);
11480 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11481 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11482 && call_expr_nargs (arg0) == 1
11483 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11485 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11486 return fold_build2 (code, type, iref,
11487 build_int_cst (TREE_TYPE (iref), 0));
11491 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11492 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11493 if (TREE_CODE (arg0) == RSHIFT_EXPR
11494 && integer_zerop (arg1)
11495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11497 tree arg00 = TREE_OPERAND (arg0, 0);
11498 tree arg01 = TREE_OPERAND (arg0, 1);
11499 tree itype = TREE_TYPE (arg00);
11500 if (TREE_INT_CST_HIGH (arg01) == 0
11501 && TREE_INT_CST_LOW (arg01)
11502 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11504 if (TYPE_UNSIGNED (itype))
11506 itype = signed_type_for (itype);
11507 arg00 = fold_convert (itype, arg00);
11509 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11510 type, arg00, build_int_cst (itype, 0));
11514 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11515 if (integer_zerop (arg1)
11516 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11517 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11518 TREE_OPERAND (arg0, 1));
11520 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11521 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11522 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11523 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11524 build_int_cst (TREE_TYPE (arg1), 0));
11525 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11526 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11527 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11528 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11529 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11530 build_int_cst (TREE_TYPE (arg1), 0));
11532 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11533 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11534 && TREE_CODE (arg1) == INTEGER_CST
11535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11536 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11537 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11538 TREE_OPERAND (arg0, 1), arg1));
11540 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11541 (X & C) == 0 when C is a single bit. */
11542 if (TREE_CODE (arg0) == BIT_AND_EXPR
11543 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11544 && integer_zerop (arg1)
11545 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11547 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11548 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11549 TREE_OPERAND (arg0, 1));
11550 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11554 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11555 constant C is a power of two, i.e. a single bit. */
11556 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11557 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11558 && integer_zerop (arg1)
11559 && integer_pow2p (TREE_OPERAND (arg0, 1))
11560 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11561 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11563 tree arg00 = TREE_OPERAND (arg0, 0);
11564 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11565 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11568 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11569 when is C is a power of two, i.e. a single bit. */
11570 if (TREE_CODE (arg0) == BIT_AND_EXPR
11571 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11572 && integer_zerop (arg1)
11573 && integer_pow2p (TREE_OPERAND (arg0, 1))
11574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11575 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11577 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11578 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11579 arg000, TREE_OPERAND (arg0, 1));
11580 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11581 tem, build_int_cst (TREE_TYPE (tem), 0));
11584 if (integer_zerop (arg1)
11585 && tree_expr_nonzero_p (arg0))
11587 tree res = constant_boolean_node (code==NE_EXPR, type);
11588 return omit_one_operand (type, res, arg0);
11591 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11592 if (TREE_CODE (arg0) == NEGATE_EXPR
11593 && TREE_CODE (arg1) == NEGATE_EXPR)
11594 return fold_build2 (code, type,
11595 TREE_OPERAND (arg0, 0),
11596 TREE_OPERAND (arg1, 0));
11598 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11599 if (TREE_CODE (arg0) == BIT_AND_EXPR
11600 && TREE_CODE (arg1) == BIT_AND_EXPR)
11602 tree arg00 = TREE_OPERAND (arg0, 0);
11603 tree arg01 = TREE_OPERAND (arg0, 1);
11604 tree arg10 = TREE_OPERAND (arg1, 0);
11605 tree arg11 = TREE_OPERAND (arg1, 1);
11606 tree itype = TREE_TYPE (arg0);
11608 if (operand_equal_p (arg01, arg11, 0))
11609 return fold_build2 (code, type,
11610 fold_build2 (BIT_AND_EXPR, itype,
11611 fold_build2 (BIT_XOR_EXPR, itype,
11614 build_int_cst (itype, 0));
11616 if (operand_equal_p (arg01, arg10, 0))
11617 return fold_build2 (code, type,
11618 fold_build2 (BIT_AND_EXPR, itype,
11619 fold_build2 (BIT_XOR_EXPR, itype,
11622 build_int_cst (itype, 0));
11624 if (operand_equal_p (arg00, arg11, 0))
11625 return fold_build2 (code, type,
11626 fold_build2 (BIT_AND_EXPR, itype,
11627 fold_build2 (BIT_XOR_EXPR, itype,
11630 build_int_cst (itype, 0));
11632 if (operand_equal_p (arg00, arg10, 0))
11633 return fold_build2 (code, type,
11634 fold_build2 (BIT_AND_EXPR, itype,
11635 fold_build2 (BIT_XOR_EXPR, itype,
11638 build_int_cst (itype, 0));
11641 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11642 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11644 tree arg00 = TREE_OPERAND (arg0, 0);
11645 tree arg01 = TREE_OPERAND (arg0, 1);
11646 tree arg10 = TREE_OPERAND (arg1, 0);
11647 tree arg11 = TREE_OPERAND (arg1, 1);
11648 tree itype = TREE_TYPE (arg0);
11650 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11651 operand_equal_p guarantees no side-effects so we don't need
11652 to use omit_one_operand on Z. */
11653 if (operand_equal_p (arg01, arg11, 0))
11654 return fold_build2 (code, type, arg00, arg10);
11655 if (operand_equal_p (arg01, arg10, 0))
11656 return fold_build2 (code, type, arg00, arg11);
11657 if (operand_equal_p (arg00, arg11, 0))
11658 return fold_build2 (code, type, arg01, arg10);
11659 if (operand_equal_p (arg00, arg10, 0))
11660 return fold_build2 (code, type, arg01, arg11);
11662 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11663 if (TREE_CODE (arg01) == INTEGER_CST
11664 && TREE_CODE (arg11) == INTEGER_CST)
11665 return fold_build2 (code, type,
11666 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11667 fold_build2 (BIT_XOR_EXPR, itype,
11672 /* Attempt to simplify equality/inequality comparisons of complex
11673 values. Only lower the comparison if the result is known or
11674 can be simplified to a single scalar comparison. */
11675 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11676 || TREE_CODE (arg0) == COMPLEX_CST)
11677 && (TREE_CODE (arg1) == COMPLEX_EXPR
11678 || TREE_CODE (arg1) == COMPLEX_CST))
11680 tree real0, imag0, real1, imag1;
11683 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11685 real0 = TREE_OPERAND (arg0, 0);
11686 imag0 = TREE_OPERAND (arg0, 1);
11690 real0 = TREE_REALPART (arg0);
11691 imag0 = TREE_IMAGPART (arg0);
11694 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11696 real1 = TREE_OPERAND (arg1, 0);
11697 imag1 = TREE_OPERAND (arg1, 1);
11701 real1 = TREE_REALPART (arg1);
11702 imag1 = TREE_IMAGPART (arg1);
11705 rcond = fold_binary (code, type, real0, real1);
11706 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11708 if (integer_zerop (rcond))
11710 if (code == EQ_EXPR)
11711 return omit_two_operands (type, boolean_false_node,
11713 return fold_build2 (NE_EXPR, type, imag0, imag1);
11717 if (code == NE_EXPR)
11718 return omit_two_operands (type, boolean_true_node,
11720 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11724 icond = fold_binary (code, type, imag0, imag1);
11725 if (icond && TREE_CODE (icond) == INTEGER_CST)
11727 if (integer_zerop (icond))
11729 if (code == EQ_EXPR)
11730 return omit_two_operands (type, boolean_false_node,
11732 return fold_build2 (NE_EXPR, type, real0, real1);
11736 if (code == NE_EXPR)
11737 return omit_two_operands (type, boolean_true_node,
11739 return fold_build2 (EQ_EXPR, type, real0, real1);
11750 tem = fold_comparison (code, type, op0, op1);
11751 if (tem != NULL_TREE)
11754 /* Transform comparisons of the form X +- C CMP X. */
11755 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11756 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11757 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11758 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11759 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11760 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11762 tree arg01 = TREE_OPERAND (arg0, 1);
11763 enum tree_code code0 = TREE_CODE (arg0);
11766 if (TREE_CODE (arg01) == REAL_CST)
11767 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11769 is_positive = tree_int_cst_sgn (arg01);
11771 /* (X - c) > X becomes false. */
11772 if (code == GT_EXPR
11773 && ((code0 == MINUS_EXPR && is_positive >= 0)
11774 || (code0 == PLUS_EXPR && is_positive <= 0)))
11776 if (TREE_CODE (arg01) == INTEGER_CST
11777 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11778 fold_overflow_warning (("assuming signed overflow does not "
11779 "occur when assuming that (X - c) > X "
11780 "is always false"),
11781 WARN_STRICT_OVERFLOW_ALL);
11782 return constant_boolean_node (0, type);
11785 /* Likewise (X + c) < X becomes false. */
11786 if (code == LT_EXPR
11787 && ((code0 == PLUS_EXPR && is_positive >= 0)
11788 || (code0 == MINUS_EXPR && is_positive <= 0)))
11790 if (TREE_CODE (arg01) == INTEGER_CST
11791 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11792 fold_overflow_warning (("assuming signed overflow does not "
11793 "occur when assuming that "
11794 "(X + c) < X is always false"),
11795 WARN_STRICT_OVERFLOW_ALL);
11796 return constant_boolean_node (0, type);
11799 /* Convert (X - c) <= X to true. */
11800 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11802 && ((code0 == MINUS_EXPR && is_positive >= 0)
11803 || (code0 == PLUS_EXPR && is_positive <= 0)))
11805 if (TREE_CODE (arg01) == INTEGER_CST
11806 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11807 fold_overflow_warning (("assuming signed overflow does not "
11808 "occur when assuming that "
11809 "(X - c) <= X is always true"),
11810 WARN_STRICT_OVERFLOW_ALL);
11811 return constant_boolean_node (1, type);
11814 /* Convert (X + c) >= X to true. */
11815 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11817 && ((code0 == PLUS_EXPR && is_positive >= 0)
11818 || (code0 == MINUS_EXPR && is_positive <= 0)))
11820 if (TREE_CODE (arg01) == INTEGER_CST
11821 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11822 fold_overflow_warning (("assuming signed overflow does not "
11823 "occur when assuming that "
11824 "(X + c) >= X is always true"),
11825 WARN_STRICT_OVERFLOW_ALL);
11826 return constant_boolean_node (1, type);
11829 if (TREE_CODE (arg01) == INTEGER_CST)
11831 /* Convert X + c > X and X - c < X to true for integers. */
11832 if (code == GT_EXPR
11833 && ((code0 == PLUS_EXPR && is_positive > 0)
11834 || (code0 == MINUS_EXPR && is_positive < 0)))
11836 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11837 fold_overflow_warning (("assuming signed overflow does "
11838 "not occur when assuming that "
11839 "(X + c) > X is always true"),
11840 WARN_STRICT_OVERFLOW_ALL);
11841 return constant_boolean_node (1, type);
11844 if (code == LT_EXPR
11845 && ((code0 == MINUS_EXPR && is_positive > 0)
11846 || (code0 == PLUS_EXPR && is_positive < 0)))
11848 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11849 fold_overflow_warning (("assuming signed overflow does "
11850 "not occur when assuming that "
11851 "(X - c) < X is always true"),
11852 WARN_STRICT_OVERFLOW_ALL);
11853 return constant_boolean_node (1, type);
11856 /* Convert X + c <= X and X - c >= X to false for integers. */
11857 if (code == LE_EXPR
11858 && ((code0 == PLUS_EXPR && is_positive > 0)
11859 || (code0 == MINUS_EXPR && is_positive < 0)))
11861 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11862 fold_overflow_warning (("assuming signed overflow does "
11863 "not occur when assuming that "
11864 "(X + c) <= X is always false"),
11865 WARN_STRICT_OVERFLOW_ALL);
11866 return constant_boolean_node (0, type);
11869 if (code == GE_EXPR
11870 && ((code0 == MINUS_EXPR && is_positive > 0)
11871 || (code0 == PLUS_EXPR && is_positive < 0)))
11873 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11874 fold_overflow_warning (("assuming signed overflow does "
11875 "not occur when assuming that "
11876 "(X - c) >= X is always false"),
11877 WARN_STRICT_OVERFLOW_ALL);
11878 return constant_boolean_node (0, type);
11883 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11884 This transformation affects the cases which are handled in later
11885 optimizations involving comparisons with non-negative constants. */
11886 if (TREE_CODE (arg1) == INTEGER_CST
11887 && TREE_CODE (arg0) != INTEGER_CST
11888 && tree_int_cst_sgn (arg1) > 0)
11890 if (code == GE_EXPR)
11892 arg1 = const_binop (MINUS_EXPR, arg1,
11893 build_int_cst (TREE_TYPE (arg1), 1), 0);
11894 return fold_build2 (GT_EXPR, type, arg0,
11895 fold_convert (TREE_TYPE (arg0), arg1));
11897 if (code == LT_EXPR)
11899 arg1 = const_binop (MINUS_EXPR, arg1,
11900 build_int_cst (TREE_TYPE (arg1), 1), 0);
11901 return fold_build2 (LE_EXPR, type, arg0,
11902 fold_convert (TREE_TYPE (arg0), arg1));
11906 /* Comparisons with the highest or lowest possible integer of
11907 the specified precision will have known values. */
11909 tree arg1_type = TREE_TYPE (arg1);
11910 unsigned int width = TYPE_PRECISION (arg1_type);
11912 if (TREE_CODE (arg1) == INTEGER_CST
11913 && !TREE_OVERFLOW (arg1)
11914 && width <= 2 * HOST_BITS_PER_WIDE_INT
11915 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11917 HOST_WIDE_INT signed_max_hi;
11918 unsigned HOST_WIDE_INT signed_max_lo;
11919 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11921 if (width <= HOST_BITS_PER_WIDE_INT)
11923 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11928 if (TYPE_UNSIGNED (arg1_type))
11930 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11936 max_lo = signed_max_lo;
11937 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11943 width -= HOST_BITS_PER_WIDE_INT;
11944 signed_max_lo = -1;
11945 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11950 if (TYPE_UNSIGNED (arg1_type))
11952 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11957 max_hi = signed_max_hi;
11958 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11962 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11963 && TREE_INT_CST_LOW (arg1) == max_lo)
11967 return omit_one_operand (type, integer_zero_node, arg0);
11970 return fold_build2 (EQ_EXPR, type, op0, op1);
11973 return omit_one_operand (type, integer_one_node, arg0);
11976 return fold_build2 (NE_EXPR, type, op0, op1);
11978 /* The GE_EXPR and LT_EXPR cases above are not normally
11979 reached because of previous transformations. */
11984 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11986 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11990 arg1 = const_binop (PLUS_EXPR, arg1,
11991 build_int_cst (TREE_TYPE (arg1), 1), 0);
11992 return fold_build2 (EQ_EXPR, type,
11993 fold_convert (TREE_TYPE (arg1), arg0),
11996 arg1 = const_binop (PLUS_EXPR, arg1,
11997 build_int_cst (TREE_TYPE (arg1), 1), 0);
11998 return fold_build2 (NE_EXPR, type,
11999 fold_convert (TREE_TYPE (arg1), arg0),
12004 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12006 && TREE_INT_CST_LOW (arg1) == min_lo)
12010 return omit_one_operand (type, integer_zero_node, arg0);
12013 return fold_build2 (EQ_EXPR, type, op0, op1);
12016 return omit_one_operand (type, integer_one_node, arg0);
12019 return fold_build2 (NE_EXPR, type, op0, op1);
12024 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12026 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12030 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12031 return fold_build2 (NE_EXPR, type,
12032 fold_convert (TREE_TYPE (arg1), arg0),
12035 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12036 return fold_build2 (EQ_EXPR, type,
12037 fold_convert (TREE_TYPE (arg1), arg0),
12043 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12044 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12045 && TYPE_UNSIGNED (arg1_type)
12046 /* We will flip the signedness of the comparison operator
12047 associated with the mode of arg1, so the sign bit is
12048 specified by this mode. Check that arg1 is the signed
12049 max associated with this sign bit. */
12050 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12051 /* signed_type does not work on pointer types. */
12052 && INTEGRAL_TYPE_P (arg1_type))
12054 /* The following case also applies to X < signed_max+1
12055 and X >= signed_max+1 because previous transformations. */
12056 if (code == LE_EXPR || code == GT_EXPR)
12059 st = signed_type_for (TREE_TYPE (arg1));
12060 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12061 type, fold_convert (st, arg0),
12062 build_int_cst (st, 0));
12068 /* If we are comparing an ABS_EXPR with a constant, we can
12069 convert all the cases into explicit comparisons, but they may
12070 well not be faster than doing the ABS and one comparison.
12071 But ABS (X) <= C is a range comparison, which becomes a subtraction
12072 and a comparison, and is probably faster. */
12073 if (code == LE_EXPR
12074 && TREE_CODE (arg1) == INTEGER_CST
12075 && TREE_CODE (arg0) == ABS_EXPR
12076 && ! TREE_SIDE_EFFECTS (arg0)
12077 && (0 != (tem = negate_expr (arg1)))
12078 && TREE_CODE (tem) == INTEGER_CST
12079 && !TREE_OVERFLOW (tem))
12080 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12081 build2 (GE_EXPR, type,
12082 TREE_OPERAND (arg0, 0), tem),
12083 build2 (LE_EXPR, type,
12084 TREE_OPERAND (arg0, 0), arg1));
12086 /* Convert ABS_EXPR<x> >= 0 to true. */
12087 strict_overflow_p = false;
12088 if (code == GE_EXPR
12089 && (integer_zerop (arg1)
12090 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12091 && real_zerop (arg1)))
12092 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12094 if (strict_overflow_p)
12095 fold_overflow_warning (("assuming signed overflow does not occur "
12096 "when simplifying comparison of "
12097 "absolute value and zero"),
12098 WARN_STRICT_OVERFLOW_CONDITIONAL);
12099 return omit_one_operand (type, integer_one_node, arg0);
12102 /* Convert ABS_EXPR<x> < 0 to false. */
12103 strict_overflow_p = false;
12104 if (code == LT_EXPR
12105 && (integer_zerop (arg1) || real_zerop (arg1))
12106 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12108 if (strict_overflow_p)
12109 fold_overflow_warning (("assuming signed overflow does not occur "
12110 "when simplifying comparison of "
12111 "absolute value and zero"),
12112 WARN_STRICT_OVERFLOW_CONDITIONAL);
12113 return omit_one_operand (type, integer_zero_node, arg0);
12116 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12117 and similarly for >= into !=. */
12118 if ((code == LT_EXPR || code == GE_EXPR)
12119 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12120 && TREE_CODE (arg1) == LSHIFT_EXPR
12121 && integer_onep (TREE_OPERAND (arg1, 0)))
12122 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12123 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12124 TREE_OPERAND (arg1, 1)),
12125 build_int_cst (TREE_TYPE (arg0), 0));
12127 if ((code == LT_EXPR || code == GE_EXPR)
12128 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12129 && (TREE_CODE (arg1) == NOP_EXPR
12130 || TREE_CODE (arg1) == CONVERT_EXPR)
12131 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12132 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12134 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12135 fold_convert (TREE_TYPE (arg0),
12136 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12137 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12139 build_int_cst (TREE_TYPE (arg0), 0));
12143 case UNORDERED_EXPR:
12151 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12153 t1 = fold_relational_const (code, type, arg0, arg1);
12154 if (t1 != NULL_TREE)
12158 /* If the first operand is NaN, the result is constant. */
12159 if (TREE_CODE (arg0) == REAL_CST
12160 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12161 && (code != LTGT_EXPR || ! flag_trapping_math))
12163 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12164 ? integer_zero_node
12165 : integer_one_node;
12166 return omit_one_operand (type, t1, arg1);
12169 /* If the second operand is NaN, the result is constant. */
12170 if (TREE_CODE (arg1) == REAL_CST
12171 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12172 && (code != LTGT_EXPR || ! flag_trapping_math))
12174 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12175 ? integer_zero_node
12176 : integer_one_node;
12177 return omit_one_operand (type, t1, arg0);
12180 /* Simplify unordered comparison of something with itself. */
12181 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12182 && operand_equal_p (arg0, arg1, 0))
12183 return constant_boolean_node (1, type);
12185 if (code == LTGT_EXPR
12186 && !flag_trapping_math
12187 && operand_equal_p (arg0, arg1, 0))
12188 return constant_boolean_node (0, type);
12190 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12192 tree targ0 = strip_float_extensions (arg0);
12193 tree targ1 = strip_float_extensions (arg1);
12194 tree newtype = TREE_TYPE (targ0);
12196 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12197 newtype = TREE_TYPE (targ1);
12199 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12200 return fold_build2 (code, type, fold_convert (newtype, targ0),
12201 fold_convert (newtype, targ1));
12206 case COMPOUND_EXPR:
12207 /* When pedantic, a compound expression can be neither an lvalue
12208 nor an integer constant expression. */
12209 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12211 /* Don't let (0, 0) be null pointer constant. */
12212 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12213 : fold_convert (type, arg1);
12214 return pedantic_non_lvalue (tem);
12217 if ((TREE_CODE (arg0) == REAL_CST
12218 && TREE_CODE (arg1) == REAL_CST)
12219 || (TREE_CODE (arg0) == INTEGER_CST
12220 && TREE_CODE (arg1) == INTEGER_CST))
12221 return build_complex (type, arg0, arg1);
12225 /* An ASSERT_EXPR should never be passed to fold_binary. */
12226 gcc_unreachable ();
12230 } /* switch (code) */
12233 /* Callback for walk_tree, looking for LABEL_EXPR.
12234 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12235 Do not check the sub-tree of GOTO_EXPR. */
/* NOTE(review): this listing is elided (original lines 12236-12237,
   12241, 12243-12246 and the function close are missing), so the
   return type, braces and the switch's case labels are not visible.
   Code kept byte-identical; comments only.  */
12238 contains_label_1 (tree *tp,
12239 int *walk_subtrees,
12240 void *data ATTRIBUTE_UNUSED)
12242 switch (TREE_CODE (*tp))
/* Presumably the GOTO_EXPR arm: suppress walking into the sub-tree,
   matching the header comment -- confirm against the full source.  */
12247 *walk_subtrees = 0;
12254 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12255 accessible from outside the sub-tree. Returns NULL_TREE if no
12256 addressable label is found. */
/* NOTE(review): the header comment's "Returns NULL_TREE" looks stale --
   the visible body returns a boolean comparison against NULL_TREE, i.e.
   false when no label is found.  Verify against the full source before
   relying on the comment.  */
12259 contains_label_p (tree st)
/* Delegates the actual search to the contains_label_1 walk_tree callback.  */
12261 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12264 /* Fold a ternary expression of code CODE and type TYPE with operands
12265 OP0, OP1, and OP2. Return the folded expression if folding is
12266 successful. Otherwise, return NULL_TREE. */
/* NOTE(review): this numbered listing is elided -- many original lines
   (braces, case labels such as COND_EXPR/CALL_EXPR, and several
   sub-expressions) are missing.  Code lines are kept byte-identical;
   only comments were added, hedged where the gaps hide structure.  */
12269 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12272 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12273 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Sanity check: only genuine 3-operand expression codes are accepted.  */
12275 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12276 && TREE_CODE_LENGTH (code) == 3);
12278 /* Strip any conversions that don't change the mode. This is safe
12279 for every expression, except for a comparison expression because
12280 its signedness is derived from its operands. So, in the latter
12281 case, only strip conversions that don't change the signedness.
12283 Note that this is done as an internal manipulation within the
12284 constant folder, in order to find the simplest representation of
12285 the arguments so that their form can be studied. In any cases,
12286 the appropriate type conversions should be put back in the tree
12287 that will get out of the constant folder. */
/* COMPONENT_REF of a constant CONSTRUCTOR: look the field up directly.  */
12302 case COMPONENT_REF:
12303 if (TREE_CODE (arg0) == CONSTRUCTOR
12304 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12306 unsigned HOST_WIDE_INT idx;
12308 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
/* The COND_EXPR case label (elided above) presumably starts here.  */
12315 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12316 so all simple results must be passed through pedantic_non_lvalue. */
/* Constant condition: select the live arm, but never discard an arm that
   has side effects containing a reachable label, or whose type mismatch
   would drop a void-typed throw.  */
12317 if (TREE_CODE (arg0) == INTEGER_CST)
12319 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12320 tem = integer_zerop (arg0) ? op2 : op1;
12321 /* Only optimize constant conditions when the selected branch
12322 has the same type as the COND_EXPR. This avoids optimizing
12323 away "c ? x : throw", where the throw has a void type.
12324 Avoid throwing away that operand which contains label. */
12325 if ((!TREE_SIDE_EFFECTS (unused_op)
12326 || !contains_label_p (unused_op))
12327 && (! VOID_TYPE_P (TREE_TYPE (tem))
12328 || VOID_TYPE_P (type)))
12329 return pedantic_non_lvalue (tem);
/* A ? X : X simplifies to X (keeping A for its side effects).  */
12332 if (operand_equal_p (arg1, op2, 0))
12333 return pedantic_omit_one_operand (type, arg1, arg0);
12335 /* If we have A op B ? A : C, we may be able to convert this to a
12336 simpler expression, depending on the operation and the values
12337 of B and C. Signed zeros prevent all of these transformations,
12338 for reasons given above each one.
12340 Also try swapping the arguments and inverting the conditional. */
12341 if (COMPARISON_CLASS_P (arg0)
12342 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12343 arg1, TREE_OPERAND (arg0, 1))
12344 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12346 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Swapped variant: invert the comparison and exchange the arms.  */
12351 if (COMPARISON_CLASS_P (arg0)
12352 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12354 TREE_OPERAND (arg0, 1))
12355 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12357 tem = fold_truth_not_expr (arg0);
12358 if (tem && COMPARISON_CLASS_P (tem))
12360 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12366 /* If the second operand is simpler than the third, swap them
12367 since that produces better jump optimization results. */
12368 if (truth_value_p (TREE_CODE (arg0))
12369 && tree_swap_operands_p (op1, op2, false))
12371 /* See if this can be inverted. If it can't, possibly because
12372 it was a floating-point inequality comparison, don't do
12374 tem = fold_truth_not_expr (arg0);
12376 return fold_build3 (code, type, tem, op2, op1);
12379 /* Convert A ? 1 : 0 to simply A. */
12380 if (integer_onep (op1)
12381 && integer_zerop (op2)
12382 /* If we try to convert OP0 to our type, the
12383 call to fold will try to move the conversion inside
12384 a COND, which will recurse. In that case, the COND_EXPR
12385 is probably the best choice, so leave it alone. */
12386 && type == TREE_TYPE (arg0))
12387 return pedantic_non_lvalue (arg0);
12389 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12390 over COND_EXPR in cases such as floating point comparisons. */
12391 if (integer_zerop (op1)
12392 && integer_onep (op2)
12393 && truth_value_p (TREE_CODE (arg0)))
12394 return pedantic_non_lvalue (fold_convert (type,
12395 invert_truthvalue (arg0)));
12397 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12398 if (TREE_CODE (arg0) == LT_EXPR
12399 && integer_zerop (TREE_OPERAND (arg0, 1))
12400 && integer_zerop (op2)
12401 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12403 /* sign_bit_p only checks ARG1 bits within A's precision.
12404 If <sign bit of A> has wider type than A, bits outside
12405 of A's precision in <sign bit of A> need to be checked.
12406 If they are all 0, this optimization needs to be done
12407 in unsigned A's type, if they are all 1 in signed A's type,
12408 otherwise this can't be done. */
12409 if (TYPE_PRECISION (TREE_TYPE (tem))
12410 < TYPE_PRECISION (TREE_TYPE (arg1))
12411 && TYPE_PRECISION (TREE_TYPE (tem))
12412 < TYPE_PRECISION (type))
/* Build a double-word mask (mask_hi:mask_lo) covering the bits of ARG1
   between the inner and outer precisions, then test whether those bits
   are all ones (fold in signed type) or all zeros (fold in unsigned).  */
12414 unsigned HOST_WIDE_INT mask_lo;
12415 HOST_WIDE_INT mask_hi;
12416 int inner_width, outer_width;
12419 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12420 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12421 if (outer_width > TYPE_PRECISION (type))
12422 outer_width = TYPE_PRECISION (type);
12424 if (outer_width > HOST_BITS_PER_WIDE_INT)
12426 mask_hi = ((unsigned HOST_WIDE_INT) -1
12427 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12433 mask_lo = ((unsigned HOST_WIDE_INT) -1
12434 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12436 if (inner_width > HOST_BITS_PER_WIDE_INT)
12438 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12439 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12443 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12444 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12446 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12447 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12449 tem_type = signed_type_for (TREE_TYPE (tem));
12450 tem = fold_convert (tem_type, tem);
12452 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12453 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12455 tem_type = unsigned_type_for (TREE_TYPE (tem));
12456 tem = fold_convert (tem_type, tem);
12463 return fold_convert (type,
12464 fold_build2 (BIT_AND_EXPR,
12465 TREE_TYPE (tem), tem,
12466 fold_convert (TREE_TYPE (tem),
12470 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12471 already handled above. */
12472 if (TREE_CODE (arg0) == BIT_AND_EXPR
12473 && integer_onep (TREE_OPERAND (arg0, 1))
12474 && integer_zerop (op2)
12475 && integer_pow2p (arg1))
12477 tree tem = TREE_OPERAND (arg0, 0);
12479 if (TREE_CODE (tem) == RSHIFT_EXPR
12480 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12481 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12482 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12483 return fold_build2 (BIT_AND_EXPR, type,
12484 TREE_OPERAND (tem, 0), arg1);
12487 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12488 is probably obsolete because the first operand should be a
12489 truth value (that's why we have the two cases above), but let's
12490 leave it in until we can confirm this for all front-ends. */
12491 if (integer_zerop (op2)
12492 && TREE_CODE (arg0) == NE_EXPR
12493 && integer_zerop (TREE_OPERAND (arg0, 1))
12494 && integer_pow2p (arg1)
12495 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12496 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12497 arg1, OEP_ONLY_CONST))
12498 return pedantic_non_lvalue (fold_convert (type,
12499 TREE_OPERAND (arg0, 0)));
12501 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12502 if (integer_zerop (op2)
12503 && truth_value_p (TREE_CODE (arg0))
12504 && truth_value_p (TREE_CODE (arg1)))
12505 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12506 fold_convert (type, arg0),
12509 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12510 if (integer_onep (op2)
12511 && truth_value_p (TREE_CODE (arg0))
12512 && truth_value_p (TREE_CODE (arg1)))
12514 /* Only perform transformation if ARG0 is easily inverted. */
12515 tem = fold_truth_not_expr (arg0);
12517 return fold_build2 (TRUTH_ORIF_EXPR, type,
12518 fold_convert (type, tem),
12522 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12523 if (integer_zerop (arg1)
12524 && truth_value_p (TREE_CODE (arg0))
12525 && truth_value_p (TREE_CODE (op2)))
12527 /* Only perform transformation if ARG0 is easily inverted. */
12528 tem = fold_truth_not_expr (arg0);
12530 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12531 fold_convert (type, tem),
12535 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12536 if (integer_onep (arg1)
12537 && truth_value_p (TREE_CODE (arg0))
12538 && truth_value_p (TREE_CODE (op2)))
12539 return fold_build2 (TRUTH_ORIF_EXPR, type,
12540 fold_convert (type, arg0),
12546 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12547 of fold_ternary on them. */
12548 gcc_unreachable ();
/* BIT_FIELD_REF of a constant vector with literal offset/size: extract
   the addressed element at compile time.  */
12550 case BIT_FIELD_REF:
12551 if ((TREE_CODE (arg0) == VECTOR_CST
12552 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12553 && type == TREE_TYPE (TREE_TYPE (arg0))
12554 && host_integerp (arg1, 1)
12555 && host_integerp (op2, 1))
12557 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12558 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12561 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12562 && (idx % width) == 0
12563 && (idx = idx / width)
12564 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12566 tree elements = NULL_TREE;
12568 if (TREE_CODE (arg0) == VECTOR_CST)
12569 elements = TREE_VECTOR_CST_ELTS (arg0);
/* CONSTRUCTOR case: rebuild the element list (note it is built in
   reverse by the cons below).  */
12572 unsigned HOST_WIDE_INT idx;
12575 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12576 elements = tree_cons (NULL_TREE, value, elements);
12578 while (idx-- > 0 && elements)
12579 elements = TREE_CHAIN (elements);
12581 return TREE_VALUE (elements);
/* Element not present in the (possibly short) list: it is zero.  */
12583 return fold_convert (type, integer_zero_node);
12590 } /* switch (code) */
12593 /* Perform constant folding and related simplification of EXPR.
12594 The related simplifications include x*1 => x, x*0 => 0, etc.,
12595 and application of the associative law.
12596 NOP_EXPR conversions may be removed freely (as long as we
12597 are careful not to change the type of the overall expression).
12598 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12599 but we can constant-fold them if they have constant operands. */
/* Under --enable-checking=fold the real worker is renamed fold_1 so a
   checksumming wrapper (defined later) can verify fold does not mutate
   its argument.  */
12601 #ifdef ENABLE_FOLD_CHECKING
12602 # define fold(x) fold_1 (x)
12603 static tree fold_1 (tree);
/* NOTE(review): the function header (original lines 12604-12608) is
   elided in this listing; the visible lines are the body, kept
   byte-identical.  */
12609 const tree t = expr;
12610 enum tree_code code = TREE_CODE (t);
12611 enum tree_code_class kind = TREE_CODE_CLASS (code);
12614 /* Return right away if a constant. */
12615 if (kind == tcc_constant)
12618 /* CALL_EXPR-like objects with variable numbers of operands are
12619 treated specially. */
12620 if (kind == tcc_vl_exp)
12622 if (code == CALL_EXPR)
12624 tem = fold_call_expr (expr, false);
12625 return tem ? tem : expr;
/* Fixed-arity expressions: dispatch on operand count to the
   fold_unary / fold_binary / fold_ternary workers, returning the
   original expression when no simplification applies.  */
12630 if (IS_EXPR_CODE_CLASS (kind)
12631 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12633 tree type = TREE_TYPE (t);
12634 tree op0, op1, op2;
12636 switch (TREE_CODE_LENGTH (code))
12639 op0 = TREE_OPERAND (t, 0);
12640 tem = fold_unary (code, type, op0);
12641 return tem ? tem : expr;
12643 op0 = TREE_OPERAND (t, 0);
12644 op1 = TREE_OPERAND (t, 1);
12645 tem = fold_binary (code, type, op0, op1);
12646 return tem ? tem : expr;
12648 op0 = TREE_OPERAND (t, 0);
12649 op1 = TREE_OPERAND (t, 1);
12650 op2 = TREE_OPERAND (t, 2);
12651 tem = fold_ternary (code, type, op0, op1, op2);
12652 return tem ? tem : expr;
/* Presumably a CONST_DECL case (label elided): fold its initializer.  */
12661 return fold (DECL_INITIAL (t));
12665 } /* switch (code) */
12668 #ifdef ENABLE_FOLD_CHECKING
12671 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12672 static void fold_check_failed (tree, tree);
12673 void print_fold_checksum (tree);
12675 /* When --enable-checking=fold, compute a digest of expr before
12676 and after actual fold call to see if fold did not accidentally
12677 change original expr. */
/* NOTE(review): the wrapper's signature (presumably `tree fold (tree
   expr)`, original lines ~12679-12682) is elided from this listing.
   Code lines kept byte-identical.  */
12683 struct md5_ctx ctx;
12684 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-identity hash table prevents re-hashing shared subtrees.  */
12687 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12688 md5_init_ctx (&ctx);
12689 fold_checksum_tree (expr, &ctx, ht);
12690 md5_finish_ctx (&ctx, checksum_before);
/* Run the real fold, then re-checksum EXPR; any difference means fold
   mutated its input, which is an internal error.  */
12693 ret = fold_1 (expr);
12695 md5_init_ctx (&ctx);
12696 fold_checksum_tree (expr, &ctx, ht);
12697 md5_finish_ctx (&ctx, checksum_after);
12700 if (memcmp (checksum_before, checksum_after, 16))
12701 fold_check_failed (expr, ret);
/* Debug helper: print the MD5 digest of EXPR (as used by the fold
   checker) to stderr as 32 hex digits plus a newline.  */
12707 print_fold_checksum (tree expr)
12709 struct md5_ctx ctx;
12710 unsigned char checksum[16], cnt;
12713 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12714 md5_init_ctx (&ctx);
12715 fold_checksum_tree (expr, &ctx, ht);
12716 md5_finish_ctx (&ctx, checksum);
12718 for (cnt = 0; cnt < 16; ++cnt)
12719 fprintf (stderr, "%02x", checksum[cnt]);
12720 putc ('\n', stderr);
/* Abort compilation when the fold checker detects that fold mutated
   its input tree.  Both parameters exist only for debugger inspection.  */
12724 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12726 internal_error ("fold check: original tree changed by fold");
/* Fold EXPR's full contents into the MD5 context CTX, recursing through
   its type, chain and operands.  HT records already-visited nodes (by
   pointer) so shared subtrees are hashed once.  Fields that fold is
   allowed to modify (DECL_ASSEMBLER_NAME, type caches) are masked out
   by checksumming a scrubbed stack copy instead of the real node.
   NOTE(review): this listing is elided (labels, braces and some case
   arms are missing); code lines kept byte-identical, comments only.  */
12730 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12733 enum tree_code code;
12734 struct tree_function_decl buf;
/* BUF doubles as scratch for tree_exp and tree_type copies, so it must
   be at least as large as either.  */
12739 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12740 <= sizeof (struct tree_function_decl))
12741 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
/* Cycle/sharing guard: bail out if EXPR was already checksummed.  */
12744 slot = htab_find_slot (ht, expr, INSERT);
12748 code = TREE_CODE (expr);
12749 if (TREE_CODE_CLASS (code) == tcc_declaration
12750 && DECL_ASSEMBLER_NAME_SET_P (expr))
12752 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12753 memcpy ((char *) &buf, expr, tree_size (expr));
12754 expr = (tree) &buf;
12755 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12757 else if (TREE_CODE_CLASS (code) == tcc_type
12758 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12759 || TYPE_CACHED_VALUES_P (expr)
12760 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12762 /* Allow these fields to be modified. */
12763 memcpy ((char *) &buf, expr, tree_size (expr));
12764 expr = (tree) &buf;
12765 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12766 TYPE_POINTER_TO (expr) = NULL;
12767 TYPE_REFERENCE_TO (expr) = NULL;
12768 if (TYPE_CACHED_VALUES_P (expr))
12770 TYPE_CACHED_VALUES_P (expr) = 0;
12771 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node's raw bytes, then recurse into referenced trees.  */
12774 md5_process_bytes (expr, tree_size (expr), ctx);
12775 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12776 if (TREE_CODE_CLASS (code) != tcc_type
12777 && TREE_CODE_CLASS (code) != tcc_declaration
12778 && code != TREE_LIST)
12779 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12780 switch (TREE_CODE_CLASS (code))
12786 md5_process_bytes (TREE_STRING_POINTER (expr),
12787 TREE_STRING_LENGTH (expr), ctx);
12790 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12791 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12794 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12800 case tcc_exceptional:
/* TREE_LIST: hash purpose/value and iterate the chain via goto to
   avoid deep recursion (the recursive_label target is elided here).  */
12804 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12805 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12806 expr = TREE_CHAIN (expr);
12807 goto recursive_label;
12810 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12811 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12817 case tcc_expression:
12818 case tcc_reference:
12819 case tcc_comparison:
12822 case tcc_statement:
12824 len = TREE_OPERAND_LENGTH (expr);
12825 for (i = 0; i < len; ++i)
12826 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12828 case tcc_declaration:
12829 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12830 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12831 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12833 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12834 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12835 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12836 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12837 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12839 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12840 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12842 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12844 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12845 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12846 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type arm (its case label is elided above).  */
12850 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12851 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12852 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12853 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12854 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12855 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12856 if (INTEGRAL_TYPE_P (expr)
12857 || SCALAR_FLOAT_TYPE_P (expr))
12859 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12860 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12862 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12863 if (TREE_CODE (expr) == RECORD_TYPE
12864 || TREE_CODE (expr) == UNION_TYPE
12865 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12866 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12867 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12876 /* Fold a unary tree expression with code CODE of type TYPE with an
12877 operand OP0. Return a folded expression if successful. Otherwise,
12878 return a tree expression with code CODE of type TYPE with an
/* NOTE(review): the end of this comment and the function's return-type
   line are elided from the listing; code kept byte-identical.  */
12882 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
/* With fold checking enabled, checksum OP0 before and after to assert
   fold_unary did not mutate it.  */
12885 #ifdef ENABLE_FOLD_CHECKING
12886 unsigned char checksum_before[16], checksum_after[16];
12887 struct md5_ctx ctx;
12890 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12891 md5_init_ctx (&ctx);
12892 fold_checksum_tree (op0, &ctx, ht);
12893 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if that fails, build the plain expression node.  */
12897 tem = fold_unary (code, type, op0);
12899 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12901 #ifdef ENABLE_FOLD_CHECKING
12902 md5_init_ctx (&ctx);
12903 fold_checksum_tree (op0, &ctx, ht);
12904 md5_finish_ctx (&ctx, checksum_after);
12907 if (memcmp (checksum_before, checksum_after, 16))
12908 fold_check_failed (op0, tem);
12913 /* Fold a binary tree expression with code CODE of type TYPE with
12914 operands OP0 and OP1. Return a folded expression if successful.
12915 Otherwise, return a tree expression with code CODE of type TYPE
12916 with operands OP0 and OP1. */
12919 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
/* With fold checking enabled, checksum both operands before and after
   to assert fold_binary did not mutate either.  */
12923 #ifdef ENABLE_FOLD_CHECKING
12924 unsigned char checksum_before_op0[16],
12925 checksum_before_op1[16],
12926 checksum_after_op0[16],
12927 checksum_after_op1[16];
12928 struct md5_ctx ctx;
12931 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12932 md5_init_ctx (&ctx);
12933 fold_checksum_tree (op0, &ctx, ht);
12934 md5_finish_ctx (&ctx, checksum_before_op0);
12937 md5_init_ctx (&ctx);
12938 fold_checksum_tree (op1, &ctx, ht);
12939 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; if that fails, build the plain expression node.  */
12943 tem = fold_binary (code, type, op0, op1);
12945 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12947 #ifdef ENABLE_FOLD_CHECKING
12948 md5_init_ctx (&ctx);
12949 fold_checksum_tree (op0, &ctx, ht);
12950 md5_finish_ctx (&ctx, checksum_after_op0);
12953 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12954 fold_check_failed (op0, tem);
12956 md5_init_ctx (&ctx);
12957 fold_checksum_tree (op1, &ctx, ht);
12958 md5_finish_ctx (&ctx, checksum_after_op1);
12961 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12962 fold_check_failed (op1, tem);
12967 /* Fold a ternary tree expression with code CODE of type TYPE with
12968 operands OP0, OP1, and OP2. Return a folded expression if
12969 successful. Otherwise, return a tree expression with code CODE of
12970 type TYPE with operands OP0, OP1, and OP2. */
12973 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
/* With fold checking enabled, checksum all three operands before and
   after to assert fold_ternary did not mutate any of them.  */
12977 #ifdef ENABLE_FOLD_CHECKING
12978 unsigned char checksum_before_op0[16],
12979 checksum_before_op1[16],
12980 checksum_before_op2[16],
12981 checksum_after_op0[16],
12982 checksum_after_op1[16],
12983 checksum_after_op2[16];
12984 struct md5_ctx ctx;
12987 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12988 md5_init_ctx (&ctx);
12989 fold_checksum_tree (op0, &ctx, ht);
12990 md5_finish_ctx (&ctx, checksum_before_op0);
12993 md5_init_ctx (&ctx);
12994 fold_checksum_tree (op1, &ctx, ht);
12995 md5_finish_ctx (&ctx, checksum_before_op1);
12998 md5_init_ctx (&ctx);
12999 fold_checksum_tree (op2, &ctx, ht);
13000 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-arity codes (CALL_EXPR) must not reach here.  */
13004 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13005 tem = fold_ternary (code, type, op0, op1, op2);
13007 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13009 #ifdef ENABLE_FOLD_CHECKING
13010 md5_init_ctx (&ctx);
13011 fold_checksum_tree (op0, &ctx, ht);
13012 md5_finish_ctx (&ctx, checksum_after_op0);
13015 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13016 fold_check_failed (op0, tem);
13018 md5_init_ctx (&ctx);
13019 fold_checksum_tree (op1, &ctx, ht);
13020 md5_finish_ctx (&ctx, checksum_after_op1);
13023 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13024 fold_check_failed (op1, tem);
13026 md5_init_ctx (&ctx);
13027 fold_checksum_tree (op2, &ctx, ht);
13028 md5_finish_ctx (&ctx, checksum_after_op2);
13031 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13032 fold_check_failed (op2, tem);
13037 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13038 arguments in ARGARRAY, and a null static chain.
13039 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13040 of type TYPE from the given operands as constructed by build_call_array. */
13043 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
/* With fold checking enabled, checksum FN and the whole argument list
   before and after folding to assert nothing was mutated.  */
13046 #ifdef ENABLE_FOLD_CHECKING
13047 unsigned char checksum_before_fn[16],
13048 checksum_before_arglist[16],
13049 checksum_after_fn[16],
13050 checksum_after_arglist[16];
13051 struct md5_ctx ctx;
13055 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13056 md5_init_ctx (&ctx);
13057 fold_checksum_tree (fn, &ctx, ht);
13058 md5_finish_ctx (&ctx, checksum_before_fn);
/* One digest over all arguments rather than one per argument.  */
13061 md5_init_ctx (&ctx);
13062 for (i = 0; i < nargs; i++)
13063 fold_checksum_tree (argarray[i], &ctx, ht);
13064 md5_finish_ctx (&ctx, checksum_before_arglist);
13068 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13070 #ifdef ENABLE_FOLD_CHECKING
13071 md5_init_ctx (&ctx);
13072 fold_checksum_tree (fn, &ctx, ht);
13073 md5_finish_ctx (&ctx, checksum_after_fn);
13076 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13077 fold_check_failed (fn, tem);
13079 md5_init_ctx (&ctx);
13080 for (i = 0; i < nargs; i++)
13081 fold_checksum_tree (argarray[i], &ctx, ht);
13082 md5_finish_ctx (&ctx, checksum_after_arglist);
/* No single culprit tree for an arglist mismatch, hence NULL_TREE.  */
13085 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13086 fold_check_failed (NULL_TREE, tem);
13091 /* Perform constant folding and related simplification of initializer
13092 expression EXPR. These behave identically to "fold_buildN" but ignore
13093 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap-related global flags, then clear them and enter
   initializer-folding mode.  Must be paired with END_FOLD_INIT in the
   same scope.  NOTE(review): one continuation line (original 13104,
   presumably `flag_trapv = 0;`) is elided from this listing.  */
13095 #define START_FOLD_INIT \
13096 int saved_signaling_nans = flag_signaling_nans;\
13097 int saved_trapping_math = flag_trapping_math;\
13098 int saved_rounding_math = flag_rounding_math;\
13099 int saved_trapv = flag_trapv;\
13100 int saved_folding_initializer = folding_initializer;\
13101 flag_signaling_nans = 0;\
13102 flag_trapping_math = 0;\
13103 flag_rounding_math = 0;\
13105 folding_initializer = 1;
/* Restore every flag saved by START_FOLD_INIT.  */
13107 #define END_FOLD_INIT \
13108 flag_signaling_nans = saved_signaling_nans;\
13109 flag_trapping_math = saved_trapping_math;\
13110 flag_rounding_math = saved_rounding_math;\
13111 flag_trapv = saved_trapv;\
13112 folding_initializer = saved_folding_initializer;
/* Initializer-context variant of fold_build1: folds with the trap flags
   suppressed (see START_FOLD_INIT above; the wrapping lines are elided
   from this listing).  */
13115 fold_build1_initializer (enum tree_code code, tree type, tree op)
13120 result = fold_build1 (code, type, op);
/* Initializer-context variant of fold_build2: folds with the trap flags
   suppressed (the START/END_FOLD_INIT wrapping lines are elided from
   this listing).  */
13127 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13132 result = fold_build2 (code, type, op0, op1);
/* Initializer-context variant of fold_build3: folds with the trap flags
   suppressed (the START/END_FOLD_INIT wrapping lines are elided from
   this listing).  */
13139 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13145 result = fold_build3 (code, type, op0, op1, op2);
/* Initializer-context variant of fold_build_call_array: folds with the
   trap flags suppressed (the START/END_FOLD_INIT wrapping lines are
   elided from this listing).  */
13152 fold_build_call_array_initializer (tree type, tree fn,
13153 int nargs, tree *argarray)
13158 result = fold_build_call_array (type, fn, nargs, argarray);
/* The helper macros are private to the initializer wrappers above.  */
13164 #undef START_FOLD_INIT
13165 #undef END_FOLD_INIT
13167 /* Determine if first argument is a multiple of second argument. Return 0 if
13168 it is not, or we cannot easily determined it to be.
13170 An example of the sort of thing we care about (at this point; this routine
13171 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13172 fold cases do now) is discovering that
13174 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13180 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13182 This code also handles discovering that
13184 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13186 is a multiple of 8 so we don't have to worry about dealing with a
13187 possible remainder.
13189 Note that we *look* inside a SAVE_EXPR only to determine how it was
13190 calculated; it is not safe for fold to do much of anything else with the
13191 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13192 at run time. For example, the latter example above *cannot* be implemented
13193 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13194 evaluation time of the original SAVE_EXPR is not necessarily the same at
13195 the time the new expression is evaluated. The only optimization of this
13196 sort that would be valid is changing
13198 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13202 SAVE_EXPR (I) * SAVE_EXPR (J)
13204 (where the same SAVE_EXPR (J) is used in the original and the
13205 transformed version). */
/* NOTE(review): this listing is elided (case labels, braces and a few
   argument lines are missing); code kept byte-identical, comments only.  */
13208 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
13210 if (operand_equal_p (top, bottom, 0))
13213 if (TREE_CODE (type) != INTEGER_TYPE)
13216 switch (TREE_CODE (top))
13219 /* Bitwise and provides a power of two multiple. If the mask is
13220 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13221 if (!integer_pow2p (bottom))
/* For MULT_EXPR (label elided) either factor being a multiple suffices;
   for PLUS/MINUS (below) both operands must be multiples.  */
13226 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13227 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13231 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13232 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* LSHIFT_EXPR by a constant: rewrite as an equivalent multiplication
   and recurse, guarding against shift-count overflow.  */
13235 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13239 op1 = TREE_OPERAND (top, 1);
13240 /* const_binop may not detect overflow correctly,
13241 so check for it explicitly here. */
13242 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13243 > TREE_INT_CST_LOW (op1)
13244 && TREE_INT_CST_HIGH (op1) == 0
13245 && 0 != (t1 = fold_convert (type,
13246 const_binop (LSHIFT_EXPR,
13249 && !TREE_OVERFLOW (t1))
13250 return multiple_of_p (type, t1, bottom);
13255 /* Can't handle conversions from non-integral or wider integral type. */
13256 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13257 || (TYPE_PRECISION (type)
13258 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13261 /* .. fall through ... */
/* SAVE_EXPR (and the conversion fall-through): look at how the saved
   value was computed.  */
13264 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST case (label elided): decide by exact division, refusing
   negative values in unsigned types where the sign test is unreliable.  */
13267 if (TREE_CODE (bottom) != INTEGER_CST
13268 || integer_zerop (bottom)
13269 || (TYPE_UNSIGNED (type)
13270 && (tree_int_cst_sgn (top) < 0
13271 || tree_int_cst_sgn (bottom) < 0)))
13273 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13281 /* Return true if `t' is known to be non-negative. If the return
13282 value is based on the assumption that signed overflow is undefined,
13283 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13284 *STRICT_OVERFLOW_P. */
/* NOTE(review): sampled listing -- return type, braces, most case labels
   and many statements are elided here; the inline notes below describe
   only what the visible lines establish.  */
13287 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13289 if (t == error_mark_node)
/* Unsigned types are trivially non-negative.  */
13292 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13295 switch (TREE_CODE (t))
13298 /* Query VRP to see if it has recorded any information about
13299 the range of this object. */
13300 return ssa_name_nonnegative_p (t);
13303 /* We can't return 1 if flag_wrapv is set because
13304 ABS_EXPR<INT_MIN> = INT_MIN. */
13305 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13307 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13309 *strict_overflow_p = true;
13315 return tree_int_cst_sgn (t) >= 0;
13318 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13320 case POINTER_PLUS_EXPR:
/* For float addition, both operands non-negative implies a
   non-negative sum.  */
13322 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13323 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13325 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13326 strict_overflow_p));
13328 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13329 both unsigned and at least 2 bits shorter than the result. */
13330 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13331 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13332 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13334 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13335 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13336 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13337 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* +1 bit of headroom so the widened sum cannot reach the sign bit.  */
13339 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13340 TYPE_PRECISION (inner2)) + 1;
13341 return prec < TYPE_PRECISION (TREE_TYPE (t));
13347 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13349 /* x * x for floating point x is always non-negative. */
13350 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13352 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13354 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13355 strict_overflow_p));
13358 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13359 both unsigned and their total bits is shorter than the result. */
13360 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13361 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13362 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13364 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13365 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13366 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13367 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13368 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13369 < TYPE_PRECISION (TREE_TYPE (t));
/* Here either operand non-negative suffices (|| -- case labels elided,
   presumably bitwise ops; verify against the full source).  */
13375 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13377 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13378 strict_overflow_p));
13384 case TRUNC_DIV_EXPR:
13385 case CEIL_DIV_EXPR:
13386 case FLOOR_DIV_EXPR:
13387 case ROUND_DIV_EXPR:
13388 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13390 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13391 strict_overflow_p));
13393 case TRUNC_MOD_EXPR:
13394 case CEIL_MOD_EXPR:
13395 case FLOOR_MOD_EXPR:
13396 case ROUND_MOD_EXPR:
13398 case NON_LVALUE_EXPR:
13400 case FIX_TRUNC_EXPR:
/* Unary-like cases: the result sign follows the first operand.  */
13401 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13402 strict_overflow_p);
13404 case COMPOUND_EXPR:
13406 case GIMPLE_MODIFY_STMT:
/* Only the value operand (operand 1) matters for the result.  */
13407 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13408 strict_overflow_p);
13411 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13412 strict_overflow_p);
/* Conditional: both arms (operands 1 and 2) must be non-negative.  */
13415 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13417 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13418 strict_overflow_p));
/* Conversion case: inspect inner/outer type pairs.  */
13422 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13423 tree outer_type = TREE_TYPE (t);
13425 if (TREE_CODE (outer_type) == REAL_TYPE)
13427 if (TREE_CODE (inner_type) == REAL_TYPE)
13428 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13429 strict_overflow_p);
13430 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13432 if (TYPE_UNSIGNED (inner_type))
13434 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13435 strict_overflow_p);
13438 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13440 if (TREE_CODE (inner_type) == REAL_TYPE)
13441 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13442 strict_overflow_p);
13443 if (TREE_CODE (inner_type) == INTEGER_TYPE)
/* int -> wider int keeps the value non-negative only when the
   source is unsigned and strictly narrower.  */
13444 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13445 && TYPE_UNSIGNED (inner_type);
/* TARGET_EXPR: reason about the initializer that fills the slot.  */
13452 tree temp = TARGET_EXPR_SLOT (t);
13453 t = TARGET_EXPR_INITIAL (t);
13455 /* If the initializer is non-void, then it's a normal expression
13456 that will be assigned to the slot. */
13457 if (!VOID_TYPE_P (t))
13458 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13460 /* Otherwise, the initializer sets the slot in some way. One common
13461 way is an assignment statement at the end of the initializer. */
13464 if (TREE_CODE (t) == BIND_EXPR)
13465 t = expr_last (BIND_EXPR_BODY (t));
13466 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13467 || TREE_CODE (t) == TRY_CATCH_EXPR)
13468 t = expr_last (TREE_OPERAND (t, 0));
13469 else if (TREE_CODE (t) == STATEMENT_LIST)
13474 if ((TREE_CODE (t) == MODIFY_EXPR
13475 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13476 && GENERIC_TREE_OPERAND (t, 0) == temp)
13477 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13478 strict_overflow_p);
/* CALL_EXPR: built-in functions with known sign behavior.  */
13485 tree fndecl = get_callee_fndecl (t);
13486 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13487 switch (DECL_FUNCTION_CODE (fndecl))
/* These built-ins always produce a non-negative result.  */
13489 CASE_FLT_FN (BUILT_IN_ACOS):
13490 CASE_FLT_FN (BUILT_IN_ACOSH):
13491 CASE_FLT_FN (BUILT_IN_CABS):
13492 CASE_FLT_FN (BUILT_IN_COSH):
13493 CASE_FLT_FN (BUILT_IN_ERFC):
13494 CASE_FLT_FN (BUILT_IN_EXP):
13495 CASE_FLT_FN (BUILT_IN_EXP10):
13496 CASE_FLT_FN (BUILT_IN_EXP2):
13497 CASE_FLT_FN (BUILT_IN_FABS):
13498 CASE_FLT_FN (BUILT_IN_FDIM):
13499 CASE_FLT_FN (BUILT_IN_HYPOT):
13500 CASE_FLT_FN (BUILT_IN_POW10):
13501 CASE_INT_FN (BUILT_IN_FFS):
13502 CASE_INT_FN (BUILT_IN_PARITY):
13503 CASE_INT_FN (BUILT_IN_POPCOUNT):
13504 case BUILT_IN_BSWAP32:
13505 case BUILT_IN_BSWAP64:
13509 CASE_FLT_FN (BUILT_IN_SQRT):
13510 /* sqrt(-0.0) is -0.0. */
13511 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13513 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13514 strict_overflow_p);
13516 CASE_FLT_FN (BUILT_IN_ASINH):
13517 CASE_FLT_FN (BUILT_IN_ATAN):
13518 CASE_FLT_FN (BUILT_IN_ATANH):
13519 CASE_FLT_FN (BUILT_IN_CBRT):
13520 CASE_FLT_FN (BUILT_IN_CEIL):
13521 CASE_FLT_FN (BUILT_IN_ERF):
13522 CASE_FLT_FN (BUILT_IN_EXPM1):
13523 CASE_FLT_FN (BUILT_IN_FLOOR):
13524 CASE_FLT_FN (BUILT_IN_FMOD):
13525 CASE_FLT_FN (BUILT_IN_FREXP):
13526 CASE_FLT_FN (BUILT_IN_LCEIL):
13527 CASE_FLT_FN (BUILT_IN_LDEXP):
13528 CASE_FLT_FN (BUILT_IN_LFLOOR):
13529 CASE_FLT_FN (BUILT_IN_LLCEIL):
13530 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13531 CASE_FLT_FN (BUILT_IN_LLRINT):
13532 CASE_FLT_FN (BUILT_IN_LLROUND):
13533 CASE_FLT_FN (BUILT_IN_LRINT):
13534 CASE_FLT_FN (BUILT_IN_LROUND):
13535 CASE_FLT_FN (BUILT_IN_MODF):
13536 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13537 CASE_FLT_FN (BUILT_IN_RINT):
13538 CASE_FLT_FN (BUILT_IN_ROUND):
13539 CASE_FLT_FN (BUILT_IN_SCALB):
13540 CASE_FLT_FN (BUILT_IN_SCALBLN):
13541 CASE_FLT_FN (BUILT_IN_SCALBN):
13542 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13543 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13544 CASE_FLT_FN (BUILT_IN_SINH):
13545 CASE_FLT_FN (BUILT_IN_TANH):
13546 CASE_FLT_FN (BUILT_IN_TRUNC):
13547 /* True if the 1st argument is nonnegative. */
13548 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13549 strict_overflow_p);
13551 CASE_FLT_FN (BUILT_IN_FMAX):
13552 /* True if the 1st OR 2nd arguments are nonnegative. */
13553 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13555 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13556 strict_overflow_p)));
13558 CASE_FLT_FN (BUILT_IN_FMIN):
13559 /* True if the 1st AND 2nd arguments are nonnegative. */
13560 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13562 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13563 strict_overflow_p)));
13565 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13566 /* True if the 2nd argument is nonnegative. */
13567 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13568 strict_overflow_p);
13570 CASE_FLT_FN (BUILT_IN_POWI):
13571 /* True if the 1st argument is nonnegative or the second
13572 argument is an even integer. */
13573 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13575 tree arg1 = CALL_EXPR_ARG (t, 1);
13576 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13579 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13580 strict_overflow_p);
13582 CASE_FLT_FN (BUILT_IN_POW):
13583 /* True if the 1st argument is nonnegative or the second
13584 argument is an even integer valued real. */
13585 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13590 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13591 n = real_to_integer (&c);
13594 REAL_VALUE_TYPE cint;
/* Round-trip through the integer to test exact integrality.  */
13595 real_from_integer (&cint, VOIDmode, n,
13596 n < 0 ? -1 : 0, 0);
13597 if (real_identical (&c, &cint))
13601 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13602 strict_overflow_p);
13609 /* ... fall through ... */
13613 tree type = TREE_TYPE (t);
13614 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13615 && truth_value_p (TREE_CODE (t)))
13616 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13617 have a signed:1 type (where the value is -1 and 0). */
13622 /* We don't know sign of `t', so be conservative and return false. */
13626 /* Return true if `t' is known to be non-negative. Handle warnings
13627 about undefined signed overflow. */
/* Thin wrapper over tree_expr_nonnegative_warnv_p: runs the query with a
   local flag, then emits a strict-overflow warning if the answer relied
   on signed overflow being undefined.  NOTE(review): sampled listing --
   return type, braces and the final return line are elided.  */
13630 tree_expr_nonnegative_p (tree t)
13632 bool ret, strict_overflow_p;
13634 strict_overflow_p = false;
13635 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13636 if (strict_overflow_p)
13637 fold_overflow_warning (("assuming signed overflow does not occur when "
13638 "determining that expression is always "
13640 WARN_STRICT_OVERFLOW_MISC);
13644 /* Return true when T is an address and is known to be nonzero.
13645 For floating point we further ensure that T is not denormal.
13646 Similar logic is present in nonzero_address in rtlanal.h.
13648 If the return value is based on the assumption that signed overflow
13649 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13650 change *STRICT_OVERFLOW_P. */
/* NOTE(review): sampled listing -- return type, braces and most case
   labels are elided; comments below describe only visible lines.  */
13653 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13655 tree type = TREE_TYPE (t);
13656 bool sub_strict_overflow_p;
13658 /* Doing something useful for floating point would need more work. */
13659 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13662 switch (TREE_CODE (t))
13665 /* Query VRP to see if it has recorded any information about
13666 the range of this object. */
13667 return ssa_name_nonzero_p (t);
13670 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13671 strict_overflow_p);
13674 return !integer_zerop (t);
13676 case POINTER_PLUS_EXPR:
13678 if (TYPE_OVERFLOW_UNDEFINED (type))
13680 /* With the presence of negative values it is hard
13681 to say something. */
13682 sub_strict_overflow_p = false;
13683 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13684 &sub_strict_overflow_p)
13685 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13686 &sub_strict_overflow_p))
13688 /* One of operands must be positive and the other non-negative. */
13689 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13690 overflows, on a twos-complement machine the sum of two
13691 nonnegative numbers can never be zero. */
13692 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13694 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13695 strict_overflow_p));
/* Multiplication-like case (label elided): nonzero * nonzero is
   nonzero only under undefined-overflow rules, hence the flag set.  */
13700 if (TYPE_OVERFLOW_UNDEFINED (type))
13702 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13704 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13705 strict_overflow_p))
13707 *strict_overflow_p = true;
/* Conversion case: safe only when precision is not narrowed.  */
13715 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13716 tree outer_type = TREE_TYPE (t);
13718 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13719 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13720 strict_overflow_p));
/* Address case: a non-weak decl or a constant has a nonzero address.  */
13726 tree base = get_base_address (TREE_OPERAND (t, 0));
13731 /* Weak declarations may link to NULL. */
13732 if (VAR_OR_FUNCTION_DECL_P (base))
13733 return !DECL_WEAK (base);
13735 /* Constants are never weak. */
13736 if (CONSTANT_CLASS_P (base))
/* Conditional: both arms nonzero; propagate sub-flag only then.  */
13743 sub_strict_overflow_p = false;
13744 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13745 &sub_strict_overflow_p)
13746 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13747 &sub_strict_overflow_p))
13749 if (sub_strict_overflow_p)
13750 *strict_overflow_p = true;
/* MIN-like case (label elided): both operands must be nonzero.  */
13756 sub_strict_overflow_p = false;
13757 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13758 &sub_strict_overflow_p)
13759 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13760 &sub_strict_overflow_p))
13762 if (sub_strict_overflow_p)
13763 *strict_overflow_p = true;
/* MAX-like case (per the comments below).  */
13768 sub_strict_overflow_p = false;
13769 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13770 &sub_strict_overflow_p))
13772 if (sub_strict_overflow_p)
13773 *strict_overflow_p = true;
13775 /* When both operands are nonzero, then MAX must be too. */
13776 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13777 strict_overflow_p))
13780 /* MAX where operand 0 is positive is positive. */
13781 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13782 strict_overflow_p);
13784 /* MAX where operand 1 is positive is positive. */
13785 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13786 &sub_strict_overflow_p)
13787 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13788 &sub_strict_overflow_p))
13790 if (sub_strict_overflow_p)
13791 *strict_overflow_p = true;
13796 case COMPOUND_EXPR:
13798 case GIMPLE_MODIFY_STMT:
13800 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13801 strict_overflow_p);
13804 case NON_LVALUE_EXPR:
13805 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13806 strict_overflow_p);
/* Bitwise-or-like case (label elided): either operand nonzero.  */
13809 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13811 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13812 strict_overflow_p));
13815 return alloca_call_p (t);
13823 /* Return true when T is an address and is known to be nonzero.
13824 Handle warnings about undefined signed overflow. */
/* Wrapper mirroring tree_expr_nonnegative_p: query with a local flag,
   warn if the answer assumed undefined signed overflow.
   NOTE(review): sampled listing -- return type, braces and the final
   return line are elided.  */
13827 tree_expr_nonzero_p (tree t)
13829 bool ret, strict_overflow_p;
13831 strict_overflow_p = false;
13832 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13833 if (strict_overflow_p)
13834 fold_overflow_warning (("assuming signed overflow does not occur when "
13835 "determining that expression is always "
13837 WARN_STRICT_OVERFLOW_MISC);
13841 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13842 attempt to fold the expression to a constant without modifying TYPE,
13845 If the expression could be simplified to a constant, then return
13846 the constant. If the expression would not be simplified to a
13847 constant, then return NULL_TREE. */
/* Delegates to fold_binary and keeps the result only if TREE_CONSTANT.  */
13850 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13852 tree tem = fold_binary (code, type, op0, op1);
13853 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13856 /* Given the components of a unary expression CODE, TYPE and OP0,
13857 attempt to fold the expression to a constant without modifying
13860 If the expression could be simplified to a constant, then return
13861 the constant. If the expression would not be simplified to a
13862 constant, then return NULL_TREE. */
/* Unary counterpart of fold_binary_to_constant.  */
13865 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13867 tree tem = fold_unary (code, type, op0);
13868 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13871 /* If EXP represents referencing an element in a constant string
13872 (either via pointer arithmetic or array indexing), return the
13873 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): sampled listing -- braces, the ARRAY_REF branch header
   and several lines are elided.  */
13876 fold_read_from_constant_string (tree exp)
13878 if ((TREE_CODE (exp) == INDIRECT_REF
13879 || TREE_CODE (exp) == ARRAY_REF)
13880 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13882 tree exp1 = TREE_OPERAND (exp, 0);
13886 if (TREE_CODE (exp) == INDIRECT_REF)
13887 string = string_constant (exp1, &index);
/* Otherwise (ARRAY_REF): compute the index relative to the array's
   lower bound, in sizetype.  */
13890 tree low_bound = array_ref_low_bound (exp);
13891 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13893 /* Optimize the special-case of a zero lower bound.
13895 We convert the low_bound to sizetype to avoid some problems
13896 with constant folding. (E.g. suppose the lower bound is 1,
13897 and its mode is QI. Without the conversion,l (ARRAY
13898 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13899 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
13900 if (! integer_zerop (low_bound))
13901 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Read the element only when the index is a constant inside the
   string bounds and the element type is a 1-byte integer mode.  */
13907 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13908 && TREE_CODE (string) == STRING_CST
13909 && TREE_CODE (index) == INTEGER_CST
13910 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13911 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13913 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13914 return fold_convert (TREE_TYPE (exp),
13915 build_int_cst (NULL_TREE,
13916 (TREE_STRING_POINTER (string)
13917 [TREE_INT_CST_LOW (index)])));
13922 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13923 an integer constant or real constant.
13925 TYPE is the type of the result. */
/* NOTE(review): sampled listing -- case labels, braces and the final
   return of T are elided.  */
13928 fold_negate_const (tree arg0, tree type)
13930 tree t = NULL_TREE;
13932 switch (TREE_CODE (arg0))
/* INTEGER_CST: negate the double-int pair, tracking overflow.  */
13936 unsigned HOST_WIDE_INT low;
13937 HOST_WIDE_INT high;
13938 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13939 TREE_INT_CST_HIGH (arg0),
/* Overflow is only meaningful for signed result types.  */
13941 t = force_fit_type_double (type, low, high, 1,
13942 (overflow | TREE_OVERFLOW (arg0))
13943 && !TYPE_UNSIGNED (type));
/* REAL_CST: negate the real value directly.  */
13948 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13952 gcc_unreachable ();
13958 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13959 an integer constant or real constant.
13961 TYPE is the type of the result. */
/* NOTE(review): sampled listing -- case labels, braces and the final
   return of T are elided.  */
13964 fold_abs_const (tree arg0, tree type)
13966 tree t = NULL_TREE;
13968 switch (TREE_CODE (arg0))
13971 /* If the value is unsigned, then the absolute value is
13972 the same as the ordinary value. */
13973 if (TYPE_UNSIGNED (type))
13975 /* Similarly, if the value is non-negative. */
13976 else if (INT_CST_LT (integer_minus_one_node, arg0))
13978 /* If the value is negative, then the absolute value is
/* ...its negation: negate the double-int pair and refit.  */
13982 unsigned HOST_WIDE_INT low;
13983 HOST_WIDE_INT high;
13984 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13985 TREE_INT_CST_HIGH (arg0),
13987 t = force_fit_type_double (type, low, high, -1,
13988 overflow | TREE_OVERFLOW (arg0));
/* REAL_CST: flip the sign only when the constant is negative.  */
13993 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13994 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14000 gcc_unreachable ();
14006 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14007 constant. TYPE is the type of the result. */
/* Bitwise complement of both halves of the double-int constant,
   preserving the operand's overflow flag.  */
14010 fold_not_const (tree arg0, tree type)
14012 tree t = NULL_TREE;
14014 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14016 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14017 ~TREE_INT_CST_HIGH (arg0), 0,
14018 TREE_OVERFLOW (arg0));
14023 /* Given CODE, a relational operator, the target type, TYPE and two
14024 constant operands OP0 and OP1, return the result of the
14025 relational operation. If the result is not a compile time
14026 constant, then return NULL_TREE. */
/* NOTE(review): sampled listing -- braces, several case labels of the
   NaN switch and some statements are elided.  */
14029 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14031 int result, invert;
14033 /* From here on, the only cases we handle are when the result is
14034 known to be a constant. */
14036 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14038 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14039 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14041 /* Handle the cases where either operand is a NaN. */
14042 if (real_isnan (c0) || real_isnan (c1))
14052 case UNORDERED_EXPR:
/* With -ftrapping-math an ordered comparison of NaNs cannot be
   folded away (it may raise an exception at run time).  */
14066 if (flag_trapping_math)
14072 gcc_unreachable ();
14075 return constant_boolean_node (result, type);
/* No NaNs: delegate the ordered comparison to real_compare.  */
14078 return constant_boolean_node (real_compare (code, c0, c1), type);
14081 /* Handle equality/inequality of complex constants. */
14082 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14084 tree rcond = fold_relational_const (code, type,
14085 TREE_REALPART (op0),
14086 TREE_REALPART (op1));
14087 tree icond = fold_relational_const (code, type,
14088 TREE_IMAGPART (op0),
14089 TREE_IMAGPART (op1));
14090 if (code == EQ_EXPR)
14091 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14092 else if (code == NE_EXPR)
14093 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14098 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14100 To compute GT, swap the arguments and do LT.
14101 To compute GE, do LT and invert the result.
14102 To compute LE, swap the arguments, do LT and invert the result.
14103 To compute NE, do EQ and invert the result.
14105 Therefore, the code below must handle only EQ and LT. */
14107 if (code == LE_EXPR || code == GT_EXPR)
14112 code = swap_tree_comparison (code);
14115 /* Note that it is safe to invert for real values here because we
14116 have already handled the one case that it matters. */
14119 if (code == NE_EXPR || code == GE_EXPR)
14122 code = invert_tree_comparison (code, false);
14125 /* Compute a result for LT or EQ if args permit;
14126 Otherwise return T. */
14127 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14129 if (code == EQ_EXPR)
14130 result = tree_int_cst_equal (op0, op1);
14131 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14132 result = INT_CST_LT_UNSIGNED (op0, op1);
14134 result = INT_CST_LT (op0, op1);
14141 return constant_boolean_node (result, type);
14144 /* Build an expression for the a clean point containing EXPR with type TYPE.
14145 Don't build a cleanup point expression for EXPR which don't have side
/* NOTE(review): sampled listing -- the early "return expr" bodies of the
   guards below are elided.  */
14149 fold_build_cleanup_point_expr (tree type, tree expr)
14151 /* If the expression does not have side effects then we don't have to wrap
14152 it with a cleanup point expression. */
14153 if (!TREE_SIDE_EFFECTS (expr))
14156 /* If the expression is a return, check to see if the expression inside the
14157 return has no side effects or the right hand side of the modify expression
14158 inside the return. If either don't have side effects set we don't need to
14159 wrap the expression in a cleanup point expression. Note we don't check the
14160 left hand side of the modify because it should always be a return decl. */
14161 if (TREE_CODE (expr) == RETURN_EXPR)
14163 tree op = TREE_OPERAND (expr, 0);
14164 if (!op || !TREE_SIDE_EFFECTS (op))
14166 op = TREE_OPERAND (op, 1);
14167 if (!TREE_SIDE_EFFECTS (op))
14171 return build1 (CLEANUP_POINT_EXPR, type, expr);
14174 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14175 avoid confusing the gimplify process. */
/* NOTE(review): sampled listing -- the else branch header and the final
   return of T are elided.  */
14178 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14180 /* The size of the object is not relevant when talking about its address. */
14181 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14182 t = TREE_OPERAND (t, 0);
14184 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14185 if (TREE_CODE (t) == INDIRECT_REF
14186 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p, with a NOP to the requested pointer type if needed.  */
14188 t = TREE_OPERAND (t, 0)
14189 if (TREE_TYPE (t) != ptrtype)
14190 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise mark the innermost referenced object addressable and
   build a real ADDR_EXPR.  */
14196 while (handled_component_p (base))
14197 base = TREE_OPERAND (base, 0);
14199 TREE_ADDRESSABLE (base) = 1;
14201 t = build1 (ADDR_EXPR, ptrtype, t);
/* Convenience wrapper: take T's address using a freshly built pointer
   type to TREE_TYPE (T).  NOTE(review): sampled listing -- return type
   and braces elided.  */
14208 build_fold_addr_expr (tree t)
14210 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14213 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14214 of an indirection through OP0, or NULL_TREE if no simplification is
/* NOTE(review): sampled listing -- local declarations (SUB etc.), braces
   and the final NULL_TREE return are elided.  */
14218 fold_indirect_ref_1 (tree type, tree op0)
14224 subtype = TREE_TYPE (sub);
14225 if (!POINTER_TYPE_P (subtype))
14228 if (TREE_CODE (sub) == ADDR_EXPR)
14230 tree op = TREE_OPERAND (sub, 0);
14231 tree optype = TREE_TYPE (op);
14232 /* *&CONST_DECL -> to the value of the const decl. */
14233 if (TREE_CODE (op) == CONST_DECL)
14234 return DECL_INITIAL (op);
14235 /* *&p => p; make sure to handle *&"str"[cst] here. */
14236 if (type == optype)
14238 tree fop = fold_read_from_constant_string (op);
14244 /* *(foo *)&fooarray => fooarray[0] */
14245 else if (TREE_CODE (optype) == ARRAY_TYPE
14246 && type == TREE_TYPE (optype))
14248 tree type_domain = TYPE_DOMAIN (optype);
14249 tree min_val = size_zero_node;
14250 if (type_domain && TYPE_MIN_VALUE (type_domain))
14251 min_val = TYPE_MIN_VALUE (type_domain);
14252 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14254 /* *(foo *)&complexfoo => __real__ complexfoo */
14255 else if (TREE_CODE (optype) == COMPLEX_TYPE
14256 && type == TREE_TYPE (optype))
14257 return fold_build1 (REALPART_EXPR, type, op);
14258 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14259 else if (TREE_CODE (optype) == VECTOR_TYPE
14260 && type == TREE_TYPE (optype))
14262 tree part_width = TYPE_SIZE (type);
14263 tree index = bitsize_int (0);
14264 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14268 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14269 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14270 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14272 tree op00 = TREE_OPERAND (sub, 0);
14273 tree op01 = TREE_OPERAND (sub, 1);
14277 op00type = TREE_TYPE (op00);
14278 if (TREE_CODE (op00) == ADDR_EXPR
14279 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14280 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* The offset must equal exactly one element size for the
   IMAGPART rewrite to apply.  */
14282 tree size = TYPE_SIZE_UNIT (type);
14283 if (tree_int_cst_equal (size, op01))
14284 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14288 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14289 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14290 && type == TREE_TYPE (TREE_TYPE (subtype)))
14293 tree min_val = size_zero_node;
14294 sub = build_fold_indirect_ref (sub);
14295 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14296 if (type_domain && TYPE_MIN_VALUE (type_domain))
14297 min_val = TYPE_MIN_VALUE (type_domain);
14298 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14304 /* Builds an expression for an indirection through T, simplifying some
/* Try fold_indirect_ref_1 first; fall back to a plain INDIRECT_REF.
   NOTE(review): sampled listing -- the "if (sub) return sub;" line and
   braces are elided.  */
14308 build_fold_indirect_ref (tree t)
14310 tree type = TREE_TYPE (TREE_TYPE (t));
14311 tree sub = fold_indirect_ref_1 (type, t);
14316 return build1 (INDIRECT_REF, type, t);
14319 /* Given an INDIRECT_REF T, return either T or a simplified version. */
/* NOTE(review): sampled listing -- the return statements and braces
   are elided; only the simplification attempt is visible.  */
14322 fold_indirect_ref (tree t)
14324 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14332 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14333 whose result is ignored. The type of the returned tree need not be
14334 the same as the original expression. */
/* NOTE(review): sampled listing -- the enclosing loop, braces, default
   cases and the final return are elided.  */
14337 fold_ignored_result (tree t)
14339 if (!TREE_SIDE_EFFECTS (t))
14340 return integer_zero_node;
/* Peel wrappers according to the tree-code class.  */
14343 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14346 t = TREE_OPERAND (t, 0);
14350 case tcc_comparison:
/* Keep whichever comparison operand still has side effects.  */
14351 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14352 t = TREE_OPERAND (t, 0);
14353 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14354 t = TREE_OPERAND (t, 1);
14359 case tcc_expression:
14360 switch (TREE_CODE (t))
14362 case COMPOUND_EXPR:
14363 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14365 t = TREE_OPERAND (t, 0);
/* Conditional-like case (label elided): only strip when both
   arms are side-effect free.  */
14369 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14370 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14372 t = TREE_OPERAND (t, 0);
14385 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14386 This can only be applied to objects of a sizetype. */
/* NOTE(review): sampled listing -- braces, some early returns and the
   final return of VALUE are elided.  */
14389 round_up (tree value, int divisor)
14391 tree div = NULL_TREE;
14393 gcc_assert (divisor > 0);
14397 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14398 have to do anything. Only do this when we are not given a const,
14399 because in that case, this check is more expensive than just
14401 if (TREE_CODE (value) != INTEGER_CST)
14403 div = build_int_cst (TREE_TYPE (value), divisor);
14405 if (multiple_of_p (TREE_TYPE (value), value, div))
14409 /* If divisor is a power of two, simplify this to bit manipulation. */
14410 if (divisor == (divisor & -divisor))
/* Constant case: round the low word up directly.  */
14412 if (TREE_CODE (value) == INTEGER_CST)
14414 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14415 unsigned HOST_WIDE_INT high;
14418 if ((low & (divisor - 1)) == 0)
14421 overflow_p = TREE_OVERFLOW (value);
14422 high = TREE_INT_CST_HIGH (value);
14423 low &= ~(divisor - 1);
14432 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant power-of-two case: (value + divisor-1) & -divisor.  */
14439 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14440 value = size_binop (PLUS_EXPR, value, t);
14441 t = build_int_cst (TREE_TYPE (value), -divisor);
14442 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide then multiply back.  */
14448 div = build_int_cst (TREE_TYPE (value), divisor);
14449 value = size_binop (CEIL_DIV_EXPR, value, div);
14450 value = size_binop (MULT_EXPR, value, div);
14456 /* Likewise, but round down. */
/* NOTE(review): sampled listing -- braces, early returns and the final
   return of VALUE are elided; mirrors round_up above.  */
14459 round_down (tree value, int divisor)
14461 tree div = NULL_TREE;
14463 gcc_assert (divisor > 0);
14467 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14468 have to do anything. Only do this when we are not given a const,
14469 because in that case, this check is more expensive than just
14471 if (TREE_CODE (value) != INTEGER_CST)
14473 div = build_int_cst (TREE_TYPE (value), divisor);
14475 if (multiple_of_p (TREE_TYPE (value), value, div))
14479 /* If divisor is a power of two, simplify this to bit manipulation. */
14480 if (divisor == (divisor & -divisor))
/* Power-of-two: value & -divisor clears the low bits.  */
14484 t = build_int_cst (TREE_TYPE (value), -divisor);
14485 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: floor-divide then multiply back.  */
14490 div = build_int_cst (TREE_TYPE (value), divisor);
14491 value = size_binop (FLOOR_DIV_EXPR, value, div);
14492 value = size_binop (MULT_EXPR, value, div);
14498 /* Returns the pointer to the base of the object addressed by EXP and
14499 extracts the information about the offset of the access, storing it
14500 to PBITPOS and POFFSET. */
/* NOTE(review): sampled listing -- the else branch (non-ADDR_EXPR),
   braces and the final return of CORE are elided.  */
14503 split_address_to_core_and_offset (tree exp,
14504 HOST_WIDE_INT *pbitpos, tree *poffset)
14507 enum machine_mode mode;
14508 int unsignedp, volatilep;
14509 HOST_WIDE_INT bitsize;
14511 if (TREE_CODE (exp) == ADDR_EXPR)
/* Peel the reference inside the ADDR_EXPR and re-take the address
   of the inner core object.  */
14513 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14514 poffset, &mode, &unsignedp, &volatilep,
14516 core = build_fold_addr_expr (core);
/* Non-ADDR_EXPR fallback (header elided): no variable offset.  */
14522 *poffset = NULL_TREE;
14528 /* Returns true if addresses of E1 and E2 differ by a constant, false
14529 otherwise. If they do, E1 - E2 is stored in *DIFF. */
/* NOTE(review): sampled listing -- braces, several early returns and
   the final "return true" are elided.  */
14532 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14535 HOST_WIDE_INT bitpos1, bitpos2;
14536 tree toffset1, toffset2, tdiff, type;
14538 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14539 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Both offsets must be whole bytes and the cores identical.  */
14541 if (bitpos1 % BITS_PER_UNIT != 0
14542 || bitpos2 % BITS_PER_UNIT != 0
14543 || !operand_equal_p (core1, core2, 0))
14546 if (toffset1 && toffset2)
14548 type = TREE_TYPE (toffset1);
14549 if (type != TREE_TYPE (toffset2))
14550 toffset2 = fold_convert (type, toffset2);
14552 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14553 if (!cst_and_fits_in_hwi (tdiff))
14556 *diff = int_cst_value (tdiff);
14558 else if (toffset1 || toffset2)
14560 /* If only one of the offsets is non-constant, the difference cannot
/* Finally fold the byte distance between the bit positions in.  */
14567 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14571 /* Simplify the floating point expression EXP when the sign of the
14572 result is not significant. Return NULL_TREE if no simplification
14576 fold_strip_sign_ops (tree exp)
14580 switch (TREE_CODE (exp))
14584 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14585 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14589 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14591 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14592 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14593 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14594 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14595 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14596 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14599 case COMPOUND_EXPR:
14600 arg0 = TREE_OPERAND (exp, 0);
14601 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14603 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14607 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14608 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14610 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14611 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14612 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14617 const enum built_in_function fcode = builtin_mathfn_code (exp);
14620 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14621 /* Strip copysign function call, return the 1st argument. */
14622 arg0 = CALL_EXPR_ARG (exp, 0);
14623 arg1 = CALL_EXPR_ARG (exp, 1);
14624 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14627 /* Strip sign ops from the argument of "odd" math functions. */
14628 if (negate_mathfn_p (fcode))
14630 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14632 return build_call_expr (get_callee_fndecl (exp), 1, arg0);